Version 2.14.0-244.0.dev
Merge commit '155ac440f76b8ae02b5a32a7953afe9138ba01f4' into 'dev'
diff --git a/pkg/compiler/lib/src/js_backend/deferred_holder_expression.dart b/pkg/compiler/lib/src/js_backend/deferred_holder_expression.dart
index f076a8a..18e3b5a 100644
--- a/pkg/compiler/lib/src/js_backend/deferred_holder_expression.dart
+++ b/pkg/compiler/lib/src/js_backend/deferred_holder_expression.dart
@@ -3,14 +3,15 @@
// BSD-style license that can be found in the LICENSE file.
import 'package:js_ast/src/precedence.dart' as js show PRIMARY;
+import 'package:front_end/src/api_unstable/dart2js.dart' show $A;
import '../common_elements.dart' show JCommonElements;
import '../elements/entities.dart';
import '../js/js.dart' as js;
import '../serialization/serialization.dart';
import '../util/util.dart';
-import '../js_emitter/model.dart' show Fragment;
-
+import '../js_emitter/model.dart';
+import '../constants/values.dart' show ConstantValue;
import 'namer.dart';
// TODO(joshualitt): Figure out how to subsume more of the modular naming
@@ -302,15 +303,29 @@
/// An abstraction representing a [Holder] object, which will contain some
/// portion of the programs code.
class Holder {
- final String name;
+ final String key;
final Map<String, int> refCountPerResource = {};
+ final Map<String, String> localNames = {};
final Map<String, List<js.Property>> propertiesPerResource = {};
int _index;
int _hashCode;
- Holder(this.name);
+ Holder(this.key);
- int refCount(String resource) => refCountPerResource[resource];
+ int refCount(String resource) {
+ assert(refCountPerResource.containsKey(resource));
+ return refCountPerResource[resource];
+ }
+
+ String localName(String resource) {
+ assert(localNames.containsKey(resource));
+ return localNames[resource];
+ }
+
+ void setLocalName(String resource, String name) {
+ assert(!localNames.containsKey(resource));
+ localNames[resource] = name;
+ }
void registerUse(String resource) {
refCountPerResource.update(resource, (count) => count + 1,
@@ -334,12 +349,12 @@
@override
bool operator ==(that) {
- return that is Holder && name == that.name;
+ return that is Holder && key == that.key;
}
@override
int get hashCode {
- return _hashCode ??= Hashing.objectsHash(name);
+ return _hashCode ??= Hashing.objectsHash(key);
}
}
@@ -353,11 +368,25 @@
final List<DeferredHolderParameter> holderParameters = [];
final List<DeferredHolderResource> holderResources = [];
final Map<String, Set<Holder>> holdersPerResource = {};
- final Map<String, Holder> holderMap = {};
final JCommonElements _commonElements;
+ final bool enableMinification;
+ final Holder globalObjectForStaticState =
+ Holder(globalObjectNameForStaticState());
+ final Holder globalObjectForInterceptors =
+ Holder(globalObjectNameForInterceptors());
+ final Set<Holder> allHolders = {};
DeferredHolderResource mainHolderResource;
+ Holder mainHolder;
+ Holder mainConstantHolder;
- DeferredHolderExpressionFinalizerImpl(this._commonElements) {
+ /// Maps of various object types to the holders they ended up in.
+ final Map<Library, Holder> libraryMap = {};
+ final Map<ClassEntity, Holder> classEntityMap = {};
+ final Map<ConstantValue, Holder> constantValueMap = {};
+ final Map<MemberEntity, Holder> memberEntityMap = {};
+
+ DeferredHolderExpressionFinalizerImpl(this._commonElements,
+ {this.enableMinification = true}) {
_visitor = _DeferredHolderExpressionCollectorVisitor(this);
}
@@ -366,25 +395,17 @@
_visitor.setResourceNameAndVisit(resourceName, code);
}
- final List<String> userGlobalObjects =
- List.from(Namer.reservedGlobalObjectNames)
- ..remove('C')
- ..remove('H')
- ..remove('J')
- ..remove('P')
- ..remove('W');
-
- /// Returns the [reservedGlobalObjectNames] for [library].
- String globalObjectNameForLibrary(LibraryEntity library) {
- if (library == _commonElements.interceptorsLibrary)
- return globalObjectNameForInterceptors();
- Uri uri = library.canonicalUri;
- if (uri.scheme == 'dart') {
- if (uri.path == 'html') return 'W';
- if (uri.path.startsWith('_')) return 'H';
- return 'P';
+ Holder _lookup<T>(T data, LibraryEntity library, Map<T, Holder> map) {
+ if (library == _commonElements.interceptorsLibrary) {
+ return globalObjectForInterceptors;
}
- return userGlobalObjects[library.name.hashCode % userGlobalObjects.length];
+ // See the below note on globalObjectForConstants.
+ return map[data] ?? mainHolder;
+ }
+
+ /// Returns the [Holder] for [library].
+ Holder globalObjectForLibrary(Library library) {
+ return _lookup(library, library.element, libraryMap);
}
/// Returns true if [element] is stored in the static state holder
@@ -398,88 +419,70 @@
return element.isField;
}
- String globalObjectNameForMember(MemberEntity entity) {
+ Holder globalObjectForMember(MemberEntity entity) {
if (_isPropertyOfStaticStateHolder(entity)) {
- return globalObjectNameForStaticState();
+ return globalObjectForStaticState;
} else {
- return globalObjectNameForLibrary(entity.library);
+ return _lookup(entity, entity.library, memberEntityMap);
}
}
- String globalObjectNameForClass(ClassEntity entity) {
- return globalObjectNameForLibrary(entity.library);
+ Holder globalObjectForClass(ClassEntity entity) {
+ return _lookup(entity, entity.library, classEntityMap);
}
- final Holder globalObjectForStaticState =
- Holder(globalObjectNameForStaticState());
-
- static String globalObjectNameForInterceptors() => 'J';
-
static String globalObjectNameForStaticState() => r'$';
- String globalObjectNameForConstants() => 'C';
+ static String globalObjectNameForInterceptors() => 'J';
- String globalObjectNameForEntity(Entity entity) {
- if (entity is MemberEntity) {
- return globalObjectNameForMember(entity);
- } else if (entity is ClassEntity) {
- return globalObjectNameForLibrary(entity.library);
- } else {
- assert(entity is LibraryEntity);
- return globalObjectNameForLibrary(entity);
- }
- }
-
- Holder holderNameToHolder(String holderKey) {
- if (holderKey == globalObjectNameForStaticState()) {
- return globalObjectForStaticState;
- } else {
- return holderMap[holderKey];
- }
+ Holder globalObjectForConstant(ConstantValue constant) {
+ // TODO(46009): There is a bug where constants are referenced without being
+ // emitted. However, in practice it may not matter because these constants
+ // may not be used. Until this bug is fixed, we say these constants are in
+ // the [mainHolder] even though they aren't in the code at all.
+ return constantValueMap[constant] ?? mainConstantHolder;
}
Holder globalObjectForEntity(Entity entity) {
- return holderNameToHolder(globalObjectNameForEntity(entity));
+ if (entity is MemberEntity) {
+ return globalObjectForMember(entity);
+ } else if (entity is ClassEntity) {
+ return globalObjectForClass(entity);
+ } else {
+ assert((entity as LibraryEntity) == _commonElements.interceptorsLibrary);
+ return globalObjectForInterceptors;
+ }
}
/// Registers a [holder] use within a given [resource], if [properties] are
/// provided then it is assumed this is an update to a holder.
- void registerHolderUseOrUpdate(String resourceName, String holderName,
+ void registerHolderUseOrUpdate(String resourceName, Holder holder,
{List<js.Property> properties}) {
- // For simplicity, we don't currently track the static state holder per
- // resource.
- if (holderName == globalObjectNameForStaticState()) return;
- Holder holder = holderMap[holderName] ??= Holder(holderName);
if (properties == null) {
holder.registerUse(resourceName);
} else {
holder.registerUpdate(resourceName, properties);
}
+ allHolders.add(holder);
(holdersPerResource[resourceName] ??= {}).add(holder);
}
- /// Returns a key to a global object for a given [Object] based on the
- /// [DeferredHolderExpressionKind].
- String kindToHolderName(DeferredHolderExpressionKind kind, Object data) {
- switch (kind) {
- case DeferredHolderExpressionKind.globalObjectForInterceptors:
- return globalObjectNameForInterceptors();
- case DeferredHolderExpressionKind.globalObjectForClass:
- return globalObjectNameForClass(data);
- case DeferredHolderExpressionKind.globalObjectForMember:
- return globalObjectNameForMember(data);
- case DeferredHolderExpressionKind.globalObjectForConstant:
- return globalObjectNameForConstants();
- case DeferredHolderExpressionKind.globalObjectForStaticState:
- return globalObjectNameForStaticState();
- }
- throw UnsupportedError("Unreachable");
- }
-
/// Returns a global object for a given [Object] based on the
/// [DeferredHolderExpressionKind].
Holder kindToHolder(DeferredHolderExpressionKind kind, Object data) {
- return holderNameToHolder(kindToHolderName(kind, data));
+ switch (kind) {
+ case DeferredHolderExpressionKind.globalObjectForInterceptors:
+ return globalObjectForInterceptors;
+ case DeferredHolderExpressionKind.globalObjectForClass:
+ return globalObjectForClass(data);
+ case DeferredHolderExpressionKind.globalObjectForMember:
+ return globalObjectForMember(data);
+ case DeferredHolderExpressionKind.globalObjectForConstant:
+ return globalObjectForConstant(data);
+ case DeferredHolderExpressionKind.globalObjectForStaticState:
+ return globalObjectForStaticState;
+ }
+ throw UnsupportedError("Unreachable");
}
/// Finalizes [DeferredHolderParameter]s.
@@ -493,11 +496,12 @@
/// Finalizes all of the [DeferredHolderExpression]s associated with a
/// [DeferredHolderResource].
void finalizeReferences(DeferredHolderResource resource) {
- if (!holderReferences.containsKey(resource.name)) return;
- for (var reference in holderReferences[resource.name]) {
+ var resourceName = resource.name;
+ if (!holderReferences.containsKey(resourceName)) return;
+ for (var reference in holderReferences[resourceName]) {
if (reference.isFinalized) continue;
- String holder = kindToHolder(reference.kind, reference.data).name;
- js.Expression value = js.VariableUse(holder);
+ var holder = kindToHolder(reference.kind, reference.data);
+ js.Expression value = js.VariableUse(holder.localName(resourceName));
reference.value =
value.withSourceInformation(reference.sourceInformation);
}
@@ -508,8 +512,8 @@
// Register all holders used in all [DeferredHolderResource]s.
for (var resource in holderResources) {
resource.holderCode.forEach((entity, properties) {
- String holderName = globalObjectNameForEntity(entity);
- registerHolderUseOrUpdate(resource.name, holderName,
+ Holder holder = globalObjectForEntity(entity);
+ registerHolderUseOrUpdate(resource.name, holder,
properties: properties);
});
}
@@ -517,22 +521,24 @@
// Register all holders used in [DeferredHolderReference]s.
holderReferences.forEach((resource, references) {
for (var reference in references) {
- String holderName = kindToHolderName(reference.kind, reference.data);
- registerHolderUseOrUpdate(resource, holderName);
+ var holder = kindToHolder(reference.kind, reference.data);
+ registerHolderUseOrUpdate(resource, holder);
}
});
+
+ // Finally, because all holders are needed in the main holder, we register
+ // their use here.
+ for (var holder in allHolders) {
+ registerHolderUseOrUpdate(mainHolderResource.name, holder);
+ }
}
/// Returns an [Iterable<Holder>] containing all of the holders used within a
- /// given [DeferredHolderResource]except the static state holder (if any).
+ /// given [DeferredHolderResource] except the static state holder (if any).
Iterable<Holder> nonStaticStateHolders(DeferredHolderResource resource) {
- return holdersPerResource[resource.name] ?? [];
- }
-
- /// Returns an [Iterable<Holder>] containing all of the holders used within a
- /// given [DeferredHolderResource] except the static state holder.
- Iterable<Holder> get allNonStaticStateHolders {
- return holderMap.values;
+ if (!holdersPerResource.containsKey(resource.name)) return [];
+ return holdersPerResource[resource.name]
+ .where((holder) => holder != globalObjectForStaticState);
}
/// Generates code to declare holders for a given [resourceName].
@@ -552,7 +558,7 @@
List<Holder> activeHolders = [];
List<js.VariableInitialization> holderInitializations = [];
for (var holder in holders) {
- var holderName = holder.name;
+ var holderName = holder.localName(resourceName);
List<js.Property> properties =
holder.propertiesPerResource[resourceName] ?? [];
if (properties.isEmpty) {
@@ -576,13 +582,14 @@
/// Finalizes [resource] to code that updates holders. [resource] must be in
/// the AST of a deferred fragment.
void updateHolders(DeferredHolderResource resource) {
+ var resourceName = resource.name;
final holderCode =
- declareHolders(resource.name, nonStaticStateHolders(resource));
+ declareHolders(resourceName, nonStaticStateHolders(resource));
// Set names if necessary on deferred holders list.
js.Expression deferredHoldersList = js.ArrayInitializer(holderCode
.activeHolders
- .map((holder) => js.js("#", holder.name))
+ .map((holder) => js.js("#", holder.localName(resourceName)))
.toList(growable: false));
js.Statement setNames = js.js.statement(
'hunkHelpers.setFunctionNamesIfNecessary(#deferredHoldersList)',
@@ -594,7 +601,7 @@
setNames
];
for (var holder in holderCode.allHolders) {
- var holderName = holder.name;
+ var holderName = holder.localName(resourceName);
var holderIndex = js.number(holder.index);
if (holderCode.activeHolders.contains(holder)) {
updateHolderAssignments.add(js.js.statement(
@@ -617,8 +624,9 @@
/// fragment.
void declareHoldersInMainResource() {
// Declare holders in main output unit.
- var holders = allNonStaticStateHolders;
- var holderCode = declareHolders(mainHolderResource.name, holders,
+ var holders = nonStaticStateHolders(mainHolderResource);
+ var mainHolderResourceName = mainHolderResource.name;
+ var holderCode = declareHolders(mainHolderResourceName, holders,
initializeEmptyHolders: true);
// Create holder uses and init holder indices.
@@ -626,7 +634,7 @@
int i = 0;
for (var holder in holders) {
holder.index = i++;
- holderUses.add(js.VariableUse(holder.name));
+ holderUses.add(js.VariableUse(holder.localName(mainHolderResourceName)));
}
// Create holders array statement.
@@ -640,6 +648,104 @@
js.Block([holderCode.statement, holderArray]);
}
+ /// Initializes local names for [Holder] objects, and also performs frequency
+ /// based renaming if requested.
+ void setLocalHolderNames() {
+ bool shouldMinify(Holder holder) {
+ // We minify all holders if minification is enabled, except for holders
+ // which are already minified.
+ return enableMinification &&
+ holder != globalObjectForStaticState &&
+ holder != globalObjectForInterceptors;
+ }
+
+ holdersPerResource.forEach((resource, holders) {
+ // Sort holders by reference count within this resource.
+ var sortedHolders = holders.toList(growable: false);
+ sortedHolders.sort((a, b) {
+ return a.refCount(resource).compareTo(b.refCount(resource));
+ });
+
+ // Assign names based on frequency. This will be ignored unless
+ // minification is enabled.
+ var reservedNames = Namer.reservedCapitalizedGlobalSymbols
+ .union({globalObjectNameForInterceptors()});
+ var namer = TokenScope(initialChar: $A, illegalNames: reservedNames);
+ for (var holder in sortedHolders) {
+ // We will use minified local names for all holders, unless minification
+ // is disabled or the holder is the static state holder.
+ String localHolderName;
+ if (shouldMinify(holder)) {
+ localHolderName = namer.getNextName();
+ } else {
+ localHolderName = holder.key;
+ }
+ holder.setLocalName(resource, localHolderName);
+ }
+ });
+ }
+
+ /// Initializes [Holder] objects with their default names and sets up maps of
+ /// [Entity] / [ConstantValue] to [Holder].
+ void initializeHolders() {
+ void _addMembers(Holder holder, List<Method> methods) {
+ for (var method in methods) {
+ memberEntityMap[method.element] = holder;
+ if (method is DartMethod) {
+ _addMembers(holder, method.parameterStubs);
+ }
+ }
+ }
+
+ void _addClass(Holder holder, Class cls) {
+ classEntityMap[cls.element] = holder;
+ _addMembers(holder, cls.methods);
+ _addMembers(holder, cls.isChecks);
+ _addMembers(holder, cls.checkedSetters);
+ _addMembers(holder, cls.gettersSetters);
+ _addMembers(holder, cls.callStubs);
+ _addMembers(holder, cls.noSuchMethodStubs);
+ if (cls.nativeExtensions != null) {
+ for (var extClass in cls.nativeExtensions) {
+ _addClass(holder, extClass);
+ }
+ }
+ }
+
+ for (var resource in holderResources) {
+      // Our default names are either 'MAIN', 'PART<N>', or '<NAME>_C'.
+ var holderName =
+ resource.isMainFragment ? mainResourceName : 'part${resource.name}';
+ holderName = holderName.toUpperCase();
+ var holder = Holder(holderName);
+
+ // Constant properties are not unique globally and must live in their own
+ // holder.
+ var constantHolder = Holder('${holderName}_C');
+
+ // Initialize the [mainHolder] and [mainConstantHolder].
+ if (resource.isMainFragment) {
+ mainHolder = holder;
+ mainConstantHolder = constantHolder;
+ }
+
+ for (var fragment in resource.fragments) {
+ for (var constant in fragment.constants) {
+ constantValueMap[constant.value] = constantHolder;
+ }
+ for (var library in fragment.libraries) {
+ libraryMap[library] = holder;
+ for (var cls in library.classes) {
+ _addClass(holder, cls);
+ }
+ for (var staticMethod in library.statics) {
+ memberEntityMap[staticMethod.element] = holder;
+ }
+ }
+ }
+ }
+ }
+
/// Allocates all [DeferredHolderResource]s and finalizes the associated
/// [DeferredHolderExpression]s.
void allocateResourcesAndFinalizeReferences() {
@@ -664,7 +770,9 @@
@override
void finalize() {
+ initializeHolders();
registerHolders();
+ setLocalHolderNames();
finalizeParameters();
allocateResourcesAndFinalizeReferences();
}
diff --git a/pkg/compiler/lib/src/js_backend/frequency_namer.dart b/pkg/compiler/lib/src/js_backend/frequency_namer.dart
index 11988e9..cfa4155 100644
--- a/pkg/compiler/lib/src/js_backend/frequency_namer.dart
+++ b/pkg/compiler/lib/src/js_backend/frequency_namer.dart
@@ -30,9 +30,9 @@
illegalNames.add(illegal.substring(1));
}
}
- return new TokenScope(illegalNames);
+ return new TokenScope(illegalNames: illegalNames);
} else {
- return new TokenScope(jsReserved);
+ return new TokenScope(illegalNames: jsReserved);
}
}
@@ -78,10 +78,13 @@
}
class TokenScope {
- List<int> _nextName = [$a];
+ int initialChar;
+ List<int> _nextName;
final Set<String> illegalNames;
- TokenScope([this.illegalNames = const {}]);
+ TokenScope({this.illegalNames = const {}, this.initialChar: $a}) {
+ _nextName = [initialChar];
+ }
/// Increments the letter at [pos] in the current name. Also takes care of
/// overflows to the left. Returns the carry bit, i.e., it returns `true`
@@ -105,7 +108,7 @@
value = $A;
} else if (value == $Z) {
overflow = _incrementPosition(pos - 1);
- value = (pos > 0) ? $_ : $a;
+ value = (pos > 0) ? $_ : initialChar;
} else {
value++;
}
diff --git a/pkg/compiler/lib/src/js_emitter/startup_emitter/fragment_emitter.dart b/pkg/compiler/lib/src/js_emitter/startup_emitter/fragment_emitter.dart
index 3cc03f9..52f5eea 100644
--- a/pkg/compiler/lib/src/js_emitter/startup_emitter/fragment_emitter.dart
+++ b/pkg/compiler/lib/src/js_emitter/startup_emitter/fragment_emitter.dart
@@ -878,8 +878,14 @@
Map<js.Name, js.Expression> propertyMap = emitStaticMethod(method);
propertyMap.forEach((js.Name key, js.Expression value) {
var property = new js.Property(js.quoteName(key), value);
- Entity holderKey =
- method is StaticStubMethod ? method.library : method.element;
+ Entity holderKey;
+ if (method is StaticStubMethod) {
+ // [StaticStubMethod]s should only be created for interceptors.
+ assert(method.library == _commonElements.interceptorsLibrary);
+ holderKey = method.library;
+ } else {
+ holderKey = method.element;
+ }
(holderCode[holderKey] ??= []).add(property);
registerEntityAst(method.element, property, library: library.element);
});
diff --git a/pkg/compiler/test/codegen/class_codegen_test.dart b/pkg/compiler/test/codegen/class_codegen_test.dart
index 6236208..78b0887 100644
--- a/pkg/compiler/test/codegen/class_codegen_test.dart
+++ b/pkg/compiler/test/codegen/class_codegen_test.dart
@@ -82,9 +82,10 @@
subClass() async {
checkOutput(String generated) {
+ Expect.isTrue(generated
+ .contains(RegExp(r'_inheritMany\([$A-Z]+\.Object, .*, [$A-Z]+\.A]')));
Expect.isTrue(
- generated.contains(RegExp(r'_inheritMany\(.\.Object, .*, .\.A]')));
- Expect.isTrue(generated.contains(RegExp(r'_inherit\(.\.B, .\.A\)')));
+ generated.contains(RegExp(r'_inherit\([$A-Z]+\.B, [$A-Z]+\.A\)')));
}
checkOutput(await compileAll(TEST_TWO));
diff --git a/pkg/compiler/test/codegen/codegen_test_helper.dart b/pkg/compiler/test/codegen/codegen_test_helper.dart
index f28f670..b2bb6b1 100644
--- a/pkg/compiler/test/codegen/codegen_test_helper.dart
+++ b/pkg/compiler/test/codegen/codegen_test_helper.dart
@@ -57,7 +57,7 @@
forUserLibrariesOnly: true,
args: args,
options: options,
- testedConfigs: allInternalConfigs,
+ testedConfigs: allInternalConfigs + [canaryConfig],
skip: skip,
shardIndex: shardIndex ?? 0,
shards: shardIndex == null ? 1 : shards);
diff --git a/pkg/compiler/test/codegen/data/array_add.dart b/pkg/compiler/test/codegen/data/array_add.dart
index d9c0fd3..c0447fe 100644
--- a/pkg/compiler/test/codegen/data/array_add.dart
+++ b/pkg/compiler/test/codegen/data/array_add.dart
@@ -16,13 +16,21 @@
t1.push(1);
return t1;
}*/
+/*canary.member: test1:function() {
+ var t1 = B._setArrayType([], type$.JSArray_int);
+ A.JSArray_methods.add$1(t1, 1);
+ return t1;
+}*/
test1() {
return <int>[]..add(1);
}
-/*member: main:function() {
+/*spec|prod.member: main:function() {
F.test1();
}*/
+/*canary.member: main:function() {
+ B.test1();
+}*/
main() {
test1();
}
diff --git a/pkg/compiler/test/codegen/data/codeUnitAt_folding.dart b/pkg/compiler/test/codegen/data/codeUnitAt_folding.dart
index f898183..e52a16a 100644
--- a/pkg/compiler/test/codegen/data/codeUnitAt_folding.dart
+++ b/pkg/compiler/test/codegen/data/codeUnitAt_folding.dart
@@ -20,6 +20,9 @@
/*prod.member: foo2:function() {
return C.JSString_methods.codeUnitAt$1("Hello", 1.5);
}*/
+/*canary.member: foo2:function() {
+ return A.JSString_methods.codeUnitAt$1("Hello", B._asInt(1.5));
+}*/
foo2() {
var a = 'Hello';
dynamic b = 1.5;
@@ -28,9 +31,12 @@
}
@pragma('dart2js:noInline')
-/*member: foo3:function() {
+/*spec|prod.member: foo3:function() {
return C.JSString_methods._codeUnitAt$1("Hello", 55);
}*/
+/*canary.member: foo3:function() {
+ return A.JSString_methods._codeUnitAt$1("Hello", 55);
+}*/
foo3() {
var a = 'Hello';
dynamic b = 55;
diff --git a/pkg/compiler/test/codegen/data/marker.options b/pkg/compiler/test/codegen/data/marker.options
index a1cab2b..de94521 100644
--- a/pkg/compiler/test/codegen/data/marker.options
+++ b/pkg/compiler/test/codegen/data/marker.options
@@ -1,2 +1,3 @@
spec=pkg/compiler/test/codegen/codegen_test_helper.dart
prod=pkg/compiler/test/codegen/codegen_test_helper.dart
+canary=pkg/compiler/test/codegen/codegen_test_helper.dart
diff --git a/pkg/compiler/test/codegen/data/shift_right_unsigned.dart b/pkg/compiler/test/codegen/data/shift_right_unsigned.dart
index af2ef54..7cdad26 100644
--- a/pkg/compiler/test/codegen/data/shift_right_unsigned.dart
+++ b/pkg/compiler/test/codegen/data/shift_right_unsigned.dart
@@ -30,14 +30,20 @@
/*prod.member: cannotRecognize:function(thing) {
return J.$shru$n(thing, 1);
}*/
+/*canary.member: cannotRecognize:function(thing) {
+ return B._asInt(J.$shru$n(thing, 1));
+}*/
int cannotRecognize(dynamic thing) {
return thing >>> 1;
}
@pragma('dart2js:noInline')
-/*member: cannotConstantFold:function() {
+/*spec|prod.member: cannotConstantFold:function() {
return C.JSInt_methods.$shru(1, -1);
}*/
+/*canary.member: cannotConstantFold:function() {
+ return A.JSInt_methods.$shru(1, -1);
+}*/
int cannotConstantFold() {
var a = 1;
return a >>> -1;
@@ -62,17 +68,23 @@
}
@pragma('dart2js:noInline')
-/*member: unspecialized:function(a) {
+/*spec|prod.member: unspecialized:function(a) {
return C.JSInt_methods.$shru(1, a);
}*/
+/*canary.member: unspecialized:function(a) {
+ return A.JSInt_methods.$shru(1, a);
+}*/
int unspecialized(int a) {
return 1 >>> a;
}
@pragma('dart2js:noInline')
-/*member: otherPositive2:function(param) {
+/*spec|prod.member: otherPositive2:function(param) {
return C.JSInt_methods._shruOtherPositive$1(1, param ? 1 : 2);
}*/
+/*canary.member: otherPositive2:function(param) {
+ return A.JSInt_methods._shruOtherPositive$1(1, param ? 1 : 2);
+}*/
int otherPositive2(bool param) {
var a = param ? 1 : 2;
return 1 >>> a;
@@ -98,9 +110,12 @@
}
@pragma('dart2js:noInline')
-/*member: otherPositive6:function(a, b) {
+/*spec|prod.member: otherPositive6:function(a, b) {
return C.JSInt_methods._shruOtherPositive$1(a, b);
}*/
+/*canary.member: otherPositive6:function(a, b) {
+ return A.JSInt_methods._shruOtherPositive$1(a, b);
+}*/
int otherPositive6(int a, int b) {
return a >>> b;
}
diff --git a/pkg/compiler/test/codegen/data/tdiv1.dart b/pkg/compiler/test/codegen/data/tdiv1.dart
index 89beb19..747fe8f 100644
--- a/pkg/compiler/test/codegen/data/tdiv1.dart
+++ b/pkg/compiler/test/codegen/data/tdiv1.dart
@@ -45,9 +45,12 @@
}
@pragma('dart2js:noInline')
-/*member: foo3:function(param) {
+/*spec|prod.member: foo3:function(param) {
return C.JSInt_methods._tdivFast$1(param ? 4294967295 : -1, 2);
}*/
+/*canary.member: foo3:function(param) {
+ return A.JSInt_methods._tdivFast$1(param ? 4294967295 : -1, 2);
+}*/
int foo3(bool param) {
var a = param ? 0xFFFFFFFF : -1;
return a ~/ 2;
@@ -56,9 +59,12 @@
}
@pragma('dart2js:noInline')
-/*member: foo4:function(param1, param2) {
+/*spec|prod.member: foo4:function(param1, param2) {
return C.JSInt_methods.$tdiv(param1 ? 4294967295 : 0, param2);
}*/
+/*canary.member: foo4:function(param1, param2) {
+ return A.JSInt_methods.$tdiv(param1 ? 4294967295 : 0, param2);
+}*/
int foo4(bool param1, int param2) {
var a = param1 ? 0xFFFFFFFF : 0;
return a ~/ param2;
@@ -68,10 +74,14 @@
}
@pragma('dart2js:noInline')
-/*member: foo5:function(param1, param2) {
+/*spec|prod.member: foo5:function(param1, param2) {
var a = param1 ? 4294967295 : 0;
return C.JSInt_methods.$tdiv(a, param2 ? 3 : 4);
}*/
+/*canary.member: foo5:function(param1, param2) {
+ var a = param1 ? 4294967295 : 0;
+ return A.JSInt_methods.$tdiv(a, param2 ? 3 : 4);
+}*/
int foo5(bool param1, bool param2) {
var a = param1 ? 0xFFFFFFFF : 0;
var b = param2 ? 3 : 4;
@@ -83,10 +93,14 @@
}
@pragma('dart2js:noInline')
-/*member: foo_regress_37502:function(param1, param2) {
+/*spec|prod.member: foo_regress_37502:function(param1, param2) {
var a = param1 ? 1.2 : 12.3;
return C.JSInt_methods.gcd$1(C.JSNumber_methods.$tdiv(a, param2 ? 3.14 : 2.81), 2);
}*/
+/*canary.member: foo_regress_37502:function(param1, param2) {
+ var a = param1 ? 1.2 : 12.3;
+ return A.JSInt_methods.gcd$1(A.JSNumber_methods.$tdiv(a, param2 ? 3.14 : 2.81), 2);
+}*/
foo_regress_37502(param1, param2) {
var a = param1 ? 1.2 : 12.3;
var b = param2 ? 3.14 : 2.81;
diff --git a/pkg/compiler/test/codegen/data_2/marker.options b/pkg/compiler/test/codegen/data_2/marker.options
index a1cab2b..de94521 100644
--- a/pkg/compiler/test/codegen/data_2/marker.options
+++ b/pkg/compiler/test/codegen/data_2/marker.options
@@ -1,2 +1,3 @@
spec=pkg/compiler/test/codegen/codegen_test_helper.dart
prod=pkg/compiler/test/codegen/codegen_test_helper.dart
+canary=pkg/compiler/test/codegen/codegen_test_helper.dart
diff --git a/pkg/compiler/test/codegen/data_2/tdiv1.dart b/pkg/compiler/test/codegen/data_2/tdiv1.dart
index 43eb113..e647e4a 100644
--- a/pkg/compiler/test/codegen/data_2/tdiv1.dart
+++ b/pkg/compiler/test/codegen/data_2/tdiv1.dart
@@ -29,6 +29,9 @@
/*prod.member: foo1:function(param) {
return (param ? 4294967295 : 1) / 2 | 0;
}*/
+/*canary.member: foo1:function(param) {
+ return (B.boolConversionCheck(param) ? 4294967295 : 1) / 2 | 0;
+}*/
int foo1(bool param) {
var a = param ? 0xFFFFFFFF : 1;
return a ~/ 2;
@@ -43,6 +46,9 @@
/*prod.member: foo2:function(param) {
return (param ? 4294967295 : 1) / 3 | 0;
}*/
+/*canary.member: foo2:function(param) {
+ return (B.boolConversionCheck(param) ? 4294967295 : 1) / 3 | 0;
+}*/
int foo2(bool param) {
var a = param ? 0xFFFFFFFF : 1;
return a ~/ 3;
@@ -57,6 +63,9 @@
/*prod.member: foo3:function(param) {
return C.JSInt_methods._tdivFast$1(param ? 4294967295 : -1, 2);
}*/
+/*canary.member: foo3:function(param) {
+ return A.JSInt_methods._tdivFast$1(B.boolConversionCheck(param) ? 4294967295 : -1, 2);
+}*/
int foo3(bool param) {
var a = param ? 0xFFFFFFFF : -1;
return a ~/ 2;
@@ -71,6 +80,9 @@
/*prod.member: foo4:function(param1, param2) {
return C.JSInt_methods.$tdiv(param1 ? 4294967295 : 0, param2);
}*/
+/*canary.member: foo4:function(param1, param2) {
+ return A.JSInt_methods.$tdiv(B.boolConversionCheck(param1) ? 4294967295 : 0, param2);
+}*/
int foo4(bool param1, int param2) {
var a = param1 ? 0xFFFFFFFF : 0;
return a ~/ param2;
@@ -88,6 +100,10 @@
var a = param1 ? 4294967295 : 0;
return C.JSInt_methods.$tdiv(a, param2 ? 3 : 4);
}*/
+/*canary.member: foo5:function(param1, param2) {
+ var a = B.boolConversionCheck(param1) ? 4294967295 : 0;
+ return A.JSInt_methods.$tdiv(a, B.boolConversionCheck(param2) ? 3 : 4);
+}*/
int foo5(bool param1, bool param2) {
var a = param1 ? 0xFFFFFFFF : 0;
var b = param2 ? 3 : 4;
@@ -107,6 +123,10 @@
var a = param1 ? 1.2 : 12.3;
return C.JSInt_methods.gcd$1(C.JSNumber_methods.$tdiv(a, param2 ? 3.14 : 2.81), 2);
}*/
+/*canary.member: foo_regress_37502:function(param1, param2) {
+ var a = B.boolConversionCheck(param1) ? 1.2 : 12.3;
+ return A.JSInt_methods.gcd$1(A.JSNumber_methods.$tdiv(a, B.boolConversionCheck(param2) ? 3.14 : 2.81), 2);
+}*/
foo_regress_37502(param1, param2) {
var a = param1 ? 1.2 : 12.3;
var b = param2 ? 3.14 : 2.81;
diff --git a/pkg/compiler/test/codegen/shift_right_unsigned_test.dart b/pkg/compiler/test/codegen/shift_right_unsigned_test.dart
index 8b5dcc0..413af76 100644
--- a/pkg/compiler/test/codegen/shift_right_unsigned_test.dart
+++ b/pkg/compiler/test/codegen/shift_right_unsigned_test.dart
@@ -49,7 +49,7 @@
int foo(int value, int shift) {
return value >>> shift;
// Default code pattern:
- // present: 'return C.JSInt_methods.$shru(value, shift);'
+ // present: 'JSInt_methods.$shru(value, shift);'
}
int callFoo(int a, int b, int c) => foo(a, b);
""",
diff --git a/pkg/compiler/test/equivalence/id_equivalence_helper.dart b/pkg/compiler/test/equivalence/id_equivalence_helper.dart
index 1b5fa5d..a6423eb 100644
--- a/pkg/compiler/test/equivalence/id_equivalence_helper.dart
+++ b/pkg/compiler/test/equivalence/id_equivalence_helper.dart
@@ -29,6 +29,7 @@
const String specMarker = 'spec';
const String prodMarker = 'prod';
+const String canaryMarker = 'canary';
const String twoDeferredFragmentMarker = 'two-frag';
const String threeDeferredFragmentMarker = 'three-frag';
@@ -37,6 +38,9 @@
const TestConfig prodConfig = TestConfig(prodMarker, 'production mode',
[Flags.omitImplicitChecks, Flags.laxRuntimeTypeToString]);
+const TestConfig canaryConfig =
+ TestConfig(canaryMarker, 'canary mode', [Flags.canary]);
+
const TestConfig twoDeferredFragmentConfig = TestConfig(
twoDeferredFragmentMarker,
'two deferred fragment mode',
diff --git a/pkg/compiler/test/model/token_naming_test.dart b/pkg/compiler/test/model/token_naming_test.dart
index beb5f66..6c7afd6 100644
--- a/pkg/compiler/test/model/token_naming_test.dart
+++ b/pkg/compiler/test/model/token_naming_test.dart
@@ -7,6 +7,7 @@
import "package:compiler/src/js_backend/js_backend.dart" show TokenScope;
import "package:expect/expect.dart";
+import 'package:front_end/src/api_unstable/dart2js.dart' show $A;
String forwardN(TokenScope scope, int N) {
for (int i = 1; i < N; ++i) {
@@ -46,7 +47,7 @@
// Test a filtered scope.
Set<String> illegal = new Set.from(["b", "aa"]);
- scope = new TokenScope(illegal);
+ scope = new TokenScope(illegalNames: illegal);
// We start with 'a'.
Expect.equals("a", forwardN(scope, 1));
@@ -62,4 +63,21 @@
// Make sure 'aa' is skipped on wrapping
Expect.equals("ab", forwardN(scope, 1));
Expect.equals("az", forwardN(scope, 24));
+
+ // Test an initial char
+ {
+ TokenScope scope = new TokenScope(initialChar: $A);
+
+ // We start with 'A'.
+ Expect.equals("A", scope.getNextName());
+
+ // Overflow should still start with 'A'.
+ Expect.equals("A_", forwardN(scope, 26));
+ }
+ {
+ TokenScope scope = new TokenScope(initialChar: $A + 1);
+
+ // We start with 'B'.
+ Expect.equals("B", scope.getNextName());
+ }
}
diff --git a/pkg/front_end/lib/src/fasta/builder/class_builder.dart b/pkg/front_end/lib/src/fasta/builder/class_builder.dart
index c528f5f..5357d8b 100644
--- a/pkg/front_end/lib/src/fasta/builder/class_builder.dart
+++ b/pkg/front_end/lib/src/fasta/builder/class_builder.dart
@@ -394,7 +394,9 @@
Builder findStaticBuilder(
String name, int charOffset, Uri fileUri, LibraryBuilder accessingLibrary,
{bool isSetter: false}) {
- if (accessingLibrary.origin != library.origin && name.startsWith("_")) {
+ if (accessingLibrary.nameOriginBuilder.origin !=
+ library.nameOriginBuilder.origin &&
+ name.startsWith("_")) {
return null;
}
Builder declaration = isSetter
@@ -411,7 +413,9 @@
@override
MemberBuilder findConstructorOrFactory(
String name, int charOffset, Uri uri, LibraryBuilder accessingLibrary) {
- if (accessingLibrary.origin != library.origin && name.startsWith("_")) {
+ if (accessingLibrary.nameOriginBuilder.origin !=
+ library.nameOriginBuilder.origin &&
+ name.startsWith("_")) {
return null;
}
MemberBuilder declaration = constructors.lookup(name, charOffset, uri);
diff --git a/pkg/front_end/lib/src/fasta/builder/extension_builder.dart b/pkg/front_end/lib/src/fasta/builder/extension_builder.dart
index d7978bd..f233bb3 100644
--- a/pkg/front_end/lib/src/fasta/builder/extension_builder.dart
+++ b/pkg/front_end/lib/src/fasta/builder/extension_builder.dart
@@ -74,7 +74,9 @@
Builder findStaticBuilder(
String name, int charOffset, Uri fileUri, LibraryBuilder accessingLibrary,
{bool isSetter: false}) {
- if (accessingLibrary.origin != library.origin && name.startsWith("_")) {
+ if (accessingLibrary.nameOriginBuilder.origin !=
+ library.nameOriginBuilder.origin &&
+ name.startsWith("_")) {
return null;
}
Builder declaration = isSetter
diff --git a/pkg/front_end/lib/src/fasta/builder/library_builder.dart b/pkg/front_end/lib/src/fasta/builder/library_builder.dart
index 74355bf..ec8fad3 100644
--- a/pkg/front_end/lib/src/fasta/builder/library_builder.dart
+++ b/pkg/front_end/lib/src/fasta/builder/library_builder.dart
@@ -53,6 +53,8 @@
LibraryBuilder partOfLibrary;
+ LibraryBuilder get nameOriginBuilder;
+
bool mayImplementRestrictedTypes;
bool get isPart;
diff --git a/pkg/front_end/lib/src/fasta/dill/dill_library_builder.dart b/pkg/front_end/lib/src/fasta/dill/dill_library_builder.dart
index 27e97e4..031a9ea 100644
--- a/pkg/front_end/lib/src/fasta/dill/dill_library_builder.dart
+++ b/pkg/front_end/lib/src/fasta/dill/dill_library_builder.dart
@@ -147,6 +147,9 @@
@override
String get name => library.name;
+ @override
+ LibraryBuilder get nameOriginBuilder => this;
+
void addSyntheticDeclarationOfDynamic() {
addBuilder("dynamic",
new DynamicTypeDeclarationBuilder(const DynamicType(), this, -1), -1);
diff --git a/pkg/front_end/lib/src/fasta/incremental_compiler.dart b/pkg/front_end/lib/src/fasta/incremental_compiler.dart
index 0cc4f5d..04062a8 100644
--- a/pkg/front_end/lib/src/fasta/incremental_compiler.dart
+++ b/pkg/front_end/lib/src/fasta/incremental_compiler.dart
@@ -1940,7 +1940,7 @@
userCode.loader,
null,
scope: libraryBuilder.scope.createNestedScope("expression"),
- nameOrigin: libraryBuilder.library,
+ nameOrigin: libraryBuilder,
);
ticker.logMs("Created debug library");
diff --git a/pkg/front_end/lib/src/fasta/source/source_library_builder.dart b/pkg/front_end/lib/src/fasta/source/source_library_builder.dart
index 2addb57..bffa568 100644
--- a/pkg/front_end/lib/src/fasta/source/source_library_builder.dart
+++ b/pkg/front_end/lib/src/fasta/source/source_library_builder.dart
@@ -213,8 +213,9 @@
// A library to use for Names generated when compiling code in this library.
// This allows code generated in one library to use the private namespace of
// another, for example during expression compilation (debugging).
- Library get nameOrigin => _nameOrigin ?? library;
- final Library _nameOrigin;
+ Library get nameOrigin => _nameOrigin?.library ?? library;
+ LibraryBuilder get nameOriginBuilder => _nameOrigin ?? this;
+ final LibraryBuilder _nameOrigin;
final Library referencesFrom;
final IndexedLibrary referencesFromIndexed;
@@ -264,7 +265,7 @@
Scope scope,
SourceLibraryBuilder actualOrigin,
Library library,
- Library nameOrigin,
+ LibraryBuilder nameOrigin,
Library referencesFrom,
bool referenceIsPartOwner)
: this.fromScopes(
@@ -422,7 +423,7 @@
SourceLibraryBuilder actualOrigin,
{Scope scope,
Library target,
- Library nameOrigin,
+ LibraryBuilder nameOrigin,
Library referencesFrom,
bool referenceIsPartOwner})
: this.internal(
diff --git a/pkg/front_end/test/weekly_tester.dart b/pkg/front_end/test/weekly_tester.dart
index 51a670a..ab01773 100644
--- a/pkg/front_end/test/weekly_tester.dart
+++ b/pkg/front_end/test/weekly_tester.dart
@@ -18,6 +18,7 @@
"\n\n");
List<WrappedProcess> startedProcesses = [];
+ WrappedProcess? leakTest;
{
// Very slow: Leak-test.
Uri leakTester =
@@ -28,13 +29,14 @@
} else {
// The tools/bots/flutter/compile_flutter.sh script passes `--path`
// --- we'll just pass everything along.
- startedProcesses.add(await run(
+ leakTest = await run(
[
leakTester.toString(),
...args,
],
"leak test",
- ));
+ );
+ startedProcesses.add(leakTest);
}
}
{
@@ -53,7 +55,9 @@
// ignore: unawaited_futures
() async {
for (int i = 0; i < 10 * 60; i++) {
- if (observatoryLines.isNotEmpty) break;
+ if (leakTest == null || leakTest.observatoryLines.isNotEmpty) {
+ break;
+ }
await Future.delayed(new Duration(seconds: 1));
}
@@ -124,23 +128,24 @@
List<int> exitCodes =
await Future.wait(startedProcesses.map((e) => e.process.exitCode));
if (exitCodes.where((e) => e != 0).isNotEmpty) {
+ print("\n\nFound failures!:\n");
// At least one failed.
- exitCode = 1;
for (WrappedProcess p in startedProcesses) {
int pExitCode = await p.process.exitCode;
if (pExitCode != 0) {
print("${p.id} failed with exit code $pExitCode");
}
}
+
+ throw "There were failures!";
}
}
-List<String> observatoryLines = [];
-
Future<WrappedProcess> run(List<String> args, String id) async {
Stopwatch stopwatch = new Stopwatch()..start();
Process process = await Process.start(
Platform.resolvedExecutable, ["--enable-asserts", ...args]);
+ List<String> observatoryLines = [];
process.stderr
.transform(utf8.decoder)
.transform(new LineSplitter())
@@ -162,14 +167,16 @@
// ignore: unawaited_futures
process.exitCode.then((int exitCode) {
stopwatch.stop();
- print("$id finished in ${stopwatch.elapsed.toString()}");
+ print("$id finished in ${stopwatch.elapsed.toString()} "
+ "with exit code $exitCode");
});
- return new WrappedProcess(process, id);
+ return new WrappedProcess(process, id, observatoryLines);
}
class WrappedProcess {
final Process process;
final String id;
+ final List<String> observatoryLines;
- WrappedProcess(this.process, this.id);
+ WrappedProcess(this.process, this.id, this.observatoryLines);
}
diff --git a/pkg/front_end/testcases/expression/main_private.dart b/pkg/front_end/testcases/expression/main_private.dart
new file mode 100644
index 0000000..cce57ee
--- /dev/null
+++ b/pkg/front_end/testcases/expression/main_private.dart
@@ -0,0 +1,263 @@
+// Copyright (c) 2021, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// @dart = 2.9
+
+class Foo {
+ int what;
+
+ Foo() : what = 0;
+ Foo.publicNamed() : what = 1;
+ Foo._privateNamed() : what = 2;
+
+ int publicMethod() {
+ return 42;
+ }
+
+ int _privateMethod() {
+ return 43;
+ }
+
+ static int publicStaticMethod() {
+ return 44;
+ }
+
+ static int _privateStaticMethod() {
+ return 45;
+ }
+
+ int publicField = 84;
+ int _privateField = 85;
+ static int publicStaticField = 86;
+ static int _privateStaticField = 87;
+
+ int get publicGetter => -1;
+ int get _privateGetter => -2;
+ static int get publicStaticGetter => -3;
+ static int get _privateStaticGetter => -4;
+
+ void set publicSetter(int x) {}
+ void set _privateSetter(int x) {}
+ static void set publicStaticSetter(int x) {}
+ static void set _privateStaticSetter(int x) {}
+}
+
+extension PublicExtension on Foo {
+ int publicPublicExtensionMethod() {
+ return 20;
+ }
+
+ int _publicPrivateExtensionMethod() {
+ return 21;
+ }
+
+ static int publicPublicStaticExtensionMethod() {
+ return 22;
+ }
+
+ static int _publicPrivateStaticExtensionMethod() {
+ return 23;
+ }
+
+ static int publicPublicStaticExtensionField = 24;
+ static int _publicPrivateStaticExtensionField = 25;
+
+ int get publicPublicExtensionGetter {
+ return 26;
+ }
+
+ int get _publicPrivateExtensionGetter {
+ return 27;
+ }
+
+ static int get publicPublicStaticExtensionGetter {
+ return 28;
+ }
+
+ static int get _publicPrivateStaticExtensionGetter {
+ return 29;
+ }
+
+ void set publicPublicExtensionSetter(int x) {}
+
+ void set _publicPrivateExtensionSetter(int x) {}
+
+ static void set publicPublicStaticExtensionSetter(int x) {}
+
+ static void set _publicPrivateStaticExtensionSetter(int x) {}
+}
+
+extension _PrivateExtension on Foo {
+ int privatePublicExtensionMethod() {
+ return 30;
+ }
+
+ int _privatePrivateExtensionMethod() {
+ return 31;
+ }
+
+ static int privatePublicStaticExtensionMethod() {
+ return 32;
+ }
+
+ static int _privatePrivateStaticExtensionMethod() {
+ return 33;
+ }
+
+ static int privatePublicStaticExtensionField = 34;
+ static int _privatePrivateStaticExtensionField = 35;
+
+ int get privatePublicExtensionGetter {
+ return 36;
+ }
+
+ int get _privatePrivateExtensionGetter {
+ return 37;
+ }
+
+ static int get privatePublicStaticExtensionGetter {
+ return 38;
+ }
+
+ static int get _privatePrivateStaticExtensionGetter {
+ return 39;
+ }
+
+ void set privatePublicExtensionSetter(int x) {}
+
+ void set _privatePrivateExtensionSetter(int x) {}
+
+ static void set privatePublicStaticExtensionSetter(int x) {}
+
+ static void set _privatePrivateStaticExtensionSetter(int x) {}
+}
+
+int publicTopLevelMethod() {
+ return 50;
+}
+
+int _privateTopLevelMethod() {
+ return 51;
+}
+
+int publicTopLevelField = 52;
+int _privateTopLevelField = 53;
+
+int get publicTopLevelGetter {
+ return 54;
+}
+
+int get _privateTopLevelGetter {
+ return 55;
+}
+
+void set publicTopLevelSetter(int x) {}
+
+void set _privateTopLevelSetter(int x) {}
+
+main() {
+ // Class constructors.
+ Foo foo = new Foo();
+ assert(foo.what == 0);
+ foo = new Foo.publicNamed();
+ assert(foo.what == 1);
+ foo = new Foo._privateNamed();
+ assert(foo.what == 2);
+
+ // Class methods.
+ assert(foo.publicMethod() == 42);
+ assert(foo._privateMethod() == 43);
+ assert(Foo.publicStaticMethod() == 44);
+ assert(Foo._privateStaticMethod() == 45);
+
+ // Class fields.
+ assert(foo.publicField == 84);
+ foo.publicField = -84;
+ assert(foo.publicField == -84);
+ assert(foo._privateField == 85);
+ foo._privateField = -85;
+ assert(foo._privateField == -85);
+ assert(Foo.publicStaticField == 86);
+ Foo.publicStaticField = -86;
+ assert(Foo.publicStaticField == -86);
+ assert(Foo._privateStaticField == 87);
+ Foo._privateStaticField = -87;
+ assert(Foo._privateStaticField == -87);
+
+ // Class getters.
+ assert(foo.publicGetter == -1);
+ assert(foo._privateGetter == -2);
+ assert(Foo.publicStaticGetter == -3);
+ assert(Foo._privateStaticGetter == -4);
+
+ // Class setters.
+ foo.publicSetter = 42;
+ foo._privateSetter = 42;
+ Foo.publicStaticSetter = 42;
+ Foo._privateStaticSetter = 42;
+
+ // Extension methods.
+ assert(foo.publicPublicExtensionMethod() == 20);
+ assert(foo._publicPrivateExtensionMethod() == 21);
+ assert(PublicExtension.publicPublicStaticExtensionMethod() == 22);
+ assert(PublicExtension._publicPrivateStaticExtensionMethod() == 23);
+ assert(foo.privatePublicExtensionMethod() == 30);
+ assert(foo._privatePrivateExtensionMethod() == 31);
+ assert(_PrivateExtension.privatePublicStaticExtensionMethod() == 32);
+ assert(_PrivateExtension._privatePrivateStaticExtensionMethod() == 33);
+
+ // Extension fields.
+ assert(PublicExtension.publicPublicStaticExtensionField == 24);
+ PublicExtension.publicPublicStaticExtensionField = -24;
+ assert(PublicExtension.publicPublicStaticExtensionField == -24);
+ assert(PublicExtension._publicPrivateStaticExtensionField == 25);
+ PublicExtension._publicPrivateStaticExtensionField = -25;
+ assert(PublicExtension._publicPrivateStaticExtensionField == -25);
+ assert(_PrivateExtension.privatePublicStaticExtensionField == 34);
+ _PrivateExtension.privatePublicStaticExtensionField = -34;
+ assert(_PrivateExtension.privatePublicStaticExtensionField == -34);
+ assert(_PrivateExtension._privatePrivateStaticExtensionField == 35);
+ _PrivateExtension._privatePrivateStaticExtensionField = -35;
+ assert(_PrivateExtension._privatePrivateStaticExtensionField == -35);
+
+ // Extension getters.
+ assert(foo.publicPublicExtensionGetter == 26);
+ assert(foo._publicPrivateExtensionGetter == 27);
+ assert(PublicExtension.publicPublicStaticExtensionGetter == 28);
+ assert(PublicExtension._publicPrivateStaticExtensionGetter == 29);
+ assert(foo.privatePublicExtensionGetter == 36);
+ assert(foo._privatePrivateExtensionGetter == 37);
+ assert(_PrivateExtension.privatePublicStaticExtensionGetter == 38);
+ assert(_PrivateExtension._privatePrivateStaticExtensionGetter == 39);
+
+ // Extension setters.
+ foo.publicPublicExtensionSetter = 42;
+ foo._publicPrivateExtensionSetter = 42;
+ PublicExtension.publicPublicStaticExtensionSetter = 42;
+ PublicExtension._publicPrivateStaticExtensionSetter = 42;
+ foo.privatePublicExtensionSetter = 42;
+ foo._privatePrivateExtensionSetter = 42;
+ _PrivateExtension.privatePublicStaticExtensionSetter = 42;
+ _PrivateExtension._privatePrivateStaticExtensionSetter = 42;
+
+ // Top-level methods.
+ assert(publicTopLevelMethod() == 50);
+ assert(_privateTopLevelMethod() == 51);
+
+ // Top-level fields.
+ assert(publicTopLevelField == 52);
+ publicTopLevelField = -52;
+ assert(publicTopLevelField == -52);
+ assert(_privateTopLevelField == 53);
+ _privateTopLevelField = -53;
+ assert(_privateTopLevelField == -53);
+
+ // Top-level getters.
+ assert(publicTopLevelGetter == 54);
+ assert(_privateTopLevelGetter == 55);
+
+ // Top-level setters.
+ publicTopLevelSetter = 42;
+ _privateTopLevelSetter = 42;
+}
diff --git a/pkg/front_end/testcases/expression/private_stuff.expression.yaml b/pkg/front_end/testcases/expression/private_stuff.expression.yaml
new file mode 100644
index 0000000..2e9f8c6
--- /dev/null
+++ b/pkg/front_end/testcases/expression/private_stuff.expression.yaml
@@ -0,0 +1,113 @@
+# Copyright (c) 2021, the Dart project authors. Please see the AUTHORS file
+# for details. All rights reserved. Use of this source code is governed by a
+# BSD-style license that can be found in the LICENSE file.
+
+entry_point: "main_private.dart"
+definitions: []
+position: "main_private.dart"
+expression: |
+ () {
+ // Class constructors.
+ Foo foo = new Foo();
+ assert(foo.what == 0);
+ foo = new Foo.publicNamed();
+ assert(foo.what == 1);
+ foo = new Foo._privateNamed();
+ assert(foo.what == 2);
+
+ // Class methods.
+ assert(foo.publicMethod() == 42);
+ assert(foo._privateMethod() == 43);
+ assert(Foo.publicStaticMethod() == 44);
+ assert(Foo._privateStaticMethod() == 45);
+
+ // Class fields.
+ assert(foo.publicField == 84);
+ foo.publicField = -84;
+ assert(foo.publicField == -84);
+ assert(foo._privateField == 85);
+ foo._privateField = -85;
+ assert(foo._privateField == -85);
+ assert(Foo.publicStaticField == 86);
+ Foo.publicStaticField = -86;
+ assert(Foo.publicStaticField == -86);
+ assert(Foo._privateStaticField == 87);
+ Foo._privateStaticField = -87;
+ assert(Foo._privateStaticField == -87);
+
+ // Class getters.
+ assert(foo.publicGetter == -1);
+ assert(foo._privateGetter == -2);
+ assert(Foo.publicStaticGetter == -3);
+ assert(Foo._privateStaticGetter == -4);
+
+ // Class setters.
+ foo.publicSetter = 42;
+ foo._privateSetter = 42;
+ Foo.publicStaticSetter = 42;
+ Foo._privateStaticSetter = 42;
+
+ // Extension methods.
+ assert(foo.publicPublicExtensionMethod() == 20);
+ assert(foo._publicPrivateExtensionMethod() == 21);
+ assert(PublicExtension.publicPublicStaticExtensionMethod() == 22);
+ assert(PublicExtension._publicPrivateStaticExtensionMethod() == 23);
+ assert(foo.privatePublicExtensionMethod() == 30);
+ assert(foo._privatePrivateExtensionMethod() == 31);
+ assert(_PrivateExtension.privatePublicStaticExtensionMethod() == 32);
+ assert(_PrivateExtension._privatePrivateStaticExtensionMethod() == 33);
+
+ // Extension fields.
+ assert(PublicExtension.publicPublicStaticExtensionField == 24);
+ PublicExtension.publicPublicStaticExtensionField = -24;
+ assert(PublicExtension.publicPublicStaticExtensionField == -24);
+ assert(PublicExtension._publicPrivateStaticExtensionField == 25);
+ PublicExtension._publicPrivateStaticExtensionField = -25;
+ assert(PublicExtension._publicPrivateStaticExtensionField == -25);
+ assert(_PrivateExtension.privatePublicStaticExtensionField == 34);
+ _PrivateExtension.privatePublicStaticExtensionField = -34;
+ assert(_PrivateExtension.privatePublicStaticExtensionField == -34);
+ assert(_PrivateExtension._privatePrivateStaticExtensionField == 35);
+ _PrivateExtension._privatePrivateStaticExtensionField = -35;
+ assert(_PrivateExtension._privatePrivateStaticExtensionField == -35);
+
+ // Extension getters.
+ assert(foo.publicPublicExtensionGetter == 26);
+ assert(foo._publicPrivateExtensionGetter == 27);
+ assert(PublicExtension.publicPublicStaticExtensionGetter == 28);
+ assert(PublicExtension._publicPrivateStaticExtensionGetter == 29);
+ assert(foo.privatePublicExtensionGetter == 36);
+ assert(foo._privatePrivateExtensionGetter == 37);
+ assert(_PrivateExtension.privatePublicStaticExtensionGetter == 38);
+ assert(_PrivateExtension._privatePrivateStaticExtensionGetter == 39);
+
+ // Extension setters.
+ foo.publicPublicExtensionSetter = 42;
+ foo._publicPrivateExtensionSetter = 42;
+ PublicExtension.publicPublicStaticExtensionSetter = 42;
+ PublicExtension._publicPrivateStaticExtensionSetter = 42;
+ foo.privatePublicExtensionSetter = 42;
+ foo._privatePrivateExtensionSetter = 42;
+ _PrivateExtension.privatePublicStaticExtensionSetter = 42;
+ _PrivateExtension._privatePrivateStaticExtensionSetter = 42;
+
+ // Top-level methods.
+ assert(publicTopLevelMethod() == 50);
+ assert(_privateTopLevelMethod() == 51);
+
+ // Top-level fields.
+ assert(publicTopLevelField == 52);
+ publicTopLevelField = -52;
+ assert(publicTopLevelField == -52);
+ assert(_privateTopLevelField == 53);
+ _privateTopLevelField = -53;
+ assert(_privateTopLevelField == -53);
+
+ // Top-level getters.
+ assert(publicTopLevelGetter == 54);
+ assert(_privateTopLevelGetter == 55);
+
+ // Top-level setters.
+ publicTopLevelSetter = 42;
+ _privateTopLevelSetter = 42;
+ }
diff --git a/pkg/front_end/testcases/expression/private_stuff.expression.yaml.expect b/pkg/front_end/testcases/expression/private_stuff.expression.yaml.expect
new file mode 100644
index 0000000..b4fd256
--- /dev/null
+++ b/pkg/front_end/testcases/expression/private_stuff.expression.yaml.expect
@@ -0,0 +1,83 @@
+Errors: {
+}
+method /* from org-dartlang-debug:synthetic_debug_expression */ debugExpr() → dynamic
+ return () → Null {
+ #lib1::Foo* foo = new #lib1::Foo::•();
+ assert(foo.{#lib1::Foo::what}{dart.core::int*} =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 0);
+ foo = new #lib1::Foo::publicNamed();
+ assert(foo.{#lib1::Foo::what}{dart.core::int*} =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 1);
+ foo = new #lib1::Foo::_privateNamed();
+ assert(foo.{#lib1::Foo::what}{dart.core::int*} =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 2);
+ assert(foo.{#lib1::Foo::publicMethod}(){() →* dart.core::int*} =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 42);
+ assert(foo.{#lib1::Foo::_privateMethod}(){() →* dart.core::int*} =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 43);
+ assert(#lib1::Foo::publicStaticMethod() =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 44);
+ assert(#lib1::Foo::_privateStaticMethod() =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 45);
+ assert(foo.{#lib1::Foo::publicField}{dart.core::int*} =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 84);
+ foo.{#lib1::Foo::publicField} = 84.{dart.core::int::unary-}(){() →* dart.core::int*};
+ assert(foo.{#lib1::Foo::publicField}{dart.core::int*} =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 84.{dart.core::int::unary-}(){() →* dart.core::int*});
+ assert(foo.{#lib1::Foo::_privateField}{dart.core::int*} =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 85);
+ foo.{#lib1::Foo::_privateField} = 85.{dart.core::int::unary-}(){() →* dart.core::int*};
+ assert(foo.{#lib1::Foo::_privateField}{dart.core::int*} =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 85.{dart.core::int::unary-}(){() →* dart.core::int*});
+ assert(#lib1::Foo::publicStaticField =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 86);
+ #lib1::Foo::publicStaticField = 86.{dart.core::int::unary-}(){() →* dart.core::int*};
+ assert(#lib1::Foo::publicStaticField =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 86.{dart.core::int::unary-}(){() →* dart.core::int*});
+ assert(#lib1::Foo::_privateStaticField =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 87);
+ #lib1::Foo::_privateStaticField = 87.{dart.core::int::unary-}(){() →* dart.core::int*};
+ assert(#lib1::Foo::_privateStaticField =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 87.{dart.core::int::unary-}(){() →* dart.core::int*});
+ assert(foo.{#lib1::Foo::publicGetter}{dart.core::int*} =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 1.{dart.core::int::unary-}(){() →* dart.core::int*});
+ assert(foo.{#lib1::Foo::_privateGetter}{dart.core::int*} =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 2.{dart.core::int::unary-}(){() →* dart.core::int*});
+ assert(#lib1::Foo::publicStaticGetter =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 3.{dart.core::int::unary-}(){() →* dart.core::int*});
+ assert(#lib1::Foo::_privateStaticGetter =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 4.{dart.core::int::unary-}(){() →* dart.core::int*});
+ foo.{#lib1::Foo::publicSetter} = 42;
+ foo.{#lib1::Foo::_privateSetter} = 42;
+ #lib1::Foo::publicStaticSetter = 42;
+ #lib1::Foo::_privateStaticSetter = 42;
+ assert(#lib1::PublicExtension|publicPublicExtensionMethod(foo) =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 20);
+ assert(#lib1::PublicExtension|_publicPrivateExtensionMethod(foo) =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 21);
+ assert(#lib1::PublicExtension|publicPublicStaticExtensionMethod() =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 22);
+ assert(#lib1::PublicExtension|_publicPrivateStaticExtensionMethod() =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 23);
+ assert(#lib1::_PrivateExtension|privatePublicExtensionMethod(foo) =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 30);
+ assert(#lib1::_PrivateExtension|_privatePrivateExtensionMethod(foo) =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 31);
+ assert(#lib1::_PrivateExtension|privatePublicStaticExtensionMethod() =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 32);
+ assert(#lib1::_PrivateExtension|_privatePrivateStaticExtensionMethod() =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 33);
+ assert(#lib1::PublicExtension|publicPublicStaticExtensionField =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 24);
+ #lib1::PublicExtension|publicPublicStaticExtensionField = 24.{dart.core::int::unary-}(){() →* dart.core::int*};
+ assert(#lib1::PublicExtension|publicPublicStaticExtensionField =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 24.{dart.core::int::unary-}(){() →* dart.core::int*});
+ assert(#lib1::PublicExtension|_publicPrivateStaticExtensionField =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 25);
+ #lib1::PublicExtension|_publicPrivateStaticExtensionField = 25.{dart.core::int::unary-}(){() →* dart.core::int*};
+ assert(#lib1::PublicExtension|_publicPrivateStaticExtensionField =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 25.{dart.core::int::unary-}(){() →* dart.core::int*});
+ assert(#lib1::_PrivateExtension|privatePublicStaticExtensionField =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 34);
+ #lib1::_PrivateExtension|privatePublicStaticExtensionField = 34.{dart.core::int::unary-}(){() →* dart.core::int*};
+ assert(#lib1::_PrivateExtension|privatePublicStaticExtensionField =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 34.{dart.core::int::unary-}(){() →* dart.core::int*});
+ assert(#lib1::_PrivateExtension|_privatePrivateStaticExtensionField =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 35);
+ #lib1::_PrivateExtension|_privatePrivateStaticExtensionField = 35.{dart.core::int::unary-}(){() →* dart.core::int*};
+ assert(#lib1::_PrivateExtension|_privatePrivateStaticExtensionField =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 35.{dart.core::int::unary-}(){() →* dart.core::int*});
+ assert(#lib1::PublicExtension|get#publicPublicExtensionGetter(foo) =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 26);
+ assert(#lib1::PublicExtension|get#_publicPrivateExtensionGetter(foo) =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 27);
+ assert(#lib1::PublicExtension|publicPublicStaticExtensionGetter =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 28);
+ assert(#lib1::PublicExtension|_publicPrivateStaticExtensionGetter =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 29);
+ assert(#lib1::_PrivateExtension|get#privatePublicExtensionGetter(foo) =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 36);
+ assert(#lib1::_PrivateExtension|get#_privatePrivateExtensionGetter(foo) =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 37);
+ assert(#lib1::_PrivateExtension|privatePublicStaticExtensionGetter =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 38);
+ assert(#lib1::_PrivateExtension|_privatePrivateStaticExtensionGetter =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 39);
+ #lib1::PublicExtension|set#publicPublicExtensionSetter(foo, 42);
+ #lib1::PublicExtension|set#_publicPrivateExtensionSetter(foo, 42);
+ #lib1::PublicExtension|publicPublicStaticExtensionSetter = 42;
+ #lib1::PublicExtension|_publicPrivateStaticExtensionSetter = 42;
+ #lib1::_PrivateExtension|set#privatePublicExtensionSetter(foo, 42);
+ #lib1::_PrivateExtension|set#_privatePrivateExtensionSetter(foo, 42);
+ #lib1::_PrivateExtension|privatePublicStaticExtensionSetter = 42;
+ #lib1::_PrivateExtension|_privatePrivateStaticExtensionSetter = 42;
+ assert(#lib1::publicTopLevelMethod() =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 50);
+ assert(#lib1::_privateTopLevelMethod() =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 51);
+ assert(#lib1::publicTopLevelField =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 52);
+ #lib1::publicTopLevelField = 52.{dart.core::int::unary-}(){() →* dart.core::int*};
+ assert(#lib1::publicTopLevelField =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 52.{dart.core::int::unary-}(){() →* dart.core::int*});
+ assert(#lib1::_privateTopLevelField =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 53);
+ #lib1::_privateTopLevelField = 53.{dart.core::int::unary-}(){() →* dart.core::int*};
+ assert(#lib1::_privateTopLevelField =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 53.{dart.core::int::unary-}(){() →* dart.core::int*});
+ assert(#lib1::publicTopLevelGetter =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 54);
+ assert(#lib1::_privateTopLevelGetter =={dart.core::num::==}{(dart.core::Object*) →* dart.core::bool*} 55);
+ #lib1::publicTopLevelSetter = 42;
+ #lib1::_privateTopLevelSetter = 42;
+ };
diff --git a/pkg/kernel/lib/transformations/async.dart b/pkg/kernel/lib/transformations/async.dart
index 86bff985..b18d98f 100644
--- a/pkg/kernel/lib/transformations/async.dart
+++ b/pkg/kernel/lib/transformations/async.dart
@@ -215,8 +215,6 @@
@override
TreeNode visitVariableSet(VariableSet expr) => unary(expr);
@override
- TreeNode visitPropertyGet(PropertyGet expr) => unary(expr);
- @override
TreeNode visitInstanceGet(InstanceGet expr) => unary(expr);
@override
TreeNode visitDynamicGet(DynamicGet expr) => unary(expr);
@@ -238,14 +236,6 @@
TreeNode visitThrow(Throw expr) => unary(expr);
@override
- TreeNode visitPropertySet(PropertySet expr) {
- return transformTreeNode(expr, () {
- expr.value = transform(expr.value)..parent = expr;
- expr.receiver = transform(expr.receiver)..parent = expr;
- });
- }
-
- @override
TreeNode visitInstanceSet(InstanceSet expr) {
return transformTreeNode(expr, () {
expr.value = transform(expr.value)..parent = expr;
@@ -275,14 +265,6 @@
}
@override
- TreeNode visitMethodInvocation(MethodInvocation expr) {
- return transformTreeNode(expr, () {
- visitArguments(expr.arguments);
- expr.receiver = transform(expr.receiver)..parent = expr;
- });
- }
-
- @override
TreeNode visitInstanceInvocation(InstanceInvocation expr) {
return transformTreeNode(expr, () {
visitArguments(expr.arguments);
diff --git a/pkg/vm/lib/transformations/call_site_annotator.dart b/pkg/vm/lib/transformations/call_site_annotator.dart
index 9ca67ac..76803a9 100644
--- a/pkg/vm/lib/transformations/call_site_annotator.dart
+++ b/pkg/vm/lib/transformations/call_site_annotator.dart
@@ -60,13 +60,16 @@
}
@override
- visitPropertySet(PropertySet node) {
- super.visitPropertySet(node);
+ visitPropertyGet(PropertyGet node) =>
+ throw 'Unexpected node ${node.runtimeType}: $node at ${node.location}';
- if (hasGenericCovariantParameters(node.interfaceTarget)) {
- annotateWithReceiver(node, node.receiver);
- }
- }
+ @override
+ visitPropertySet(PropertySet node) =>
+ throw 'Unexpected node ${node.runtimeType}: $node at ${node.location}';
+
+ @override
+ visitMethodInvocation(MethodInvocation node) =>
+ throw 'Unexpected node ${node.runtimeType}: $node at ${node.location}';
@override
visitInstanceSet(InstanceSet node) {
@@ -78,18 +81,6 @@
}
@override
- visitMethodInvocation(MethodInvocation node) {
- super.visitMethodInvocation(node);
-
- // TODO(34162): We don't need to save the type here for calls, just whether
- // or not it's a statically-checked call.
- if (node.name.text == 'call' ||
- hasGenericCovariantParameters(node.interfaceTarget)) {
- annotateWithReceiver(node, node.receiver);
- }
- }
-
- @override
visitInstanceInvocation(InstanceInvocation node) {
super.visitInstanceInvocation(node);
diff --git a/pkg/vm/lib/transformations/devirtualization.dart b/pkg/vm/lib/transformations/devirtualization.dart
index 16189aa..38de80f 100644
--- a/pkg/vm/lib/transformations/devirtualization.dart
+++ b/pkg/vm/lib/transformations/devirtualization.dart
@@ -120,12 +120,6 @@
}
@override
- visitMethodInvocation(MethodInvocation node) {
- super.visitMethodInvocation(node);
- _handleMethodInvocation(node, node.interfaceTarget, node.arguments);
- }
-
- @override
visitInstanceInvocation(InstanceInvocation node) {
super.visitInstanceInvocation(node);
_handleMethodInvocation(node, node.interfaceTarget, node.arguments);
@@ -163,12 +157,6 @@
}
@override
- visitPropertyGet(PropertyGet node) {
- super.visitPropertyGet(node);
- _handlePropertyGet(node, node.interfaceTarget);
- }
-
- @override
visitInstanceGet(InstanceGet node) {
super.visitInstanceGet(node);
_handlePropertyGet(node, node.interfaceTarget);
@@ -189,12 +177,6 @@
}
@override
- visitPropertySet(PropertySet node) {
- super.visitPropertySet(node);
- _handlePropertySet(node, node.interfaceTarget);
- }
-
- @override
visitInstanceSet(InstanceSet node) {
super.visitInstanceSet(node);
_handlePropertySet(node, node.interfaceTarget);
diff --git a/pkg/vm/lib/transformations/ffi_use_sites.dart b/pkg/vm/lib/transformations/ffi_use_sites.dart
index b213615..8cce3b0 100644
--- a/pkg/vm/lib/transformations/ffi_use_sites.dart
+++ b/pkg/vm/lib/transformations/ffi_use_sites.dart
@@ -675,44 +675,6 @@
}
@override
- visitMethodInvocation(MethodInvocation node) {
- super.visitMethodInvocation(node);
-
- final Member target = node.interfaceTarget;
- try {
- if (target == elementAtMethod) {
- final DartType pointerType =
- node.receiver.getStaticType(_staticTypeContext);
- final DartType nativeType = _pointerTypeGetTypeArg(pointerType);
-
- _ensureNativeTypeValid(nativeType, node, allowCompounds: true);
-
- Expression inlineSizeOf = _inlineSizeOf(nativeType);
- if (inlineSizeOf != null) {
- // Generates `receiver.offsetBy(inlineSizeOfExpression)`.
- return InstanceInvocation(
- InstanceAccessKind.Instance,
- node.receiver,
- offsetByMethod.name,
- Arguments(
- [multiply(node.arguments.positional.single, inlineSizeOf)]),
- interfaceTarget: offsetByMethod,
- functionType:
- Substitution.fromInterfaceType(pointerType as InterfaceType)
- .substituteType(offsetByMethod.getterType)
- as FunctionType);
- }
- }
- } on _FfiStaticTypeError {
- // It's OK to swallow the exception because the diagnostics issued will
- // cause compilation to fail. By continuing, we can report more
- // diagnostics before compilation ends.
- }
-
- return node;
- }
-
- @override
visitInstanceInvocation(InstanceInvocation node) {
super.visitInstanceInvocation(node);
diff --git a/pkg/vm/lib/transformations/mixin_deduplication.dart b/pkg/vm/lib/transformations/mixin_deduplication.dart
index 8cfc503..ba65266 100644
--- a/pkg/vm/lib/transformations/mixin_deduplication.dart
+++ b/pkg/vm/lib/transformations/mixin_deduplication.dart
@@ -166,12 +166,6 @@
}
@override
- visitPropertyGet(PropertyGet node) {
- node.interfaceTarget = _resolveNewInterfaceTarget(node.interfaceTarget);
- super.visitPropertyGet(node);
- }
-
- @override
visitInstanceGet(InstanceGet node) {
node.interfaceTarget = _resolveNewInterfaceTarget(node.interfaceTarget)!;
super.visitInstanceGet(node);
@@ -185,24 +179,12 @@
}
@override
- visitPropertySet(PropertySet node) {
- node.interfaceTarget = _resolveNewInterfaceTarget(node.interfaceTarget);
- super.visitPropertySet(node);
- }
-
- @override
visitInstanceSet(InstanceSet node) {
node.interfaceTarget = _resolveNewInterfaceTarget(node.interfaceTarget)!;
super.visitInstanceSet(node);
}
@override
- visitMethodInvocation(MethodInvocation node) {
- node.interfaceTarget = _resolveNewInterfaceTarget(node.interfaceTarget);
- super.visitMethodInvocation(node);
- }
-
- @override
visitInstanceInvocation(InstanceInvocation node) {
node.interfaceTarget =
_resolveNewInterfaceTarget(node.interfaceTarget) as Procedure;
diff --git a/pkg/vm/lib/transformations/no_dynamic_invocations_annotator.dart b/pkg/vm/lib/transformations/no_dynamic_invocations_annotator.dart
index 0f9284b..5e9600e 100644
--- a/pkg/vm/lib/transformations/no_dynamic_invocations_annotator.dart
+++ b/pkg/vm/lib/transformations/no_dynamic_invocations_annotator.dart
@@ -176,20 +176,6 @@
}
@override
- visitMethodInvocation(MethodInvocation node) {
- super.visitMethodInvocation(node);
-
- Selector selector;
- if (node.interfaceTarget == null) {
- dynamicSelectors.add(new Selector.doInvoke(node.name));
- } else {
- if (node.receiver is! ThisExpression) {
- nonThisSelectors.add(selector ??= new Selector.doInvoke(node.name));
- }
- }
- }
-
- @override
visitInstanceInvocation(InstanceInvocation node) {
super.visitInstanceInvocation(node);
if (node.receiver is! ThisExpression) {
@@ -212,25 +198,6 @@
}
@override
- visitPropertyGet(PropertyGet node) {
- super.visitPropertyGet(node);
-
- Selector selector;
- if (node.interfaceTarget == null) {
- dynamicSelectors.add(selector = new Selector.doGet(node.name));
- } else {
- if (node.receiver is! ThisExpression) {
- nonThisSelectors.add(selector ??= new Selector.doGet(node.name));
- }
-
- final target = node.interfaceTarget;
- if (target is Procedure && target.kind == ProcedureKind.Method) {
- tearOffSelectors.add(new Selector.doInvoke(node.name));
- }
- }
- }
-
- @override
visitInstanceGet(InstanceGet node) {
super.visitInstanceGet(node);
if (node.receiver is! ThisExpression) {
@@ -254,20 +221,6 @@
}
@override
- visitPropertySet(PropertySet node) {
- super.visitPropertySet(node);
-
- Selector selector;
- if (node.interfaceTarget == null) {
- dynamicSelectors.add(selector = new Selector.doSet(node.name));
- } else {
- if (node.receiver is! ThisExpression) {
- nonThisSelectors.add(selector ??= new Selector.doSet(node.name));
- }
- }
- }
-
- @override
visitInstanceSet(InstanceSet node) {
super.visitInstanceSet(node);
if (node.receiver is! ThisExpression) {
diff --git a/pkg/vm/lib/transformations/type_flow/protobuf_handler.dart b/pkg/vm/lib/transformations/type_flow/protobuf_handler.dart
index 65e79ac..5bd8db7 100644
--- a/pkg/vm/lib/transformations/type_flow/protobuf_handler.dart
+++ b/pkg/vm/lib/transformations/type_flow/protobuf_handler.dart
@@ -155,17 +155,6 @@
Statistics.protobufMetadataFieldsPruned += cls.numberOfFieldsPruned;
}
- bool _isUnusedMetadataMethodInvocation(
- _MessageClass cls, MethodInvocation node) {
- if (node.interfaceTarget != null &&
- node.interfaceTarget.enclosingClass == _builderInfoClass &&
- fieldAddingMethods.contains(node.name.text)) {
- final tagNumber = (node.arguments.positional[0] as IntLiteral).value;
- return !cls._usedTags.contains(tagNumber);
- }
- return false;
- }
-
bool _isUnusedMetadata(_MessageClass cls, InstanceInvocation node) {
if (node.interfaceTarget.enclosingClass == _builderInfoClass &&
fieldAddingMethods.contains(node.name.text)) {
@@ -191,38 +180,6 @@
_MetadataTransformer(this.ph, this.cls);
@override
- TreeNode visitMethodInvocation(MethodInvocation node) {
- if (!ph._isUnusedMetadataMethodInvocation(cls, node)) {
- super.visitMethodInvocation(node);
- return node;
- }
- // Replace the field metadata method with a dummy call to
- // `BuilderInfo.add`. This is to preserve the index calculations when
- // removing a field.
- // Change the tag-number to 0. Otherwise the decoder will get confused.
- ++numberOfFieldsPruned;
- return InstanceInvocation(
- InstanceAccessKind.Instance,
- node.receiver,
- ph._builderInfoAddMethod.name,
- Arguments(
- <Expression>[
- IntLiteral(0), // tagNumber
- NullLiteral(), // name
- NullLiteral(), // fieldType
- NullLiteral(), // defaultOrMaker
- NullLiteral(), // subBuilder
- NullLiteral(), // valueOf
- NullLiteral(), // enumValues
- ],
- types: const <DartType>[NullType()],
- ),
- interfaceTarget: ph._builderInfoAddMethod,
- functionType: ph._typeOfBuilderInfoAddOfNull)
- ..fileOffset = node.fileOffset;
- }
-
- @override
TreeNode visitInstanceInvocation(InstanceInvocation node) {
if (!ph._isUnusedMetadata(cls, node)) {
super.visitInstanceInvocation(node);
diff --git a/pkg/vm/lib/transformations/type_flow/signature_shaking.dart b/pkg/vm/lib/transformations/type_flow/signature_shaking.dart
index 709604c..2d16a7b 100644
--- a/pkg/vm/lib/transformations/type_flow/signature_shaking.dart
+++ b/pkg/vm/lib/transformations/type_flow/signature_shaking.dart
@@ -297,12 +297,6 @@
}
@override
- void visitMethodInvocation(MethodInvocation node) {
- collectCall(node.interfaceTarget, node.arguments);
- super.visitMethodInvocation(node);
- }
-
- @override
void visitInstanceInvocation(InstanceInvocation node) {
collectCall(node.interfaceTarget, node.arguments);
super.visitInstanceInvocation(node);
@@ -640,12 +634,6 @@
}
@override
- void visitMethodInvocation(MethodInvocation node) {
- super.visitMethodInvocation(node);
- transformCall(node.interfaceTarget, node, node.receiver, node.arguments);
- }
-
- @override
void visitInstanceInvocation(InstanceInvocation node) {
super.visitInstanceInvocation(node);
transformCall(node.interfaceTarget, node, node.receiver, node.arguments);
diff --git a/pkg/vm/lib/transformations/type_flow/summary_collector.dart b/pkg/vm/lib/transformations/type_flow/summary_collector.dart
index 3dc3fd6..0986d7e 100644
--- a/pkg/vm/lib/transformations/type_flow/summary_collector.dart
+++ b/pkg/vm/lib/transformations/type_flow/summary_collector.dart
@@ -1134,9 +1134,7 @@
}
TypeExpr _makeNarrowNotNull(TreeNode node, TypeExpr arg) {
- assert(node is NullCheck ||
- node is MethodInvocation && isComparisonWithNull(node) ||
- node is EqualsNull);
+ assert(node is NullCheck || node is EqualsNull);
if (arg is NarrowNotNull) {
nullTests[node] = arg;
return arg;
@@ -1355,44 +1353,6 @@
}
_variableValues = null;
return;
- } else if (node is MethodInvocation &&
- node.receiver is VariableGet &&
- node.name.text == '==') {
- assert(node.arguments.positional.length == 1 &&
- node.arguments.types.isEmpty &&
- node.arguments.named.isEmpty);
- final lhs = node.receiver as VariableGet;
- final rhs = node.arguments.positional.single;
- if (isNullLiteral(rhs)) {
- // 'x == null', where x is a variable.
- final expr = _visit(lhs);
- _makeCall(node, DirectSelector(_environment.coreTypes.objectEquals),
- Args<TypeExpr>([expr, _nullType]));
- final narrowedNotNull = _makeNarrowNotNull(node, expr);
- final int varIndex = _variablesInfo.varIndex[lhs.variable];
- if (_variableCells[varIndex] == null) {
- trueState[varIndex] = _nullType;
- falseState[varIndex] = narrowedNotNull;
- }
- _variableValues = null;
- return;
- } else if ((rhs is IntLiteral &&
- _isSubtype(lhs.variable.type,
- _environment.coreTypes.intLegacyRawType)) ||
- (rhs is StringLiteral &&
- _isSubtype(lhs.variable.type,
- _environment.coreTypes.stringLegacyRawType)) ||
- (rhs is ConstantExpression &&
- !_hasOverriddenEquals(lhs.variable.type))) {
- // 'x == c', where x is a variable and c is a constant.
- _addUse(_visit(node));
- final int varIndex = _variablesInfo.varIndex[lhs.variable];
- if (_variableCells[varIndex] == null) {
- trueState[varIndex] = _visit(rhs);
- }
- _variableValues = null;
- return;
- }
} else if (node is EqualsCall && node.left is VariableGet) {
final lhs = node.left as VariableGet;
final rhs = node.right;
@@ -1621,55 +1581,6 @@
}
@override
- TypeExpr visitMethodInvocation(MethodInvocation node) {
- if (isComparisonWithNull(node)) {
- final arg = _visit(getArgumentOfComparisonWithNull(node));
- _makeNarrowNotNull(node, arg);
- _makeCall(node, DirectSelector(_environment.coreTypes.objectEquals),
- Args<TypeExpr>([arg, _nullType]));
- return _boolType;
- }
- final receiverNode = node.receiver;
- final receiver = _visit(receiverNode);
- final args = _visitArguments(receiver, node.arguments);
- final target = node.interfaceTarget;
- if (receiverNode is ConstantExpression && node.name.text == '[]') {
- Constant constant = receiverNode.constant;
- if (constant is ListConstant) {
- return _handleIndexingIntoListConstant(constant);
- }
- }
- TypeExpr result;
- if (target == null) {
- if (node.name.text == '==') {
- _makeCall(node, new DynamicSelector(CallKind.Method, node.name), args);
- return new Type.nullable(_boolType);
- }
- if (node.name.text == 'call') {
- final recvType = _staticDartType(node.receiver);
- if ((recvType is FunctionType) ||
- (recvType == _environment.functionLegacyRawType)) {
- // Call to a Function.
- return _staticType(node);
- }
- }
- result = _makeCall(
- node, new DynamicSelector(CallKind.Method, node.name), args);
- } else {
- assert(target is Procedure && !target.isGetter);
- // TODO(alexmarkov): overloaded arithmetic operators
- result = _makeCall(
- node,
- (node.receiver is ThisExpression)
- ? new VirtualSelector(target)
- : new InterfaceSelector(target),
- args);
- }
- _updateReceiverAfterCall(receiverNode, receiver, node.name);
- return result;
- }
-
- @override
TypeExpr visitInstanceInvocation(InstanceInvocation node) {
final receiverNode = node.receiver;
final receiver = _visit(receiverNode);
@@ -1781,12 +1692,6 @@
}
@override
- TypeExpr visitPropertyGet(PropertyGet node) {
- return _handlePropertyGet(
- node, node.receiver, node.interfaceTarget, node.name);
- }
-
- @override
TypeExpr visitInstanceGet(InstanceGet node) {
return _handlePropertyGet(
node, node.receiver, node.interfaceTarget, node.name);
@@ -1809,29 +1714,6 @@
}
@override
- TypeExpr visitPropertySet(PropertySet node) {
- var receiver = _visit(node.receiver);
- var value = _visit(node.value);
- var args = new Args<TypeExpr>([receiver, value]);
- final target = node.interfaceTarget;
- if (target == null) {
- _makeCall(
- node, new DynamicSelector(CallKind.PropertySet, node.name), args);
- } else {
- assert((target is Field) || ((target is Procedure) && target.isSetter));
- _makeCall(
- node,
- (node.receiver is ThisExpression)
- ? new VirtualSelector(target, callKind: CallKind.PropertySet)
- : new InterfaceSelector(target, callKind: CallKind.PropertySet),
- args);
- }
- _updateReceiverAfterCall(node.receiver, receiver, node.name,
- isSetter: true);
- return value;
- }
-
- @override
TypeExpr visitInstanceSet(InstanceSet node) {
var receiver = _visit(node.receiver);
var value = _visit(node.value);
diff --git a/pkg/vm/lib/transformations/type_flow/transformer.dart b/pkg/vm/lib/transformations/type_flow/transformer.dart
index cadd093..a29766c 100644
--- a/pkg/vm/lib/transformations/type_flow/transformer.dart
+++ b/pkg/vm/lib/transformations/type_flow/transformer.dart
@@ -306,11 +306,9 @@
}
final bool markSkipCheck = !callSite.useCheckedEntry &&
- (node is MethodInvocation ||
- node is InstanceInvocation ||
+ (node is InstanceInvocation ||
node is DynamicInvocation ||
node is EqualsCall ||
- node is PropertySet ||
node is InstanceSet ||
node is DynamicSet);
@@ -355,16 +353,12 @@
// Tell the table selector assigner about the callsite.
final Selector selector = callSite.selector;
if (selector is InterfaceSelector && !_callSiteUsesDirectCall(node)) {
- if (node is PropertyGet ||
- node is InstanceGet ||
- node is InstanceTearOff) {
+ if (node is InstanceGet || node is InstanceTearOff) {
_tableSelectorAssigner.registerGetterCall(
selector.member, callSite.isNullableReceiver);
} else {
- assert(node is MethodInvocation ||
- node is InstanceInvocation ||
+ assert(node is InstanceInvocation ||
node is EqualsCall ||
- node is PropertySet ||
node is InstanceSet);
_tableSelectorAssigner.registerMethodOrSetterCall(
selector.member, callSite.isNullableReceiver);
@@ -485,12 +479,6 @@
}
@override
- visitMethodInvocation(MethodInvocation node) {
- _annotateCallSite(node, node.interfaceTarget);
- super.visitMethodInvocation(node);
- }
-
- @override
visitInstanceInvocation(InstanceInvocation node) {
_annotateCallSite(node, node.interfaceTarget);
super.visitInstanceInvocation(node);
@@ -521,12 +509,6 @@
}
@override
- visitPropertyGet(PropertyGet node) {
- _annotateCallSite(node, node.interfaceTarget);
- super.visitPropertyGet(node);
- }
-
- @override
visitInstanceGet(InstanceGet node) {
_annotateCallSite(node, node.interfaceTarget);
super.visitInstanceGet(node);
@@ -551,12 +533,6 @@
}
@override
- visitPropertySet(PropertySet node) {
- _annotateCallSite(node, node.interfaceTarget);
- super.visitPropertySet(node);
- }
-
- @override
visitInstanceSet(InstanceSet node) {
_annotateCallSite(node, node.interfaceTarget);
super.visitInstanceSet(node);
@@ -1105,30 +1081,6 @@
}
@override
- TreeNode visitMethodInvocation(
- MethodInvocation node, TreeNode removalSentinel) {
- node.transformOrRemoveChildren(this);
- if (_isUnreachable(node)) {
- return _makeUnreachableCall(
- _flattenArguments(node.arguments, receiver: node.receiver));
- }
- if (isComparisonWithNull(node)) {
- final nullTest = _getNullTest(node);
- if (nullTest.isAlwaysNull || nullTest.isAlwaysNotNull) {
- return _evaluateArguments(
- _flattenArguments(node.arguments, receiver: node.receiver),
- BoolLiteral(nullTest.isAlwaysNull));
- }
- }
- node.interfaceTarget =
- fieldMorpher.adjustInstanceCallTarget(node.interfaceTarget);
- if (node.interfaceTarget != null) {
- shaker.addUsedMember(node.interfaceTarget);
- }
- return node;
- }
-
- @override
TreeNode visitInstanceInvocation(
InstanceInvocation node, TreeNode removalSentinel) {
node.transformOrRemoveChildren(this);
@@ -1201,21 +1153,6 @@
}
@override
- TreeNode visitPropertyGet(PropertyGet node, TreeNode removalSentinel) {
- node.transformOrRemoveChildren(this);
- if (_isUnreachable(node)) {
- return _makeUnreachableCall([node.receiver]);
- } else {
- node.interfaceTarget =
- fieldMorpher.adjustInstanceCallTarget(node.interfaceTarget);
- if (node.interfaceTarget != null) {
- shaker.addUsedMember(node.interfaceTarget);
- }
- return node;
- }
- }
-
- @override
TreeNode visitInstanceGet(InstanceGet node, TreeNode removalSentinel) {
node.transformOrRemoveChildren(this);
if (_isUnreachable(node)) {
@@ -1264,21 +1201,6 @@
}
@override
- TreeNode visitPropertySet(PropertySet node, TreeNode removalSentinel) {
- node.transformOrRemoveChildren(this);
- if (_isUnreachable(node)) {
- return _makeUnreachableCall([node.receiver, node.value]);
- } else {
- node.interfaceTarget = fieldMorpher
- .adjustInstanceCallTarget(node.interfaceTarget, isSetter: true);
- if (node.interfaceTarget != null) {
- shaker.addUsedMember(node.interfaceTarget);
- }
- return node;
- }
- }
-
- @override
TreeNode visitInstanceSet(InstanceSet node, TreeNode removalSentinel) {
node.transformOrRemoveChildren(this);
if (_isUnreachable(node)) {
diff --git a/pkg/vm/lib/transformations/type_flow/utils.dart b/pkg/vm/lib/transformations/type_flow/utils.dart
index 2762bd8..815736c 100644
--- a/pkg/vm/lib/transformations/type_flow/utils.dart
+++ b/pkg/vm/lib/transformations/type_flow/utils.dart
@@ -377,26 +377,6 @@
String get suffix => nullabilitySuffix[this];
}
-bool isNullLiteral(Expression expr) =>
- expr is NullLiteral ||
- (expr is ConstantExpression && expr.constant is NullConstant);
-
-Expression getArgumentOfComparisonWithNull(MethodInvocation node) {
- if (node.name.text == '==') {
- final lhs = node.receiver;
- final rhs = node.arguments.positional.single;
- if (isNullLiteral(lhs)) {
- return rhs;
- } else if (isNullLiteral(rhs)) {
- return lhs;
- }
- }
- return null;
-}
-
-bool isComparisonWithNull(MethodInvocation node) =>
- getArgumentOfComparisonWithNull(node) != null;
-
bool mayHaveSideEffects(Expression node) {
// Keep this function in sync with mayHaveOrSeeSideEffects:
// If new false cases are added here, add the corresponding visibility cases
diff --git a/pkg/vm_snapshot_analysis/analysis_options.yaml b/pkg/vm_snapshot_analysis/analysis_options.yaml
index 8981941..cdcc022 100644
--- a/pkg/vm_snapshot_analysis/analysis_options.yaml
+++ b/pkg/vm_snapshot_analysis/analysis_options.yaml
@@ -1,4 +1,12 @@
-include: package:pedantic/analysis_options.1.8.0.yaml
+include: package:lints/recommended.yaml
+
analyzer:
exclude:
- lib/src/assets/**
+
+linter:
+ rules:
+ # Enable additional rules.
+ depend_on_referenced_packages: true
+ directives_ordering: true
+ sort_pub_dependencies: true
diff --git a/pkg/vm_snapshot_analysis/lib/ascii_table.dart b/pkg/vm_snapshot_analysis/lib/ascii_table.dart
index 3721814..4650cca 100644
--- a/pkg/vm_snapshot_analysis/lib/ascii_table.dart
+++ b/pkg/vm_snapshot_analysis/lib/ascii_table.dart
@@ -21,10 +21,10 @@
enum Separator {
/// Line separator looks like this: `+-------+------+`
- Line,
+ line,
/// Wave separator looks like this: `~~~~~~~~~~~~~~~~`.
- Wave,
+ wave,
}
/// A separator row in the [AsciiTable].
@@ -36,7 +36,7 @@
String render(List<int> widths, List<AlignmentDirection> alignments) {
final sb = StringBuffer();
switch (filler) {
- case Separator.Line:
+ case Separator.line:
sb.write('+');
for (var i = 0; i < widths.length; i++) {
sb.write('-' * (widths[i] + 2));
@@ -44,7 +44,7 @@
}
break;
- case Separator.Wave:
+ case Separator.wave:
sb.write('~' * Row.totalWidth(widths));
break;
}
@@ -56,7 +56,7 @@
class TextSeparatorRow extends Row {
final Text text;
TextSeparatorRow(String text)
- : text = Text(value: text, direction: AlignmentDirection.Center);
+ : text = Text(value: text, direction: AlignmentDirection.center);
@override
String render(List<int> widths, List<AlignmentDirection> alignments) {
@@ -84,7 +84,7 @@
}
}
-enum AlignmentDirection { Left, Right, Center }
+enum AlignmentDirection { left, right, center }
/// A chunk of text aligned in the given direction within a cell.
class Text {
@@ -93,11 +93,11 @@
Text({required this.value, required this.direction});
Text.left(String value)
- : this(value: value, direction: AlignmentDirection.Left);
+ : this(value: value, direction: AlignmentDirection.left);
Text.right(String value)
- : this(value: value, direction: AlignmentDirection.Right);
+ : this(value: value, direction: AlignmentDirection.right);
Text.center(String value)
- : this(value: value, direction: AlignmentDirection.Center);
+ : this(value: value, direction: AlignmentDirection.center);
String render(int width) {
if (value.length > width) {
@@ -105,11 +105,11 @@
return value.substring(0, width - 2) + '..';
}
switch (direction) {
- case AlignmentDirection.Left:
+ case AlignmentDirection.left:
return value.padRight(width);
- case AlignmentDirection.Right:
+ case AlignmentDirection.right:
return value.padLeft(width);
- case AlignmentDirection.Center:
+ case AlignmentDirection.center:
final diff = width - value.length;
return ' ' * (diff ~/ 2) + value + (' ' * (diff - diff ~/ 2));
}
@@ -135,7 +135,7 @@
void addRow(List<dynamic> columns) => rows.add(NormalRow(columns));
- void addSeparator([Separator filler = Separator.Line]) =>
+ void addSeparator([Separator filler = Separator.line]) =>
rows.add(SeparatorRow(filler));
void addTextSeparator(String text) => rows.add(TextSeparatorRow(text));
@@ -147,7 +147,7 @@
.whereType<NormalRow>()
.first
.columns
- .map((v) => v is Text ? v.direction : AlignmentDirection.Left)
+ .map((v) => v is Text ? v.direction : AlignmentDirection.left)
.toList();
List<int> widths =
List<int>.filled(rows.whereType<NormalRow>().first.columns.length, 0);
diff --git a/pkg/vm_snapshot_analysis/lib/precompiler_trace.dart b/pkg/vm_snapshot_analysis/lib/precompiler_trace.dart
index 3c9843c..06e8076 100644
--- a/pkg/vm_snapshot_analysis/lib/precompiler_trace.dart
+++ b/pkg/vm_snapshot_analysis/lib/precompiler_trace.dart
@@ -5,9 +5,9 @@
/// Helpers for working with the output of `--trace-precompiler-to` VM flag.
library vm_snapshot_analysis.precompiler_trace;
-import 'package:vm_snapshot_analysis/src/dominators.dart' as dominators;
import 'package:vm_snapshot_analysis/name.dart';
import 'package:vm_snapshot_analysis/program_info.dart';
+import 'package:vm_snapshot_analysis/src/dominators.dart' as dominators;
/// Build [CallGraph] based on the trace written by `--trace-precompiler-to`
/// flag.
@@ -32,10 +32,9 @@
/// Predecessors of this node.
final List<CallGraphNode> pred = [];
- /// Datum associated with this node: a [ProgramInfoNode] (function),
- /// a [String] (dynamic call selector) or an [int] (dispatch table
- /// selector id).
- final data;
+ /// Datum associated with this node: a [ProgramInfoNode] (function), a
+ /// [String] (dynamic call selector) or an [int] (dispatch table selector id).
+ final dynamic data;
/// Dominator of this node.
///
@@ -201,7 +200,7 @@
final selectorIdMap = <ProgramInfoNode, int>{};
/// Set of functions which can be reached through dynamic dispatch.
- final dynamicFunctions = Set<ProgramInfoNode>();
+ final dynamicFunctions = <ProgramInfoNode>{};
_TraceReader(Map<String, dynamic> data)
: strings = (data['strings'] as List<dynamic>).cast<String>(),
@@ -217,7 +216,7 @@
final nodes = <CallGraphNode>[];
final nodeByEntityId = <CallGraphNode?>[];
final callNodesBySelector = <dynamic, CallGraphNode>{};
- final allocated = Set<ProgramInfoNode>();
+ final allocated = <ProgramInfoNode>{};
T next<T>() => trace[pos++] as T;
@@ -281,7 +280,7 @@
pos--;
return false;
} else {
- throw FormatException('unexpected ref: ${ref}');
+ throw FormatException('unexpected ref: $ref');
}
return true;
}
@@ -305,7 +304,7 @@
readRefs();
break;
default:
- throw FormatException('Unknown event: ${op} at ${pos - 1}');
+ throw FormatException('Unknown event: $op at ${pos - 1}');
}
}
}
@@ -413,7 +412,7 @@
name: fieldName, parent: classNode, type: NodeType.other);
default:
- throw FormatException('unrecognized entity type ${type}');
+ throw FormatException('unrecognized entity type $type');
}
}
diff --git a/pkg/vm_snapshot_analysis/lib/program_info.dart b/pkg/vm_snapshot_analysis/lib/program_info.dart
index cca2bca..bbffd0f 100644
--- a/pkg/vm_snapshot_analysis/lib/program_info.dart
+++ b/pkg/vm_snapshot_analysis/lib/program_info.dart
@@ -63,7 +63,7 @@
void recurse(ProgramInfoNode node) {
final prevContext = context[node._type];
if (prevContext != null && node._type == NodeType.functionNode.index) {
- context[node._type] = '${prevContext}.${node.name}';
+ context[node._type] = '$prevContext.${node.name}';
} else {
context[node._type] = node.name;
}
@@ -187,7 +187,7 @@
@override
String toString() {
- return '${_typeToJson(type)} ${qualifiedName}';
+ return '${_typeToJson(type)} $qualifiedName';
}
/// Returns path to this node such that [ProgramInfo.lookup] would return
@@ -308,7 +308,7 @@
if (filter != null) {
final re = RegExp(filter.replaceAll('*', '.*'), caseSensitive: false);
matchesFilter =
- (lib, cls, fun) => re.hasMatch("${lib}::${cls ?? ''}.${fun ?? ''}");
+ (lib, cls, fun) => re.hasMatch("$lib::${cls ?? ''}.${fun ?? ''}");
} else {
matchesFilter = (_, __, ___) => true;
}
@@ -397,9 +397,9 @@
@override
String bucketFor(String? pkg, String lib, String? cls, String? fun) {
if (fun == null) {
- return '@other${_nameSeparator}';
+ return '@other$_nameSeparator';
}
- return '$lib${_nameSeparator}${cls ?? ''}${cls != '' && cls != null ? '.' : ''}${fun}';
+ return '$lib$_nameSeparator${cls ?? ''}${cls != '' && cls != null ? '.' : ''}$fun';
}
@override
@@ -412,9 +412,9 @@
@override
String bucketFor(String? pkg, String lib, String? cls, String? fun) {
if (cls == null) {
- return '@other${_nameSeparator}';
+ return '@other$_nameSeparator';
}
- return '$lib${_nameSeparator}${cls}';
+ return '$lib$_nameSeparator$cls';
}
@override
@@ -425,7 +425,7 @@
class _BucketByLibrary extends Bucketing {
@override
- String bucketFor(String? pkg, String lib, String? cls, String? fun) => '$lib';
+ String bucketFor(String? pkg, String lib, String? cls, String? fun) => lib;
const _BucketByLibrary() : super(nameComponents: const ['Library']);
}
diff --git a/pkg/vm_snapshot_analysis/lib/src/commands/compare.dart b/pkg/vm_snapshot_analysis/lib/src/commands/compare.dart
index bb2bdaf..d0d9aac 100644
--- a/pkg/vm_snapshot_analysis/lib/src/commands/compare.dart
+++ b/pkg/vm_snapshot_analysis/lib/src/commands/compare.dart
@@ -73,8 +73,7 @@
final columnWidth = args['column-width'];
final maxWidth = int.tryParse(columnWidth);
if (maxWidth == null) {
- usageException(
- 'Specified column width (${columnWidth}) is not an integer');
+ usageException('Specified column width ($columnWidth) is not an integer');
}
final oldJsonPath = _checkExists(args.rest[0]);
@@ -149,9 +148,9 @@
maxWidth: maxWidth);
print('Comparing ${oldJson.path} (old) to ${newJson.path} (new)');
- print('Old : ${totalOld} bytes.');
- print('New : ${totalNew} bytes.');
- print('Change: ${totalDiff > 0 ? '+' : ''}${totalDiff}'
+ print('Old : $totalOld bytes.');
+ print('New : $totalNew bytes.');
+ print('Change: ${totalDiff > 0 ? '+' : ''}$totalDiff'
' (${formatPercent(totalDiff, totalOld, withSign: true)}) bytes.');
if (oldSizes.snapshotInfo != null) {
@@ -163,7 +162,7 @@
computeHistogram(newSizes, HistogramType.byNodeType);
final diffTypeHistogram = Histogram.fromIterable<String>(
- Set<String>()
+ <String>{}
..addAll(oldTypeHistogram.buckets.keys)
..addAll(newTypeHistogram.buckets.keys),
sizeOf: (bucket) =>
diff --git a/pkg/vm_snapshot_analysis/lib/src/commands/explain.dart b/pkg/vm_snapshot_analysis/lib/src/commands/explain.dart
index e81833f..130e34b 100644
--- a/pkg/vm_snapshot_analysis/lib/src/commands/explain.dart
+++ b/pkg/vm_snapshot_analysis/lib/src/commands/explain.dart
@@ -49,7 +49,7 @@
precompiler trace (an output of --trace-precompiler-to flag).
''';
- ExplainCommand() {}
+ ExplainDynamicCallsCommand();
@override
Future<void> run() async {
@@ -115,7 +115,7 @@
final callNodes = callGraph.nodes
.where((n) => n.data == selector || n.data == dynSelector);
- print('\nDynamic call to ${selector}'
+ print('\nDynamic call to $selector'
' (retaining ~${histogram.buckets[selector]} bytes) occurs in:');
for (var node in callNodes) {
for (var pred in node.pred) {
diff --git a/pkg/vm_snapshot_analysis/lib/src/commands/summary.dart b/pkg/vm_snapshot_analysis/lib/src/commands/summary.dart
index 90b87b9..3709c82 100644
--- a/pkg/vm_snapshot_analysis/lib/src/commands/summary.dart
+++ b/pkg/vm_snapshot_analysis/lib/src/commands/summary.dart
@@ -110,7 +110,7 @@
final traceJson = args['precompiler-trace'];
if (traceJson != null) {
if (!File(traceJson).existsSync()) {
- usageException('Trace ${traceJson} does not exist!');
+ usageException('Trace $traceJson does not exist!');
}
if (granularity != HistogramType.byPackage &&
@@ -123,21 +123,20 @@
final columnWidth = args['column-width'];
final maxWidth = int.tryParse(columnWidth);
if (maxWidth == null) {
- usageException(
- 'Specified column width (${columnWidth}) is not an integer');
+ usageException('Specified column width ($columnWidth) is not an integer');
}
final depsStartDepthStr = args['deps-start-depth'];
final depsStartDepth = int.tryParse(depsStartDepthStr);
if (depsStartDepth == null) {
- usageException('Specified depsStartDepth (${depsStartDepthStr})'
+ usageException('Specified depsStartDepth ($depsStartDepthStr)'
' is not an integer');
}
final depsDisplayDepthStr = args['deps-display-depth'];
final depsDisplayDepth = int.tryParse(depsDisplayDepthStr);
if (depsDisplayDepth == null) {
- usageException('Specified depsDisplayDepth (${depsStartDepthStr})'
+ usageException('Specified depsDisplayDepth ($depsStartDepthStr)'
' is not an integer');
}
@@ -329,7 +328,7 @@
final size = sizes[i];
isLastPerLevel.add(isLast);
print(
- '${_treeLines(isLastPerLevel)}${n.data.qualifiedName} (total ${size} bytes)');
+ '${_treeLines(isLastPerLevel)}${n.data.qualifiedName} (total $size bytes)');
_printDominatedNodes(n,
displayDepth: displayDepth,
isLastPerLevel: isLastPerLevel,
diff --git a/pkg/vm_snapshot_analysis/lib/src/commands/treemap.dart b/pkg/vm_snapshot_analysis/lib/src/commands/treemap.dart
index 2de0694..2355a41 100644
--- a/pkg/vm_snapshot_analysis/lib/src/commands/treemap.dart
+++ b/pkg/vm_snapshot_analysis/lib/src/commands/treemap.dart
@@ -15,8 +15,8 @@
import 'dart:io';
import 'dart:isolate';
-import 'package:path/path.dart' as p;
import 'package:args/command_runner.dart';
+import 'package:path/path.dart' as p;
import 'package:vm_snapshot_analysis/treemap.dart';
import 'utils.dart';
diff --git a/pkg/vm_snapshot_analysis/lib/src/dominators.dart b/pkg/vm_snapshot_analysis/lib/src/dominators.dart
index 92504f3..9a0444b 100644
--- a/pkg/vm_snapshot_analysis/lib/src/dominators.dart
+++ b/pkg/vm_snapshot_analysis/lib/src/dominators.dart
@@ -17,7 +17,7 @@
required int root,
required Iterable<int> Function(int) succ,
required Iterable<int> Function(int) predOf,
- required void handleEdge(int from, int to),
+ required void Function(int from, int to) handleEdge,
}) {
// Compute preorder numbering for the graph using DFS.
final parent = List<int>.filled(size, -1);
@@ -82,39 +82,39 @@
// Loop over the blocks in reverse preorder (not including the graph
// entry).
- for (var block_index = size - 1; block_index >= 1; --block_index) {
+ for (var blockIndex = size - 1; blockIndex >= 1; --blockIndex) {
// Loop over the predecessors.
- final block = preorder[block_index];
+ final block = preorder[blockIndex];
// Clear the immediately dominated blocks in case ComputeDominators is
// used to recompute them.
for (final pred in predOf(block)) {
// Look for the semidominator by ascending the semidominator path
// starting from pred.
- final pred_index = preorderNumber[pred];
- var best = pred_index;
- if (pred_index > block_index) {
- compressPath(block_index, pred_index);
- best = label[pred_index];
+ final predIndex = preorderNumber[pred];
+ var best = predIndex;
+ if (predIndex > blockIndex) {
+ compressPath(blockIndex, predIndex);
+ best = label[predIndex];
}
// Update the semidominator if we've found a better one.
- semi[block_index] = math.min(semi[block_index], semi[best]);
+ semi[blockIndex] = math.min(semi[blockIndex], semi[best]);
}
// Now use label for the semidominator.
- label[block_index] = semi[block_index];
+ label[blockIndex] = semi[blockIndex];
}
// 2. Compute the immediate dominators as the nearest common ancestor of
// spanning tree parent and semidominator, for all blocks except the entry.
final result = List<int>.filled(size, -1);
- for (var block_index = 1; block_index < size; ++block_index) {
- var dom_index = idom[block_index];
- while (dom_index > semi[block_index]) {
- dom_index = idom[dom_index];
+ for (var blockIndex = 1; blockIndex < size; ++blockIndex) {
+ var domIndex = idom[blockIndex];
+ while (domIndex > semi[blockIndex]) {
+ domIndex = idom[domIndex];
}
- idom[block_index] = dom_index;
- result[preorder[block_index]] = preorder[dom_index];
+ idom[blockIndex] = domIndex;
+ result[preorder[blockIndex]] = preorder[domIndex];
}
return result;
}
diff --git a/pkg/vm_snapshot_analysis/lib/treemap.dart b/pkg/vm_snapshot_analysis/lib/treemap.dart
index 2d6da72..3c4458a 100644
--- a/pkg/vm_snapshot_analysis/lib/treemap.dart
+++ b/pkg/vm_snapshot_analysis/lib/treemap.dart
@@ -7,9 +7,9 @@
import 'dart:math';
-import 'package:vm_snapshot_analysis/program_info.dart';
import 'package:vm_snapshot_analysis/instruction_sizes.dart'
as instruction_sizes;
+import 'package:vm_snapshot_analysis/program_info.dart';
import 'package:vm_snapshot_analysis/utils.dart';
import 'package:vm_snapshot_analysis/v8_profile.dart' as v8_profile;
diff --git a/pkg/vm_snapshot_analysis/lib/utils.dart b/pkg/vm_snapshot_analysis/lib/utils.dart
index 40ef457..d722bb2 100644
--- a/pkg/vm_snapshot_analysis/lib/utils.dart
+++ b/pkg/vm_snapshot_analysis/lib/utils.dart
@@ -4,9 +4,9 @@
library vm_snapshot_analysis.utils;
import 'package:vm_snapshot_analysis/ascii_table.dart';
-import 'package:vm_snapshot_analysis/program_info.dart';
import 'package:vm_snapshot_analysis/instruction_sizes.dart'
as instruction_sizes;
+import 'package:vm_snapshot_analysis/program_info.dart';
import 'package:vm_snapshot_analysis/treemap.dart';
import 'package:vm_snapshot_analysis/v8_profile.dart' as v8_profile;
@@ -44,7 +44,7 @@
String formatPercent(int value, int total, {bool withSign = false}) {
final p = value / total * 100.0;
final sign = (withSign && value > 0) ? '+' : '';
- return '${sign}${p.toStringAsFixed(2)}%';
+ return '$sign${p.toStringAsFixed(2)}%';
}
void printHistogram(ProgramInfo info, Histogram histogram,
@@ -80,19 +80,19 @@
if (wasFiltered) formatPercent(size, totalSize),
]);
}
- table.addSeparator(interestingHiddenRows ? Separator.Wave : Separator.Line);
+ table.addSeparator(interestingHiddenRows ? Separator.wave : Separator.line);
}
if (interestingHiddenRows) {
final totalRestBytes = histogram.totalSize - visibleSize;
table.addTextSeparator(
- '$numRestRows more rows accounting for ${totalRestBytes}'
+ '$numRestRows more rows accounting for $totalRestBytes'
' (${formatPercent(totalRestBytes, totalSize)} of total) bytes');
final avg = (totalRestBytes / numRestRows).round();
table.addTextSeparator(
- 'on average that is ${avg} (${formatPercent(avg, histogram.totalSize)})'
+ 'on average that is $avg (${formatPercent(avg, histogram.totalSize)})'
' bytes per row');
- table.addSeparator(suffix.isNotEmpty ? Separator.Wave : Separator.Line);
+ table.addSeparator(suffix.isNotEmpty ? Separator.wave : Separator.line);
}
if (suffix.isNotEmpty) {
@@ -103,16 +103,16 @@
formatPercent(histogram.buckets[key]!, histogram.totalSize),
]);
}
- table.addSeparator(Separator.Line);
+ table.addSeparator(Separator.line);
}
table.render();
if (wasFiltered || visibleSize != histogram.totalSize) {
- print('In visible rows: ${visibleSize}'
+ print('In visible rows: $visibleSize'
' (${formatPercent(visibleSize, totalSize)} of total)');
}
- print('Total: ${totalSize} bytes');
+ print('Total: $totalSize bytes');
}
List<String> partsForPath(String path) {
diff --git a/pkg/vm_snapshot_analysis/lib/v8_profile.dart b/pkg/vm_snapshot_analysis/lib/v8_profile.dart
index 7dde867..8fbe488 100644
--- a/pkg/vm_snapshot_analysis/lib/v8_profile.dart
+++ b/pkg/vm_snapshot_analysis/lib/v8_profile.dart
@@ -7,10 +7,9 @@
library vm_snapshot_analysis.v8_profile;
import 'package:collection/collection.dart';
-
-import 'package:vm_snapshot_analysis/src/dominators.dart' as dominators;
import 'package:vm_snapshot_analysis/name.dart';
import 'package:vm_snapshot_analysis/program_info.dart';
+import 'package:vm_snapshot_analysis/src/dominators.dart' as dominators;
/// This class represents snapshot graph.
///
@@ -248,7 +247,7 @@
/// Returns the target of an outgoing edge with the given name (if any).
Node? operator [](String edgeName) =>
- this.edges.firstWhereOrNull((e) => e.name == edgeName)?.target;
+ edges.firstWhereOrNull((e) => e.name == edgeName)?.target;
@override
bool operator ==(Object other) {
@@ -256,7 +255,7 @@
}
@override
- int get hashCode => this.index.hashCode;
+ int get hashCode => index.hashCode;
/// Offset into [Snapshot._nodes] list at which this node begins.
int get _offset => index * snapshot.meta.nodeFieldCount;
diff --git a/pkg/vm_snapshot_analysis/pubspec.yaml b/pkg/vm_snapshot_analysis/pubspec.yaml
index ba6357d..cf8bed0 100644
--- a/pkg/vm_snapshot_analysis/pubspec.yaml
+++ b/pkg/vm_snapshot_analysis/pubspec.yaml
@@ -12,9 +12,9 @@
dependencies:
args: ^2.0.0
- path: ^1.8.0
collection: ^1.15.0
+ path: ^1.8.0
dev_dependencies:
- pedantic: ^1.11.0
+ lints: any
test: ^1.16.8
diff --git a/pkg/vm_snapshot_analysis/test/instruction_sizes_test.dart b/pkg/vm_snapshot_analysis/test/instruction_sizes_test.dart
index d136957..1590ea8 100644
--- a/pkg/vm_snapshot_analysis/test/instruction_sizes_test.dart
+++ b/pkg/vm_snapshot_analysis/test/instruction_sizes_test.dart
@@ -4,9 +4,8 @@
import 'dart:io';
-import 'package:test/test.dart';
import 'package:collection/collection.dart';
-
+import 'package:test/test.dart';
import 'package:vm_snapshot_analysis/instruction_sizes.dart'
as instruction_sizes;
import 'package:vm_snapshot_analysis/program_info.dart';
@@ -217,7 +216,7 @@
extension on Histogram {
String bucketFor(String pkg, String lib, String cls, String fun) =>
- (this.bucketInfo as Bucketing).bucketFor(pkg, lib, cls, fun);
+ (bucketInfo as Bucketing).bucketFor(pkg, lib, cls, fun);
}
void main() async {
@@ -838,8 +837,8 @@
test('treemap', () async {
await withV8Profile(testSource, (profileJson) async {
final infoJson = await loadJson(File(profileJson));
- final info = await loadProgramInfoFromJson(infoJson,
- collapseAnonymousClosures: true);
+ final info =
+ loadProgramInfoFromJson(infoJson, collapseAnonymousClosures: true);
final treemap = treemapFromInfo(info);
List<Map<String, dynamic>> childrenOf(Map<String, dynamic> node) =>
diff --git a/runtime/tests/vm/dart/deopt/restart_call_on_deopt_regress_46446_test.dart b/runtime/tests/vm/dart/deopt/restart_call_on_deopt_regress_46446_test.dart
new file mode 100644
index 0000000..02db828
--- /dev/null
+++ b/runtime/tests/vm/dart/deopt/restart_call_on_deopt_regress_46446_test.dart
@@ -0,0 +1,21 @@
+// Copyright (c) 2021, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// VMOptions=--disable-dart-dev --use-slow-path --deoptimize-on-runtime-call-every=3 --optimization-counter-threshold=10 --deterministic
+
+import 'dart:collection';
+
+main() {
+ final entry = Entry();
+
+ final list = LinkedList<Entry>();
+ for (int i = 0; i < 100; ++i) {
+ list.addFirst(entry);
+ entry.unlink();
+ list.addFirst(entry);
+ entry.unlink();
+ }
+}
+
+class Entry extends LinkedListEntry<Entry> {}
diff --git a/runtime/tests/vm/dart_2/deopt/restart_call_on_deopt_regress_46446_test.dart b/runtime/tests/vm/dart_2/deopt/restart_call_on_deopt_regress_46446_test.dart
new file mode 100644
index 0000000..02db828
--- /dev/null
+++ b/runtime/tests/vm/dart_2/deopt/restart_call_on_deopt_regress_46446_test.dart
@@ -0,0 +1,21 @@
+// Copyright (c) 2021, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// VMOptions=--disable-dart-dev --use-slow-path --deoptimize-on-runtime-call-every=3 --optimization-counter-threshold=10 --deterministic
+
+import 'dart:collection';
+
+main() {
+ final entry = Entry();
+
+ final list = LinkedList<Entry>();
+ for (int i = 0; i < 100; ++i) {
+ list.addFirst(entry);
+ entry.unlink();
+ list.addFirst(entry);
+ entry.unlink();
+ }
+}
+
+class Entry extends LinkedListEntry<Entry> {}
diff --git a/runtime/vm/clustered_snapshot.cc b/runtime/vm/clustered_snapshot.cc
index 9040385..5e8c466 100644
--- a/runtime/vm/clustered_snapshot.cc
+++ b/runtime/vm/clustered_snapshot.cc
@@ -666,7 +666,7 @@
ArrayPtr Finish() {
if (table != Array::null()) {
- FillGap(Smi::Value(table->untag()->length_) - current_index);
+ FillGap(Smi::Value(table->untag()->length()) - current_index);
}
auto result = table;
table = Array::null();
@@ -682,7 +682,7 @@
d->heap()->old_space()->AllocateSnapshot(instance_size));
Deserializer::InitializeHeader(table, kArrayCid, instance_size);
table->untag()->type_arguments_ = TypeArguments::null();
- table->untag()->length_ = Smi::New(length);
+ table->untag()->length_ = CompressedSmiPtr(Smi::New(length));
for (intptr_t i = 0; i < SetType::kFirstKeyIndex; i++) {
table->untag()->data()[i] = Smi::New(0);
}
@@ -3536,18 +3536,20 @@
InstancePtr instance = Instance::RawCast(object);
objects_.Add(instance);
const intptr_t next_field_offset = host_next_field_offset_in_words_
- << kWordSizeLog2;
+ << kCompressedWordSizeLog2;
const auto unboxed_fields_bitmap =
s->isolate_group()->shared_class_table()->GetUnboxedFieldsMapAt(cid_);
intptr_t offset = Instance::NextFieldOffset();
while (offset < next_field_offset) {
// Skips unboxed fields
- if (!unboxed_fields_bitmap.Get(offset / kWordSize)) {
- ObjectPtr raw_obj = *reinterpret_cast<ObjectPtr*>(
- reinterpret_cast<uword>(instance->untag()) + offset);
+ if (!unboxed_fields_bitmap.Get(offset / kCompressedWordSize)) {
+ ObjectPtr raw_obj =
+ reinterpret_cast<CompressedObjectPtr*>(
+ reinterpret_cast<uword>(instance->untag()) + offset)
+ ->Decompress(instance->untag()->heap_base());
s->Push(raw_obj);
}
- offset += kWordSize;
+ offset += kCompressedWordSize;
}
}
@@ -3564,13 +3566,13 @@
}
const intptr_t instance_size = compiler::target::RoundedAllocationSize(
- target_instance_size_in_words_ * compiler::target::kWordSize);
+ target_instance_size_in_words_ * compiler::target::kCompressedWordSize);
target_memory_size_ += instance_size * count;
}
void WriteFill(Serializer* s) {
intptr_t next_field_offset = host_next_field_offset_in_words_
- << kWordSizeLog2;
+ << kCompressedWordSizeLog2;
const intptr_t count = objects_.length();
s->WriteUnsigned64(CalculateTargetUnboxedFieldsBitmap(s, cid_).Value());
const auto unboxed_fields_bitmap =
@@ -3581,17 +3583,19 @@
AutoTraceObject(instance);
intptr_t offset = Instance::NextFieldOffset();
while (offset < next_field_offset) {
- if (unboxed_fields_bitmap.Get(offset / kWordSize)) {
+ if (unboxed_fields_bitmap.Get(offset / kCompressedWordSize)) {
// Writes 32 bits of the unboxed value at a time
- const uword value = *reinterpret_cast<uword*>(
+ const compressed_uword value = *reinterpret_cast<compressed_uword*>(
reinterpret_cast<uword>(instance->untag()) + offset);
s->WriteWordWith32BitWrites(value);
} else {
- ObjectPtr raw_obj = *reinterpret_cast<ObjectPtr*>(
- reinterpret_cast<uword>(instance->untag()) + offset);
+ ObjectPtr raw_obj =
+ reinterpret_cast<CompressedObjectPtr*>(
+ reinterpret_cast<uword>(instance->untag()) + offset)
+ ->Decompress(instance->untag()->heap_base());
s->WriteElementRef(raw_obj, offset);
}
- offset += kWordSize;
+ offset += kCompressedWordSize;
}
}
}
@@ -3641,8 +3645,8 @@
const intptr_t count = d->ReadUnsigned();
next_field_offset_in_words_ = d->Read<int32_t>();
instance_size_in_words_ = d->Read<int32_t>();
- intptr_t instance_size =
- Object::RoundedAllocationSize(instance_size_in_words_ * kWordSize);
+ intptr_t instance_size = Object::RoundedAllocationSize(
+ instance_size_in_words_ * kCompressedWordSize);
for (intptr_t i = 0; i < count; i++) {
d->AssignRef(old_space->AllocateSnapshot(instance_size));
}
@@ -3650,9 +3654,10 @@
}
void ReadFill(Deserializer* d, bool primary) {
- intptr_t next_field_offset = next_field_offset_in_words_ << kWordSizeLog2;
- intptr_t instance_size =
- Object::RoundedAllocationSize(instance_size_in_words_ * kWordSize);
+ intptr_t next_field_offset = next_field_offset_in_words_
+ << kCompressedWordSizeLog2;
+ intptr_t instance_size = Object::RoundedAllocationSize(
+ instance_size_in_words_ * kCompressedWordSize);
const UnboxedFieldBitmap unboxed_fields_bitmap(d->ReadUnsigned64());
for (intptr_t id = start_index_; id < stop_index_; id++) {
@@ -3661,23 +3666,23 @@
primary && is_canonical());
intptr_t offset = Instance::NextFieldOffset();
while (offset < next_field_offset) {
- if (unboxed_fields_bitmap.Get(offset / kWordSize)) {
- uword* p = reinterpret_cast<uword*>(
+ if (unboxed_fields_bitmap.Get(offset / kCompressedWordSize)) {
+ compressed_uword* p = reinterpret_cast<compressed_uword*>(
reinterpret_cast<uword>(instance->untag()) + offset);
// Reads 32 bits of the unboxed value at a time
*p = d->ReadWordWith32BitReads();
} else {
- ObjectPtr* p = reinterpret_cast<ObjectPtr*>(
+ CompressedObjectPtr* p = reinterpret_cast<CompressedObjectPtr*>(
reinterpret_cast<uword>(instance->untag()) + offset);
*p = d->ReadRef();
}
- offset += kWordSize;
+ offset += kCompressedWordSize;
}
- if (offset < instance_size) {
- ObjectPtr* p = reinterpret_cast<ObjectPtr*>(
+ while (offset < instance_size) {
+ CompressedObjectPtr* p = reinterpret_cast<CompressedObjectPtr*>(
reinterpret_cast<uword>(instance->untag()) + offset);
*p = Object::null();
- offset += kWordSize;
+ offset += kCompressedWordSize;
}
ASSERT(offset == instance_size);
}
@@ -5070,10 +5075,10 @@
ArrayPtr array = Array::RawCast(object);
objects_.Add(array);
- s->Push(array->untag()->type_arguments_);
- const intptr_t length = Smi::Value(array->untag()->length_);
+ s->Push(array->untag()->type_arguments());
+ const intptr_t length = Smi::Value(array->untag()->length());
for (intptr_t i = 0; i < length; i++) {
- s->Push(array->untag()->data()[i]);
+ s->Push(array->untag()->element(i));
}
}
@@ -5084,7 +5089,7 @@
ArrayPtr array = objects_[i];
s->AssignRef(array);
AutoTraceObject(array);
- const intptr_t length = Smi::Value(array->untag()->length_);
+ const intptr_t length = Smi::Value(array->untag()->length());
s->WriteUnsigned(length);
target_memory_size_ += compiler::target::Array::InstanceSize(length);
}
@@ -5095,11 +5100,11 @@
for (intptr_t i = 0; i < count; i++) {
ArrayPtr array = objects_[i];
AutoTraceObject(array);
- const intptr_t length = Smi::Value(array->untag()->length_);
+ const intptr_t length = Smi::Value(array->untag()->length());
s->WriteUnsigned(length);
- WriteField(array, type_arguments_);
+ WriteCompressedField(array, type_arguments);
for (intptr_t j = 0; j < length; j++) {
- s->WriteElementRef(array->untag()->data()[j], j);
+ s->WriteElementRef(array->untag()->element(j), j);
}
}
}
@@ -5136,7 +5141,7 @@
primary && is_canonical());
array->untag()->type_arguments_ =
static_cast<TypeArgumentsPtr>(d->ReadRef());
- array->untag()->length_ = Smi::New(length);
+ array->untag()->length_ = CompressedSmiPtr(Smi::New(length));
for (intptr_t j = 0; j < length; j++) {
array->untag()->data()[j] = d->ReadRef();
}
@@ -6191,7 +6196,7 @@
auto const array = Array::RawCast(obj);
for (intptr_t i = 0, n = Smi::Value(array->untag()->length()); i < n;
i++) {
- ObjectPtr elem = array->untag()->data()[i];
+ ObjectPtr elem = array->untag()->element(i);
links.Add({elem, V8SnapshotProfileWriter::Reference::Element(i)});
}
break;
diff --git a/runtime/vm/clustered_snapshot.h b/runtime/vm/clustered_snapshot.h
index 26411ce..9dab2b8 100644
--- a/runtime/vm/clustered_snapshot.h
+++ b/runtime/vm/clustered_snapshot.h
@@ -654,7 +654,7 @@
ObjectPtr Ref(intptr_t index) const {
ASSERT(index > 0);
ASSERT(index <= num_objects_);
- return refs_->untag()->data()[index];
+ return refs_->untag()->element(index);
}
ObjectPtr ReadRef() { return Ref(ReadUnsigned()); }
diff --git a/runtime/vm/compiler/asm_intrinsifier_arm64.cc b/runtime/vm/compiler/asm_intrinsifier_arm64.cc
index ce8d67d..36cb205 100644
--- a/runtime/vm/compiler/asm_intrinsifier_arm64.cc
+++ b/runtime/vm/compiler/asm_intrinsifier_arm64.cc
@@ -64,20 +64,21 @@
// Store backing array object in growable array object.
__ ldr(R1, Address(SP, kArrayOffset)); // Data argument.
// R0 is new, no barrier needed.
- __ StoreIntoObjectNoBarrier(
+ __ StoreCompressedIntoObjectNoBarrier(
R0, FieldAddress(R0, target::GrowableObjectArray::data_offset()), R1);
// R0: new growable array object start as a tagged pointer.
// Store the type argument field in the growable array object.
__ ldr(R1, Address(SP, kTypeArgumentsOffset)); // Type argument.
- __ StoreIntoObjectNoBarrier(
+ __ StoreCompressedIntoObjectNoBarrier(
R0,
FieldAddress(R0, target::GrowableObjectArray::type_arguments_offset()),
R1);
// Set the length field in the growable array object to 0.
__ LoadImmediate(R1, 0);
- __ str(R1, FieldAddress(R0, target::GrowableObjectArray::length_offset()));
+ __ StoreCompressedIntoObjectNoBarrier(
+ R0, FieldAddress(R0, target::GrowableObjectArray::length_offset()), R1);
__ ret(); // Returns the newly allocated object in R0.
__ Bind(normal_ir_body);
@@ -1265,7 +1266,8 @@
// Receiver.
__ ldr(R0, Address(SP, 0 * target::kWordSize));
// Field '_state'.
- __ ldr(R1, FieldAddress(R0, LookupFieldOffsetInBytes(state_field)));
+ __ LoadCompressed(R1,
+ FieldAddress(R0, LookupFieldOffsetInBytes(state_field)));
// Addresses of _state[0].
const int64_t disp =
@@ -1509,9 +1511,9 @@
// Compare type arguments, host_type_arguments_field_offset_in_words in R0.
__ ldp(R1, R2, Address(SP, 0 * target::kWordSize, Address::PairOffset));
__ AddImmediate(R1, -kHeapObjectTag);
- __ ldr(R1, Address(R1, R0, UXTX, Address::Scaled));
+ __ ldr(R1, Address(R1, R0, UXTX, Address::Scaled), kObjectBytes);
__ AddImmediate(R2, -kHeapObjectTag);
- __ ldr(R2, Address(R2, R0, UXTX, Address::Scaled));
+ __ ldr(R2, Address(R2, R0, UXTX, Address::Scaled), kObjectBytes);
__ CompareObjectRegisters(R1, R2);
__ b(normal_ir_body, NE);
// Fall through to equal case if type arguments are equal.
diff --git a/runtime/vm/compiler/asm_intrinsifier_x64.cc b/runtime/vm/compiler/asm_intrinsifier_x64.cc
index 3ba88b8..b7109e8 100644
--- a/runtime/vm/compiler/asm_intrinsifier_x64.cc
+++ b/runtime/vm/compiler/asm_intrinsifier_x64.cc
@@ -61,19 +61,19 @@
// Store backing array object in growable array object.
__ movq(RCX, Address(RSP, kArrayOffset)); // data argument.
// RAX is new, no barrier needed.
- __ StoreIntoObjectNoBarrier(
+ __ StoreCompressedIntoObjectNoBarrier(
RAX, FieldAddress(RAX, target::GrowableObjectArray::data_offset()), RCX);
// RAX: new growable array object start as a tagged pointer.
// Store the type argument field in the growable array object.
__ movq(RCX, Address(RSP, kTypeArgumentsOffset)); // type argument.
- __ StoreIntoObjectNoBarrier(
+ __ StoreCompressedIntoObjectNoBarrier(
RAX,
FieldAddress(RAX, target::GrowableObjectArray::type_arguments_offset()),
RCX);
// Set the length field in the growable array object to 0.
- __ ZeroInitSmiField(
+ __ ZeroInitCompressedSmiField(
FieldAddress(RAX, target::GrowableObjectArray::length_offset()));
__ ret(); // returns the newly allocated object in RAX.
@@ -1111,7 +1111,8 @@
// Receiver.
__ movq(RAX, Address(RSP, +1 * target::kWordSize));
// Field '_state'.
- __ movq(RBX, FieldAddress(RAX, LookupFieldOffsetInBytes(state_field)));
+ __ LoadCompressed(RBX,
+ FieldAddress(RAX, LookupFieldOffsetInBytes(state_field)));
// Addresses of _state[0] and _state[1].
const intptr_t scale =
target::Instance::ElementSizeFor(kTypedDataUint32ArrayCid);
@@ -1356,9 +1357,9 @@
// Compare type arguments, host_type_arguments_field_offset_in_words in RAX.
__ movq(RCX, Address(RSP, +1 * target::kWordSize));
__ movq(RDX, Address(RSP, +2 * target::kWordSize));
- __ movq(RCX, FieldAddress(RCX, RAX, TIMES_8, 0));
- __ movq(RDX, FieldAddress(RDX, RAX, TIMES_8, 0));
- __ cmpq(RCX, RDX);
+ __ OBJ(mov)(RCX, FieldAddress(RCX, RAX, TIMES_COMPRESSED_WORD_SIZE, 0));
+ __ OBJ(mov)(RDX, FieldAddress(RDX, RAX, TIMES_COMPRESSED_WORD_SIZE, 0));
+ __ OBJ(cmp)(RCX, RDX);
__ j(NOT_EQUAL, normal_ir_body, Assembler::kNearJump);
// Fall through to equal case if type arguments are equal.
diff --git a/runtime/vm/compiler/assembler/assembler_arm.cc b/runtime/vm/compiler/assembler/assembler_arm.cc
index 3c7f331..63cba26 100644
--- a/runtime/vm/compiler/assembler/assembler_arm.cc
+++ b/runtime/vm/compiler/assembler/assembler_arm.cc
@@ -503,8 +503,8 @@
ASSERT(rt != kNoRegister);
ASSERT(cond != kNoCondition);
int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) | B24 |
- B23 | L | (static_cast<int32_t>(rn) << kLdExRnShift) |
- (static_cast<int32_t>(rt) << kLdExRtShift) | B11 | B10 |
+ B23 | L | (static_cast<int32_t>(rn) << kLdrExRnShift) |
+ (static_cast<int32_t>(rt) << kLdrExRtShift) | B11 | B10 |
B9 | B8 | B7 | B4 | B3 | B2 | B1 | B0;
Emit(encoding);
}
@@ -527,6 +527,46 @@
Emit(kDataMemoryBarrier);
}
+static int32_t BitFieldExtractEncoding(bool sign_extend,
+ Register rd,
+ Register rn,
+ int32_t lsb,
+ int32_t width,
+ Condition cond) {
+ ASSERT(rn != kNoRegister && rn != PC);
+ ASSERT(rd != kNoRegister && rd != PC);
+ ASSERT(cond != kNoCondition);
+ ASSERT(Utils::IsUint(kBitFieldExtractLSBBits, lsb));
+ ASSERT(width >= 1);
+ ASSERT(lsb + width <= kBitsPerInt32);
+ const int32_t widthm1 = width - 1;
+ ASSERT(Utils::IsUint(kBitFieldExtractWidthBits, widthm1));
+ return (static_cast<int32_t>(cond) << kConditionShift) | B26 | B25 | B24 |
+ B23 | (sign_extend ? 0 : B22) | B21 |
+ (widthm1 << kBitFieldExtractWidthShift) |
+ (static_cast<int32_t>(rd) << kRdShift) |
+ (lsb << kBitFieldExtractLSBShift) | B6 | B4 |
+ (static_cast<int32_t>(rn) << kBitFieldExtractRnShift);
+}
+
+void Assembler::sbfx(Register rd,
+ Register rn,
+ int32_t lsb,
+ int32_t width,
+ Condition cond) {
+ const bool sign_extend = true;
+ Emit(BitFieldExtractEncoding(sign_extend, rd, rn, lsb, width, cond));
+}
+
+void Assembler::ubfx(Register rd,
+ Register rn,
+ int32_t lsb,
+ int32_t width,
+ Condition cond) {
+ const bool sign_extend = false;
+ Emit(BitFieldExtractEncoding(sign_extend, rd, rn, lsb, width, cond));
+}
+
void Assembler::EnterFullSafepoint(Register addr, Register state) {
// We generate the same number of instructions whether or not the slow-path is
// forced. This simplifies GenerateJitCallbackTrampolines.
@@ -2473,9 +2513,26 @@
PopList(kAbiPreservedCpuRegs);
}
-void Assembler::MoveRegister(Register rd, Register rm, Condition cond) {
- if (rd != rm) {
- mov(rd, Operand(rm), cond);
+void Assembler::ExtendValue(Register rd,
+ Register rm,
+ OperandSize sz,
+ Condition cond) {
+ switch (sz) {
+ case kUnsignedFourBytes:
+ case kFourBytes:
+ if (rd == rm) return;
+ return mov(rd, Operand(rm), cond);
+ case kUnsignedTwoBytes:
+ return ubfx(rd, rm, 0, kBitsPerInt16, cond);
+ case kTwoBytes:
+ return sbfx(rd, rm, 0, kBitsPerInt16, cond);
+ case kUnsignedByte:
+ return ubfx(rd, rm, 0, kBitsPerInt8, cond);
+ case kByte:
+ return sbfx(rd, rm, 0, kBitsPerInt8, cond);
+ default:
+ UNIMPLEMENTED();
+ break;
}
}
diff --git a/runtime/vm/compiler/assembler/assembler_arm.h b/runtime/vm/compiler/assembler/assembler_arm.h
index a54758b..09162f4 100644
--- a/runtime/vm/compiler/assembler/assembler_arm.h
+++ b/runtime/vm/compiler/assembler/assembler_arm.h
@@ -394,6 +394,7 @@
void LoadMemoryValue(Register dst, Register base, int32_t offset) {
LoadFromOffset(dst, base, offset);
}
+ void LoadCompressed(Register dest, const Address& slot) { ldr(dest, slot); }
void StoreMemoryValue(Register src, Register base, int32_t offset) {
StoreToOffset(src, base, offset);
}
@@ -574,6 +575,18 @@
void dmb();
+ // Media instructions.
+ void sbfx(Register rd,
+ Register rn,
+ int32_t lsb,
+ int32_t width,
+ Condition cond = AL);
+ void ubfx(Register rd,
+ Register rn,
+ int32_t lsb,
+ int32_t width,
+ Condition cond = AL);
+
// Emit code to transition between generated and native modes.
//
// These require that CSP and SP are equal and aligned and require two scratch
@@ -1086,7 +1099,34 @@
b(label, ZERO);
}
- void MoveRegister(Register rd, Register rm, Condition cond = AL);
+ void MoveRegister(Register rd, Register rm, Condition cond) {
+ ExtendValue(rd, rm, kFourBytes, cond);
+ }
+ void MoveRegister(Register rd, Register rm) override {
+ MoveRegister(rd, rm, AL);
+ }
+ void MoveAndSmiTagRegister(Register rd, Register rm, Condition cond) {
+ ExtendAndSmiTagValue(rd, rm, kFourBytes, cond);
+ }
+ void MoveAndSmiTagRegister(Register rd, Register rm) override {
+ MoveAndSmiTagRegister(rd, rm, AL);
+ }
+ void ExtendValue(Register rd, Register rm, OperandSize sz, Condition cond);
+ void ExtendValue(Register rd, Register rm, OperandSize sz) override {
+ ExtendValue(rd, rm, sz, AL);
+ }
+ void ExtendAndSmiTagValue(Register rd,
+ Register rm,
+ OperandSize sz,
+ Condition cond) {
+ ExtendValue(rd, rm, sz, cond);
+ SmiTag(rd, cond);
+ }
+ void ExtendAndSmiTagValue(Register rd,
+ Register rm,
+ OperandSize sz = kFourBytes) override {
+ ExtendAndSmiTagValue(rd, rm, sz, AL);
+ }
// Convenience shift instructions. Use mov instruction with shifter operand
// for variants setting the status flags.
@@ -1125,17 +1165,14 @@
void Vsqrtqs(QRegister qd, QRegister qm, QRegister temp);
void Vdivqs(QRegister qd, QRegister qn, QRegister qm);
- void SmiTag(Register reg, Condition cond = AL) {
- Lsl(reg, reg, Operand(kSmiTagSize), cond);
- }
+ void SmiTag(Register reg, Condition cond) { SmiTag(reg, reg, cond); }
+ void SmiTag(Register reg) override { SmiTag(reg, AL); }
void SmiTag(Register dst, Register src, Condition cond = AL) {
Lsl(dst, src, Operand(kSmiTagSize), cond);
}
- void SmiUntag(Register reg, Condition cond = AL) {
- Asr(reg, reg, Operand(kSmiTagSize), cond);
- }
+ void SmiUntag(Register reg, Condition cond = AL) { SmiUntag(reg, reg, cond); }
void SmiUntag(Register dst, Register src, Condition cond = AL) {
Asr(dst, src, Operand(kSmiTagSize), cond);
@@ -1272,6 +1309,13 @@
Register array,
Register index);
+ void LoadCompressedFieldAddressForRegOffset(Register address,
+ Register instance,
+ Register offset_in_words_as_smi) {
+ return LoadFieldAddressForRegOffset(address, instance,
+ offset_in_words_as_smi);
+ }
+
void LoadFieldAddressForRegOffset(Register address,
Register instance,
Register offset_in_words_as_smi);
diff --git a/runtime/vm/compiler/assembler/assembler_arm64.cc b/runtime/vm/compiler/assembler/assembler_arm64.cc
index 7c30d2d..3807314 100644
--- a/runtime/vm/compiler/assembler/assembler_arm64.cc
+++ b/runtime/vm/compiler/assembler/assembler_arm64.cc
@@ -186,6 +186,54 @@
}
}
+void Assembler::ExtendValue(Register rd, Register rn, OperandSize sz) {
+ switch (sz) {
+ case kEightBytes:
+ if (rd == rn) return; // No operation needed.
+ return mov(rd, rn);
+ case kUnsignedFourBytes:
+ return uxtw(rd, rn);
+ case kFourBytes:
+ return sxtw(rd, rn);
+ case kUnsignedTwoBytes:
+ return uxth(rd, rn);
+ case kTwoBytes:
+ return sxth(rd, rn);
+ case kUnsignedByte:
+ return uxtb(rd, rn);
+ case kByte:
+ return sxtb(rd, rn);
+ default:
+ UNIMPLEMENTED();
+ break;
+ }
+}
+
+// Equivalent to left rotate of kSmiTagSize.
+static constexpr intptr_t kBFMTagRotate = kBitsPerInt64 - kSmiTagSize;
+
+void Assembler::ExtendAndSmiTagValue(Register rd, Register rn, OperandSize sz) {
+ switch (sz) {
+ case kEightBytes:
+ return sbfm(rd, rn, kBFMTagRotate, target::kSmiBits + 1);
+ case kUnsignedFourBytes:
+ return ubfm(rd, rn, kBFMTagRotate, kBitsPerInt32 - 1);
+ case kFourBytes:
+ return sbfm(rd, rn, kBFMTagRotate, kBitsPerInt32 - 1);
+ case kUnsignedTwoBytes:
+ return ubfm(rd, rn, kBFMTagRotate, kBitsPerInt16 - 1);
+ case kTwoBytes:
+ return sbfm(rd, rn, kBFMTagRotate, kBitsPerInt16 - 1);
+ case kUnsignedByte:
+ return ubfm(rd, rn, kBFMTagRotate, kBitsPerInt8 - 1);
+ case kByte:
+ return sbfm(rd, rn, kBFMTagRotate, kBitsPerInt8 - 1);
+ default:
+ UNIMPLEMENTED();
+ break;
+ }
+}
+
void Assembler::Bind(Label* label) {
ASSERT(!label->IsBound());
const intptr_t bound_pc = buffer_.Size();
@@ -937,12 +985,14 @@
#if !defined(DART_COMPRESSED_POINTERS)
LoadFromOffset(dest, base, offset);
#else
- if (Address::CanHoldOffset(offset, Address::Offset, kFourBytes)) {
- ldr(dest, Address(base, offset), kUnsignedFourBytes); // Zero-extension.
+ if (Address::CanHoldOffset(offset, Address::Offset, kObjectBytes)) {
+ ldr(dest, Address(base, offset, Address::Offset, kObjectBytes),
+ kUnsignedFourBytes); // Zero-extension.
} else {
ASSERT(base != TMP2);
AddImmediate(TMP2, base, offset);
- ldr(dest, Address(base, 0), kUnsignedFourBytes); // Zero-extension.
+ ldr(dest, Address(base, 0, Address::Offset, kObjectBytes),
+ kUnsignedFourBytes); // Zero-extension.
}
add(dest, dest, Operand(HEAP_BITS, LSL, 32));
#endif
@@ -962,6 +1012,30 @@
#endif
}
+void Assembler::LoadCompressedSmiFromOffset(Register dest,
+ Register base,
+ int32_t offset) {
+#if !defined(DART_COMPRESSED_POINTERS)
+ LoadFromOffset(dest, base, offset);
+#else
+ if (Address::CanHoldOffset(offset, Address::Offset, kObjectBytes)) {
+ ldr(dest, Address(base, offset, Address::Offset, kObjectBytes),
+ kUnsignedFourBytes); // Zero-extension.
+ } else {
+ ASSERT(base != TMP2);
+ AddImmediate(TMP2, base, offset);
+ ldr(dest, Address(base, 0, Address::Offset, kObjectBytes),
+ kUnsignedFourBytes); // Zero-extension.
+ }
+#endif
+#if defined(DEBUG)
+ Label done;
+ BranchIfSmi(dest, &done);
+ Stop("Expected Smi");
+ Bind(&done);
+#endif
+}
+
// Preserves object and value registers.
void Assembler::StoreIntoObjectFilter(Register object,
Register value,
@@ -1112,6 +1186,22 @@
Register slot,
Register value,
CanBeSmi can_be_smi) {
+ str(value, Address(slot, 0));
+ StoreIntoArrayBarrier(object, slot, value, can_be_smi);
+}
+
+void Assembler::StoreCompressedIntoArray(Register object,
+ Register slot,
+ Register value,
+ CanBeSmi can_be_smi) {
+ str(value, Address(slot, 0, Address::Offset, kObjectBytes), kObjectBytes);
+ StoreIntoArrayBarrier(object, slot, value, can_be_smi);
+}
+
+void Assembler::StoreIntoArrayBarrier(Register object,
+ Register slot,
+ Register value,
+ CanBeSmi can_be_smi) {
const bool spill_lr = lr_state().LRContainsReturnAddress();
ASSERT(object != TMP);
ASSERT(object != TMP2);
@@ -1120,8 +1210,6 @@
ASSERT(slot != TMP);
ASSERT(slot != TMP2);
- str(value, Address(slot, 0));
-
// In parallel, test whether
// - object is old and not remembered and value is new, or
// - object is old and value is old and not marked and concurrent marking is
@@ -1803,13 +1891,13 @@
const intptr_t count_offset = target::Array::element_offset(1);
// Sadly this cannot use ldp because ldp requires aligned offsets.
- ldr(R1, FieldAddress(R5, cid_offset));
- ldr(R2, FieldAddress(R5, count_offset));
+ ldr(R1, FieldAddress(R5, cid_offset, kObjectBytes), kObjectBytes);
+ ldr(R2, FieldAddress(R5, count_offset, kObjectBytes), kObjectBytes);
LoadClassIdMayBeSmi(IP0, R0);
- add(R2, R2, Operand(target::ToRawSmi(1)));
- cmp(R1, Operand(IP0, LSL, 1));
+ add(R2, R2, Operand(target::ToRawSmi(1)), kObjectBytes);
+ cmp(R1, Operand(IP0, LSL, 1), kObjectBytes);
b(&miss, NE);
- str(R2, FieldAddress(R5, count_offset));
+ str(R2, FieldAddress(R5, count_offset, kObjectBytes), kObjectBytes);
LoadImmediate(R4, 0); // GC-safe for OptimizeInvokedFunction
// Fall through to unchecked entry.
@@ -1837,7 +1925,7 @@
ASSERT_EQUAL(CodeSize() - start,
target::Instructions::kMonomorphicEntryOffsetAOT);
LoadClassId(IP0, R0);
- cmp(R5, Operand(IP0, LSL, 1));
+ cmp(R5, Operand(IP0, LSL, 1), kObjectBytes);
b(&miss, NE);
// Fall through to unchecked entry.
@@ -2105,6 +2193,16 @@
}
}
+void Assembler::LoadCompressedFieldAddressForRegOffset(
+ Register address,
+ Register instance,
+ Register offset_in_compressed_words_as_smi) {
+ add(address, instance,
+ Operand(offset_in_compressed_words_as_smi, LSL,
+ target::kCompressedWordSizeLog2 - kSmiTagShift));
+ AddImmediate(address, -kHeapObjectTag);
+}
+
void Assembler::LoadFieldAddressForRegOffset(Register address,
Register instance,
Register offset_in_words_as_smi) {
diff --git a/runtime/vm/compiler/assembler/assembler_arm64.h b/runtime/vm/compiler/assembler/assembler_arm64.h
index 1630d87..f99eb33 100644
--- a/runtime/vm/compiler/assembler/assembler_arm64.h
+++ b/runtime/vm/compiler/assembler/assembler_arm64.h
@@ -310,7 +310,6 @@
switch (cid) {
case kArrayCid:
case kImmutableArrayCid:
- return kEightBytes;
case kTypeArgumentsCid:
return kObjectBytes;
case kOneByteStringCid:
@@ -525,11 +524,10 @@
// Pop all registers which are callee-saved according to the ARM64 ABI.
void PopNativeCalleeSavedRegisters();
- void MoveRegister(Register rd, Register rn) {
- if (rd != rn) {
- mov(rd, rn);
- }
- }
+ void ExtendValue(Register rd, Register rn, OperandSize sz) override;
+ void ExtendAndSmiTagValue(Register rd,
+ Register rn,
+ OperandSize sz = kEightBytes) override;
void Drop(intptr_t stack_elements) {
ASSERT(stack_elements >= 0);
@@ -567,12 +565,14 @@
ldar(dst, address);
}
}
- void StoreRelease(Register src, Register address, int32_t offset = 0) {
+ void LoadAcquireCompressed(Register dst,
+ Register address,
+ int32_t offset = 0) {
if (offset != 0) {
AddImmediate(TMP2, address, offset);
- stlr(src, TMP2);
+ ldar(dst, TMP2, kObjectBytes);
} else {
- stlr(src, address);
+ ldar(dst, address, kObjectBytes);
}
}
@@ -1098,8 +1098,12 @@
void csel(Register rd, Register rn, Register rm, Condition cond) {
EmitConditionalSelect(CSEL, rd, rn, rm, cond, kEightBytes);
}
- void csinc(Register rd, Register rn, Register rm, Condition cond) {
- EmitConditionalSelect(CSINC, rd, rn, rm, cond, kEightBytes);
+ void csinc(Register rd,
+ Register rn,
+ Register rm,
+ Condition cond,
+ OperandSize sz = kEightBytes) {
+ EmitConditionalSelect(CSINC, rd, rn, rm, cond, sz);
}
void cinc(Register rd, Register rn, Condition cond) {
csinc(rd, rn, rn, InvertCondition(cond));
@@ -1580,13 +1584,11 @@
void VRecps(VRegister vd, VRegister vn);
void VRSqrts(VRegister vd, VRegister vn);
- void SmiUntag(Register reg) {
- sbfm(reg, reg, kSmiTagSize, target::kSmiBits + 1);
- }
+ void SmiUntag(Register reg) { SmiUntag(reg, reg); }
void SmiUntag(Register dst, Register src) {
sbfm(dst, src, kSmiTagSize, target::kSmiBits + 1);
}
- void SmiTag(Register reg) { LslImmediate(reg, reg, kSmiTagSize); }
+ void SmiTag(Register reg) override { SmiTag(reg, reg); }
void SmiTag(Register dst, Register src) {
LslImmediate(dst, src, kSmiTagSize);
}
@@ -1708,6 +1710,11 @@
int32_t offset) override {
LoadCompressedFromOffset(dest, base, offset - kHeapObjectTag);
}
+ void LoadCompressedSmiFieldFromOffset(Register dest,
+ Register base,
+ int32_t offset) {
+ LoadCompressedSmiFromOffset(dest, base, offset - kHeapObjectTag);
+ }
// For loading indexed payloads out of tagged objects like Arrays. If the
// payload objects are word-sized, use TIMES_HALF_WORD_SIZE if the contents of
// [index] is a Smi, otherwise TIMES_WORD_SIZE if unboxed.
@@ -1770,6 +1777,9 @@
void LoadCompressed(Register dest, const Address& slot);
void LoadCompressedFromOffset(Register dest, Register base, int32_t offset);
void LoadCompressedSmi(Register dest, const Address& slot);
+ void LoadCompressedSmiFromOffset(Register dest,
+ Register base,
+ int32_t offset);
// Store into a heap object and apply the generational and incremental write
// barriers. All stores into heap objects must pass through this function or,
@@ -1790,6 +1800,14 @@
Register slot,
Register value,
CanBeSmi can_value_be_smi = kValueCanBeSmi);
+ void StoreCompressedIntoArray(Register object,
+ Register slot,
+ Register value,
+ CanBeSmi can_value_be_smi = kValueCanBeSmi);
+ void StoreIntoArrayBarrier(Register object,
+ Register slot,
+ Register value,
+ CanBeSmi can_value_be_smi);
void StoreIntoObjectOffset(Register object,
int32_t offset,
@@ -2072,6 +2090,10 @@
Register array,
Register index);
+ void LoadCompressedFieldAddressForRegOffset(Register address,
+ Register instance,
+ Register offset_in_words_as_smi);
+
void LoadFieldAddressForRegOffset(Register address,
Register instance,
Register offset_in_words_as_smi);
diff --git a/runtime/vm/compiler/assembler/assembler_arm_test.cc b/runtime/vm/compiler/assembler/assembler_arm_test.cc
index 0addf7b..504c848 100644
--- a/runtime/vm/compiler/assembler/assembler_arm_test.cc
+++ b/runtime/vm/compiler/assembler/assembler_arm_test.cc
@@ -1765,6 +1765,64 @@
}
}
+// 3 2 1 0
+// 10987654321098765432109876543210
+static constexpr uint32_t kBfxTestBits = 0b00010000001000001000010001001011;
+
+static constexpr int32_t ExpectedUbfxBitPattern(uint8_t lsb, uint8_t width) {
+ ASSERT(width >= 1);
+ ASSERT(width < 32);
+ ASSERT(lsb < 32);
+ ASSERT(lsb + width <= 32);
+ return (kBfxTestBits & (Utils::NBitMask(width) << lsb)) >> lsb;
+}
+
+static constexpr int32_t ExpectedSbfxBitPattern(uint8_t lsb, uint8_t width) {
+ const uint32_t no_extension = ExpectedUbfxBitPattern(lsb, width);
+ const uint32_t sign_extension =
+ Utils::TestBit(no_extension, width - 1) ? ~Utils::NBitMask(width) : 0;
+ return no_extension | sign_extension;
+}
+
+// (lsb, width, extracted bit field is signed)
+#define BFX_TEST_CASES(V) \
+ V(0, 1, true) \
+ V(0, 8, false) \
+ V(0, 11, true) V(0, 19, false) V(3, 20, false) V(10, 19, true) V(31, 1, false)
+
+#define GENERATE_BFX_TEST(L, W, S) \
+ ASSEMBLER_TEST_GENERATE(UbfxLSB##L##Width##W, assembler) { \
+ __ LoadImmediate(R1, kBfxTestBits); \
+ __ ubfx(R0, R1, L, W); \
+ __ Ret(); \
+ } \
+ ASSEMBLER_TEST_RUN(UbfxLSB##L##Width##W, test) { \
+ EXPECT(test != nullptr); \
+ typedef int (*Tst)() DART_UNUSED; \
+ ASSERT((ExpectedUbfxBitPattern(L, W) == ExpectedSbfxBitPattern(L, W)) != \
+ S); \
+ EXPECT_EQ(ExpectedUbfxBitPattern(L, W), \
+ EXECUTE_TEST_CODE_INT32(Tst, test->entry())); \
+ } \
+ ASSEMBLER_TEST_GENERATE(SbfxLSB##L##Width##W, assembler) { \
+ __ LoadImmediate(R1, kBfxTestBits); \
+ __ sbfx(R0, R1, L, W); \
+ __ Ret(); \
+ } \
+ ASSEMBLER_TEST_RUN(SbfxLSB##L##Width##W, test) { \
+ EXPECT(test != nullptr); \
+ typedef int (*Tst)() DART_UNUSED; \
+ ASSERT((ExpectedUbfxBitPattern(L, W) == ExpectedSbfxBitPattern(L, W)) != \
+ S); \
+ EXPECT_EQ(ExpectedSbfxBitPattern(L, W), \
+ EXECUTE_TEST_CODE_INT32(Tst, test->entry())); \
+ }
+
+BFX_TEST_CASES(GENERATE_BFX_TEST)
+
+#undef GENERATE_BFX_TEST
+#undef BFX_TEST_CASES
+
ASSEMBLER_TEST_GENERATE(Udiv, assembler) {
if (TargetCPUFeatures::integer_division_supported()) {
__ mov(R0, Operand(27));
diff --git a/runtime/vm/compiler/assembler/assembler_base.h b/runtime/vm/compiler/assembler/assembler_base.h
index e8b7d89..55258dc 100644
--- a/runtime/vm/compiler/assembler/assembler_base.h
+++ b/runtime/vm/compiler/assembler/assembler_base.h
@@ -563,6 +563,37 @@
virtual void Breakpoint() = 0;
+ virtual void SmiTag(Register r) = 0;
+
+ // Extends a value of size sz in src to a value of size kWordBytes in dst.
+ // That is, bits in the source register that are not part of the sz-sized
+ // value are ignored, and if sz is signed, then the value is sign extended.
+ //
+ // Produces no instructions if dst and src are the same and sz is kWordBytes.
+ virtual void ExtendValue(Register dst, Register src, OperandSize sz) = 0;
+
+ // Extends a value of size sz in src to a tagged Smi value in dst.
+ // That is, bits in the source register that are not part of the sz-sized
+ // value are ignored, and if sz is signed, then the value is sign extended.
+ virtual void ExtendAndSmiTagValue(Register dst,
+ Register src,
+ OperandSize sz) {
+ ExtendValue(dst, src, sz);
+ SmiTag(dst);
+ }
+
+ // Move the contents of src into dst.
+ //
+ // Produces no instructions if dst and src are the same.
+ virtual void MoveRegister(Register dst, Register src) {
+ ExtendValue(dst, src, kWordBytes);
+ }
+
+ // Move the contents of src into dst and tag the value in dst as a Smi.
+ virtual void MoveAndSmiTagRegister(Register dst, Register src) {
+ ExtendAndSmiTagValue(dst, src, kWordBytes);
+ }
+
// Inlined allocation in new space of an instance of an object whose instance
// size is known at compile time with class ID 'cid'. The generated code has
// no runtime calls. Jump to 'failure' if the instance cannot be allocated
diff --git a/runtime/vm/compiler/assembler/assembler_ia32.cc b/runtime/vm/compiler/assembler/assembler_ia32.cc
index 421b17d..05b6d4b 100644
--- a/runtime/vm/compiler/assembler/assembler_ia32.cc
+++ b/runtime/vm/compiler/assembler/assembler_ia32.cc
@@ -1819,9 +1819,48 @@
cmpl(src, Address(ESP, depth * target::kWordSize));
}
-void Assembler::MoveRegister(Register to, Register from) {
- if (to != from) {
- movl(to, from);
+void Assembler::ExtendValue(Register to, Register from, OperandSize sz) {
+ switch (sz) {
+ case kUnsignedFourBytes:
+ case kFourBytes:
+ if (to == from) return; // No operation needed.
+ return movl(to, from);
+ case kUnsignedTwoBytes:
+ return movzxw(to, from);
+ case kTwoBytes:
+ return movsxw(to, from);
+ case kUnsignedByte:
+ switch (from) {
+ case EAX:
+ case EBX:
+ case ECX:
+ case EDX:
+ return movzxb(to, ByteRegisterOf(from));
+ break;
+ default:
+ if (to != from) {
+ movl(to, from);
+ }
+ return andl(to, Immediate(0xFF));
+ }
+ case kByte:
+ switch (from) {
+ case EAX:
+ case EBX:
+ case ECX:
+ case EDX:
+ return movsxb(to, ByteRegisterOf(from));
+ break;
+ default:
+ if (to != from) {
+ movl(to, from);
+ }
+ shll(to, Immediate(24));
+ return sarl(to, Immediate(24));
+ }
+ default:
+ UNIMPLEMENTED();
+ break;
}
}
diff --git a/runtime/vm/compiler/assembler/assembler_ia32.h b/runtime/vm/compiler/assembler/assembler_ia32.h
index 48ab1e1..3726cbc 100644
--- a/runtime/vm/compiler/assembler/assembler_ia32.h
+++ b/runtime/vm/compiler/assembler/assembler_ia32.h
@@ -665,8 +665,7 @@
movl(Address(address, offset), src);
}
- // Issues a move instruction if 'to' is not the same as 'from'.
- void MoveRegister(Register to, Register from);
+ void ExtendValue(Register to, Register from, OperandSize sz) override;
void PushRegister(Register r);
void PopRegister(Register r);
@@ -718,6 +717,8 @@
void CompareObject(Register reg, const Object& object);
void LoadDoubleConstant(XmmRegister dst, double value);
+ void LoadCompressed(Register dest, const Address& slot) { movl(dest, slot); }
+
// Store into a heap object and apply the generational write barrier. (Unlike
// the other architectures, this does not apply the incremental write barrier,
// and so concurrent marking is not enabled for now on IA32.) All stores into
@@ -854,6 +855,12 @@
Register index,
intptr_t extra_disp = 0);
+ void LoadCompressedFieldAddressForRegOffset(Register address,
+ Register instance,
+ Register offset_in_words_as_smi) {
+ LoadFieldAddressForRegOffset(address, instance, offset_in_words_as_smi);
+ }
+
void LoadFieldAddressForRegOffset(Register address,
Register instance,
Register offset_in_words_as_smi) {
@@ -868,7 +875,7 @@
/*
* Misc. functionality
*/
- void SmiTag(Register reg) { addl(reg, reg); }
+ void SmiTag(Register reg) override { addl(reg, reg); }
void SmiUntag(Register reg) { sarl(reg, Immediate(kSmiTagSize)); }
diff --git a/runtime/vm/compiler/assembler/assembler_x64.cc b/runtime/vm/compiler/assembler/assembler_x64.cc
index 956088b..8bc3290 100644
--- a/runtime/vm/compiler/assembler/assembler_x64.cc
+++ b/runtime/vm/compiler/assembler/assembler_x64.cc
@@ -1082,9 +1082,26 @@
cmpq(Address(SPREG, depth * target::kWordSize), src);
}
-void Assembler::MoveRegister(Register to, Register from) {
- if (to != from) {
- movq(to, from);
+void Assembler::ExtendValue(Register to, Register from, OperandSize sz) {
+ switch (sz) {
+ case kEightBytes:
+ if (to == from) return; // No operation needed.
+ return movq(to, from);
+ case kUnsignedFourBytes:
+ return movl(to, from);
+ case kFourBytes:
+ return movsxd(to, from);
+ case kUnsignedTwoBytes:
+ return movzxw(to, from);
+ case kTwoBytes:
+ return movsxw(to, from);
+ case kUnsignedByte:
+ return movzxb(to, from);
+ case kByte:
+ return movsxb(to, from);
+ default:
+ UNIMPLEMENTED();
+ break;
}
}
@@ -1490,12 +1507,26 @@
Register slot,
Register value,
CanBeSmi can_be_smi) {
+ movq(Address(slot, 0), value);
+ StoreIntoArrayBarrier(object, slot, value, can_be_smi);
+}
+
+void Assembler::StoreCompressedIntoArray(Register object,
+ Register slot,
+ Register value,
+ CanBeSmi can_be_smi) {
+ OBJ(mov)(Address(slot, 0), value);
+ StoreIntoArrayBarrier(object, slot, value, can_be_smi);
+}
+
+void Assembler::StoreIntoArrayBarrier(Register object,
+ Register slot,
+ Register value,
+ CanBeSmi can_be_smi) {
ASSERT(object != TMP);
ASSERT(value != TMP);
ASSERT(slot != TMP);
- movq(Address(slot, 0), value);
-
// In parallel, test whether
// - object is old and not remembered and value is new, or
// - object is old and value is old and not marked and concurrent marking is
@@ -1607,11 +1638,12 @@
OBJ(mov)(dest, zero);
}
-void Assembler::IncrementSmiField(const Address& dest, int64_t increment) {
+void Assembler::IncrementCompressedSmiField(const Address& dest,
+ int64_t increment) {
// Note: FlowGraphCompiler::EdgeCounterIncrementSizeInBytes depends on
// the length of this instruction sequence.
Immediate inc_imm(target::ToRawSmi(increment));
- addq(dest, inc_imm);
+ OBJ(add)(dest, inc_imm);
}
void Assembler::Bind(Label* label) {
@@ -1975,14 +2007,12 @@
LoadTaggedClassIdMayBeSmi(RAX, RDX);
- cmpq(RAX, FieldAddress(RBX, cid_offset));
+ OBJ(cmp)(RAX, FieldAddress(RBX, cid_offset));
j(NOT_EQUAL, &miss, Assembler::kNearJump);
- addl(FieldAddress(RBX, count_offset), Immediate(target::ToRawSmi(1)));
+ OBJ(add)(FieldAddress(RBX, count_offset), Immediate(target::ToRawSmi(1)));
xorq(R10, R10); // GC-safe for OptimizeInvokedFunction.
-#if !defined(DART_COMPRESSED_POINTERS)
- nop(1);
-#else
- nop(2);
+#if defined(DART_COMPRESSED_POINTERS)
+ nop(3);
#endif
// Fall through to unchecked entry.
diff --git a/runtime/vm/compiler/assembler/assembler_x64.h b/runtime/vm/compiler/assembler/assembler_x64.h
index 3c8a165..a8b539a 100644
--- a/runtime/vm/compiler/assembler/assembler_x64.h
+++ b/runtime/vm/compiler/assembler/assembler_x64.h
@@ -705,8 +705,7 @@
j(ZERO, label, distance);
}
- // Issues a move instruction if 'to' is not the same as 'from'.
- void MoveRegister(Register to, Register from);
+ void ExtendValue(Register dst, Register src, OperandSize sz) override;
void PushRegister(Register r);
void PopRegister(Register r);
@@ -801,6 +800,10 @@
Register slot, // Where we are storing into.
Register value, // Value we are storing.
CanBeSmi can_be_smi = kValueCanBeSmi);
+ void StoreCompressedIntoArray(Register object, // Object we are storing into.
+ Register slot, // Where we are storing into.
+ Register value, // Value we are storing.
+ CanBeSmi can_be_smi = kValueCanBeSmi);
void StoreIntoObjectNoBarrier(Register object,
const Address& dest,
@@ -825,7 +828,7 @@
void ZeroInitSmiField(const Address& dest);
void ZeroInitCompressedSmiField(const Address& dest);
// Increments a Smi field. Leaves flags in same state as an 'addq'.
- void IncrementSmiField(const Address& dest, int64_t increment);
+ void IncrementCompressedSmiField(const Address& dest, int64_t increment);
void DoubleNegate(XmmRegister dst, XmmRegister src);
void DoubleAbs(XmmRegister dst, XmmRegister src);
@@ -893,7 +896,7 @@
void SmiUntagOrCheckClass(Register object, intptr_t class_id, Label* smi);
// Misc. functionality.
- void SmiTag(Register reg) { OBJ(add)(reg, reg); }
+ void SmiTag(Register reg) override { OBJ(add)(reg, reg); }
void SmiUntag(Register reg) { OBJ(sar)(reg, Immediate(kSmiTagSize)); }
void SmiUntag(Register dst, Register src) {
@@ -1016,6 +1019,9 @@
void LoadMemoryValue(Register dst, Register base, int32_t offset) {
movq(dst, Address(base, offset));
}
+ void LoadCompressedMemoryValue(Register dst, Register base, int32_t offset) {
+ OBJ(mov)(dst, Address(base, offset));
+ }
void StoreMemoryValue(Register src, Register base, int32_t offset) {
movq(Address(base, offset), src);
}
@@ -1024,6 +1030,13 @@
// with other loads).
movq(dst, Address(address, offset));
}
+ void LoadAcquireCompressed(Register dst,
+ Register address,
+ int32_t offset = 0) {
+ // On intel loads have load-acquire behavior (i.e. loads are not re-ordered
+ // with other loads).
+ LoadCompressed(dst, Address(address, offset));
+ }
void StoreRelease(Register src, Register address, int32_t offset = 0) {
// On intel stores have store-release behavior (i.e. stores are not
// re-ordered with other stores).
@@ -1164,6 +1177,14 @@
leaq(address, FieldAddress(instance, offset_in_words_as_smi, TIMES_4, 0));
}
+ void LoadCompressedFieldAddressForRegOffset(Register address,
+ Register instance,
+ Register offset_in_words_as_smi) {
+ static_assert(kSmiTagShift == 1, "adjust scale factor");
+ leaq(address, FieldAddress(instance, offset_in_words_as_smi,
+ TIMES_COMPRESSED_HALF_WORD_SIZE, 0));
+ }
+
static Address VMTagAddress();
// On some other platforms, we draw a distinction between safe and unsafe
@@ -1264,6 +1285,10 @@
Label* label,
CanBeSmi can_be_smi,
BarrierFilterMode barrier_filter_mode);
+ void StoreIntoArrayBarrier(Register object,
+ Register slot,
+ Register value,
+ CanBeSmi can_be_smi = kValueCanBeSmi);
// Unaware of write barrier (use StoreInto* methods for storing to objects).
void MoveImmediate(const Address& dst, const Immediate& imm);
diff --git a/runtime/vm/compiler/assembler/disassembler_arm.cc b/runtime/vm/compiler/assembler/disassembler_arm.cc
index 946c402..51bb568 100644
--- a/runtime/vm/compiler/assembler/disassembler_arm.cc
+++ b/runtime/vm/compiler/assembler/disassembler_arm.cc
@@ -452,11 +452,20 @@
remaining_size_in_buffer(), "0x%x", immed16);
return 7;
}
- case 'l': { // 'l: branch and link
- if (instr->HasLink()) {
- Print("l");
+ case 'l': {
+ if (format[1] == 's') {
+ ASSERT(STRING_STARTS_WITH(format, "lsb"));
+ buffer_pos_ += Utils::SNPrint(current_position_in_buffer(),
+ remaining_size_in_buffer(), "%u",
+ instr->BitFieldExtractLSBField());
+ return 3;
+ } else {
+ // 'l: branch and link
+ if (instr->HasLink()) {
+ Print("l");
+ }
+ return 1;
}
- return 1;
}
case 'm': { // 'memop: load/store instructions
ASSERT(STRING_STARTS_WITH(format, "memop"));
@@ -571,11 +580,22 @@
}
return 1;
}
- case 'w': { // 'w: W field of load and store instructions.
- if (instr->HasW()) {
- Print("!");
+ case 'w': {
+ if (format[1] == 'i') {
+ ASSERT(STRING_STARTS_WITH(format, "width"));
+ // 'width: width field of bit field extract instructions
+ // (field value in encoding is 1 less than in mnemonic)
+ buffer_pos_ = Utils::SNPrint(current_position_in_buffer(),
+ remaining_size_in_buffer(), "%u",
+ instr->BitFieldExtractWidthField() + 1);
+ return 5;
+ } else {
+ // 'w: W field of load and store instructions.
+ if (instr->HasW()) {
+ Print("!");
+ }
+ return 1;
}
- return 1;
}
case 'x': { // 'x: type of extra load/store instructions.
if (!instr->HasSign()) {
@@ -913,15 +933,37 @@
}
void ARMDecoder::DecodeType3(Instr* instr) {
- if (instr->IsDivision()) {
- if (!TargetCPUFeatures::integer_division_supported()) {
- Unknown(instr);
- return;
- }
- if (instr->Bit(21)) {
- Format(instr, "udiv'cond 'rn, 'rs, 'rm");
+ if (instr->IsMedia()) {
+ if (instr->IsDivision()) {
+ if (!TargetCPUFeatures::integer_division_supported()) {
+ Unknown(instr);
+ return;
+ }
+ // Check differences between A8.8.{165,248} and FormatRegister.
+ static_assert(kDivRdShift == kRnShift,
+ "div 'rd does not corresspond to 'rn");
+ static_assert(kDivRmShift == kRsShift,
+ "div 'rm does not corresspond to 'rs");
+ static_assert(kDivRnShift == kRmShift,
+ "div 'rn does not corresspond to 'rm");
+ if (instr->IsDivUnsigned()) {
+ Format(instr, "udiv'cond 'rn, 'rs, 'rm");
+ } else {
+ Format(instr, "sdiv'cond 'rn, 'rs, 'rm");
+ }
+ } else if (instr->IsRbit()) {
+ Format(instr, "rbit'cond 'rd, 'rm");
+ } else if (instr->IsBitFieldExtract()) {
+ // Check differences between A8.8.{164,246} and FormatRegister.
+ static_assert(kBitFieldExtractRnShift == kRmShift,
+ "bfx 'rn does not correspond to 'rm");
+ if (instr->IsBitFieldExtractSignExtended()) {
+ Format(instr, "sbfx'cond 'rd, 'rm, 'lsb, 'width");
+ } else {
+ Format(instr, "ubfx'cond 'rd, 'rm, 'lsb, 'width");
+ }
} else {
- Format(instr, "sdiv'cond 'rn, 'rs, 'rm");
+ UNREACHABLE();
}
return;
}
diff --git a/runtime/vm/compiler/backend/constant_propagator.cc b/runtime/vm/compiler/backend/constant_propagator.cc
index 88d1f2f..da234e4 100644
--- a/runtime/vm/compiler/backend/constant_propagator.cc
+++ b/runtime/vm/compiler/backend/constant_propagator.cc
@@ -1426,7 +1426,7 @@
}
}
-void ConstantPropagator::VisitBoxUint8(BoxUint8Instr* instr) {
+void ConstantPropagator::VisitBoxSmallInt(BoxSmallIntInstr* instr) {
VisitBox(instr);
}
diff --git a/runtime/vm/compiler/backend/flow_graph.cc b/runtime/vm/compiler/backend/flow_graph.cc
index 0ccabb1..27ab909 100644
--- a/runtime/vm/compiler/backend/flow_graph.cc
+++ b/runtime/vm/compiler/backend/flow_graph.cc
@@ -276,13 +276,6 @@
entry->initial_definitions()->Add(defn);
}
-void FlowGraph::InsertBefore(Instruction* next,
- Instruction* instr,
- Environment* env,
- UseKind use_kind) {
- InsertAfter(next->previous(), instr, env, use_kind);
-}
-
void FlowGraph::InsertAfter(Instruction* prev,
Instruction* instr,
Environment* env,
@@ -298,6 +291,16 @@
}
}
+void FlowGraph::InsertSpeculativeAfter(Instruction* prev,
+ Instruction* instr,
+ Environment* env,
+ UseKind use_kind) {
+ InsertAfter(prev, instr, env, use_kind);
+ if (instr->env() != nullptr) {
+ instr->env()->MarkAsLazyDeoptToBeforeDeoptId();
+ }
+}
+
Instruction* FlowGraph::AppendTo(Instruction* prev,
Instruction* instr,
Environment* env,
@@ -312,6 +315,16 @@
}
return prev->AppendInstruction(instr);
}
+Instruction* FlowGraph::AppendSpeculativeTo(Instruction* prev,
+ Instruction* instr,
+ Environment* env,
+ UseKind use_kind) {
+ auto result = AppendTo(prev, instr, env, use_kind);
+ if (instr->env() != nullptr) {
+ instr->env()->MarkAsLazyDeoptToBeforeDeoptId();
+ }
+ return result;
+}
// A wrapper around block entries including an index of the next successor to
// be read.
diff --git a/runtime/vm/compiler/backend/flow_graph.h b/runtime/vm/compiler/backend/flow_graph.h
index 4e72259..fa930ed 100644
--- a/runtime/vm/compiler/backend/flow_graph.h
+++ b/runtime/vm/compiler/backend/flow_graph.h
@@ -304,15 +304,55 @@
void InsertBefore(Instruction* next,
Instruction* instr,
Environment* env,
- UseKind use_kind);
+ UseKind use_kind) {
+ InsertAfter(next->previous(), instr, env, use_kind);
+ }
+ void InsertSpeculativeBefore(Instruction* next,
+ Instruction* instr,
+ Environment* env,
+ UseKind use_kind) {
+ InsertSpeculativeAfter(next->previous(), instr, env, use_kind);
+ }
void InsertAfter(Instruction* prev,
Instruction* instr,
Environment* env,
UseKind use_kind);
+
+ // Inserts a speculative [instr] after existing [prev] instruction.
+ //
+ // If the inserted [instr] deopts eagerly or lazily we will always continue in
+ // unoptimized code at before-call using the given [env].
+ //
+ // This is mainly used during inlining / call specializing when replacing
+ // calls with N specialized instructions where the inserted [1..N[
+ // instructions cannot continue in unoptimized code after-call since they
+ // would miss instructions following the one that lazy-deopted.
+ //
+ // For example specializing an instance call to an implicit field setter
+ //
+ // InstanceCall:<id>(v0, set:<name>, args = [v1])
+ //
+ // to
+ //
+ // v2 <- AssertAssignable:<id>(v1, ...)
+ // StoreInstanceField(v0, v2)
+ //
+ // If the [AssertAssignable] causes a lazy-deopt on return, we'll have to
+ // *re-try* the implicit setter call in unoptimized mode, i.e. lazy deopt to
+ // before-call (otherwise - if we continued after-call - the
+ // StoreInstanceField would not be performed).
+ void InsertSpeculativeAfter(Instruction* prev,
+ Instruction* instr,
+ Environment* env,
+ UseKind use_kind);
Instruction* AppendTo(Instruction* prev,
Instruction* instr,
Environment* env,
UseKind use_kind);
+ Instruction* AppendSpeculativeTo(Instruction* prev,
+ Instruction* instr,
+ Environment* env,
+ UseKind use_kind);
// Operations on the flow graph.
void ComputeSSA(intptr_t next_virtual_register_number,
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler.cc b/runtime/vm/compiler/backend/flow_graph_compiler.cc
index 0094d89..6b2d7d8 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler.cc
@@ -3142,16 +3142,15 @@
rep = kTagged;
}
ASSERT(!is_optimizing());
- ASSERT((rep == kTagged) || (rep == kUntagged) || (rep == kUnboxedUint32) ||
- (rep == kUnboxedUint8));
+ ASSERT((rep == kTagged) || (rep == kUntagged) ||
+ RepresentationUtils::IsUnboxedInteger(rep));
ASSERT(rep != kUntagged || flow_graph_.IsIrregexpFunction());
const auto& function = flow_graph_.parsed_function().function();
- // Currently, we only allow unboxed uint8 and uint32 on the stack in
- // unoptimized code when building a dynamic closure call dispatcher, where
- // any unboxed values on the stack are consumed before possible
- // FrameStateIsSafeToCall() checks.
+ // Currently, we only allow unboxed integers on the stack in unoptimized code
+ // when building a dynamic closure call dispatcher, where any unboxed values
+ // on the stack are consumed before possible FrameStateIsSafeToCall() checks.
// See FlowGraphBuilder::BuildDynamicCallVarsInit().
- ASSERT((rep != kUnboxedUint32 && rep != kUnboxedUint8) ||
+ ASSERT(!RepresentationUtils::IsUnboxedInteger(rep) ||
function.IsDynamicClosureCallDispatcher(thread()));
frame_state_.Add(rep);
}
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc b/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc
index d97a36b..6f257d1 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc
@@ -472,9 +472,10 @@
ASSERT(assembler_->constant_pool_allowed());
__ Comment("Edge counter");
__ LoadObject(R0, edge_counters_array_);
- __ LoadFieldFromOffset(TMP, R0, Array::element_offset(edge_id));
- __ add(TMP, TMP, compiler::Operand(Smi::RawValue(1)));
- __ StoreFieldToOffset(TMP, R0, Array::element_offset(edge_id));
+ __ LoadCompressedSmiFieldFromOffset(TMP, R0, Array::element_offset(edge_id));
+ __ add(TMP, TMP, compiler::Operand(Smi::RawValue(1)), compiler::kObjectBytes);
+ __ StoreFieldToOffset(TMP, R0, Array::element_offset(edge_id),
+ compiler::kObjectBytes);
}
void FlowGraphCompiler::EmitOptimizedInstanceCall(
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc b/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc
index 01ee817..3b5345d 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc
@@ -491,7 +491,7 @@
ASSERT(assembler_->constant_pool_allowed());
__ Comment("Edge counter");
__ LoadObject(RAX, edge_counters_array_);
- __ IncrementSmiField(
+ __ IncrementCompressedSmiField(
compiler::FieldAddress(RAX, Array::element_offset(edge_id)), 1);
}
diff --git a/runtime/vm/compiler/backend/il.cc b/runtime/vm/compiler/backend/il.cc
index b65dc1a..49131ee 100644
--- a/runtime/vm/compiler/backend/il.cc
+++ b/runtime/vm/compiler/backend/il.cc
@@ -3735,13 +3735,20 @@
BoxInstr* BoxInstr::Create(Representation from, Value* value) {
switch (from) {
case kUnboxedUint8:
- return new BoxUint8Instr(value);
+ case kUnboxedUint16:
+#if defined(TARGET_ARCH_IS_64_BIT) && !defined(DART_COMPRESSED_POINTERS)
+ case kUnboxedInt32:
+ case kUnboxedUint32:
+#endif
+ return new BoxSmallIntInstr(from, value);
+#if defined(TARGET_ARCH_IS_32_BIT) || defined(DART_COMPRESSED_POINTERS)
case kUnboxedInt32:
return new BoxInt32Instr(value);
case kUnboxedUint32:
return new BoxUint32Instr(value);
+#endif
case kUnboxedInt64:
return new BoxInt64Instr(value);
@@ -4608,6 +4615,28 @@
__ Drop(num_temps());
}
+LocationSummary* BoxSmallIntInstr::MakeLocationSummary(Zone* zone,
+ bool opt) const {
+ ASSERT(RepresentationUtils::ValueSize(from_representation()) * kBitsPerByte <=
+ compiler::target::kSmiBits);
+ const intptr_t kNumInputs = 1;
+ const intptr_t kNumTemps = 0;
+ LocationSummary* summary = new (zone)
+ LocationSummary(zone, kNumInputs, kNumTemps, LocationSummary::kNoCall);
+ summary->set_in(0, Location::RequiresRegister());
+ summary->set_out(0, Location::RequiresRegister());
+ return summary;
+}
+
+void BoxSmallIntInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
+ const Register value = locs()->in(0).reg();
+ const Register out = locs()->out(0).reg();
+ ASSERT(value != out);
+
+ __ ExtendAndSmiTagValue(
+ out, value, RepresentationUtils::OperandSize(from_representation()));
+}
+
StrictCompareInstr::StrictCompareInstr(const InstructionSource& source,
Token::Kind kind,
Value* left,
diff --git a/runtime/vm/compiler/backend/il.h b/runtime/vm/compiler/backend/il.h
index 9a6ac0d..5f22ad8 100644
--- a/runtime/vm/compiler/backend/il.h
+++ b/runtime/vm/compiler/backend/il.h
@@ -499,7 +499,7 @@
M(UnboxUint32, kNoGC) \
M(BoxInt32, _) \
M(UnboxInt32, kNoGC) \
- M(BoxUint8, kNoGC) \
+ M(BoxSmallInt, kNoGC) \
M(IntConverter, kNoGC) \
M(BitCast, kNoGC) \
M(Deoptimize, kNoGC) \
@@ -5720,7 +5720,6 @@
virtual Definition* Canonicalize(FlowGraph* flow_graph);
-
private:
const bool index_unboxed_;
const intptr_t index_scale_;
@@ -6030,7 +6029,6 @@
virtual Instruction* Canonicalize(FlowGraph* flow_graph);
-
private:
compiler::Assembler::CanBeSmi CanValueBeSmi() const {
return compiler::Assembler::kValueCanBeSmi;
@@ -7060,18 +7058,22 @@
DISALLOW_COPY_AND_ASSIGN(BoxIntegerInstr);
};
-class BoxUint8Instr : public BoxIntegerInstr {
+class BoxSmallIntInstr : public BoxIntegerInstr {
public:
- explicit BoxUint8Instr(Value* value)
- : BoxIntegerInstr(kUnboxedUint8, value) {}
+ explicit BoxSmallIntInstr(Representation rep, Value* value)
+ : BoxIntegerInstr(rep, value) {
+ ASSERT(RepresentationUtils::ValueSize(rep) * kBitsPerByte <=
+ compiler::target::kSmiBits);
+ }
virtual bool ValueFitsSmi() const { return true; }
- DECLARE_INSTRUCTION(BoxUint8)
+ DECLARE_INSTRUCTION(BoxSmallInt)
private:
- DISALLOW_COPY_AND_ASSIGN(BoxUint8Instr);
+ DISALLOW_COPY_AND_ASSIGN(BoxSmallIntInstr);
};
+
class BoxInteger32Instr : public BoxIntegerInstr {
public:
BoxInteger32Instr(Representation representation, Value* value)
diff --git a/runtime/vm/compiler/backend/il_arm.cc b/runtime/vm/compiler/backend/il_arm.cc
index 07999cc..6b44b0d 100644
--- a/runtime/vm/compiler/backend/il_arm.cc
+++ b/runtime/vm/compiler/backend/il_arm.cc
@@ -4731,26 +4731,6 @@
__ Bind(&done);
}
-LocationSummary* BoxUint8Instr::MakeLocationSummary(Zone* zone,
- bool opt) const {
- ASSERT(from_representation() == kUnboxedUint8);
- const intptr_t kNumInputs = 1;
- const intptr_t kNumTemps = 0;
- LocationSummary* summary = new (zone)
- LocationSummary(zone, kNumInputs, kNumTemps, LocationSummary::kNoCall);
- summary->set_in(0, Location::RequiresRegister());
- summary->set_out(0, Location::RequiresRegister());
- return summary;
-}
-
-void BoxUint8Instr::EmitNativeCode(FlowGraphCompiler* compiler) {
- const Register value = locs()->in(0).reg();
- const Register out = locs()->out(0).reg();
-
- __ AndImmediate(out, value, 0xff);
- __ SmiTag(out);
-}
-
LocationSummary* BoxInteger32Instr::MakeLocationSummary(Zone* zone,
bool opt) const {
ASSERT((from_representation() == kUnboxedInt32) ||
diff --git a/runtime/vm/compiler/backend/il_arm64.cc b/runtime/vm/compiler/backend/il_arm64.cc
index b66dd0b..0f09726 100644
--- a/runtime/vm/compiler/backend/il_arm64.cc
+++ b/runtime/vm/compiler/backend/il_arm64.cc
@@ -1685,9 +1685,19 @@
decoder_reg = decoder_location.reg();
}
const auto scan_flags_field_offset = scan_flags_field_.offset_in_bytes();
- __ LoadFieldFromOffset(flags_temp_reg, decoder_reg, scan_flags_field_offset);
- __ orr(flags_temp_reg, flags_temp_reg, compiler::Operand(flags_reg));
- __ StoreFieldToOffset(flags_temp_reg, decoder_reg, scan_flags_field_offset);
+ if (scan_flags_field_.is_compressed() && !IsScanFlagsUnboxed()) {
+ __ LoadCompressedSmiFieldFromOffset(flags_temp_reg, decoder_reg,
+ scan_flags_field_offset);
+ __ orr(flags_temp_reg, flags_temp_reg, compiler::Operand(flags_reg),
+ compiler::kObjectBytes);
+ __ StoreFieldToOffset(flags_temp_reg, decoder_reg, scan_flags_field_offset,
+ compiler::kObjectBytes);
+ } else {
+ __ LoadFieldFromOffset(flags_temp_reg, decoder_reg,
+ scan_flags_field_offset);
+ __ orr(flags_temp_reg, flags_temp_reg, compiler::Operand(flags_reg));
+ __ StoreFieldToOffset(flags_temp_reg, decoder_reg, scan_flags_field_offset);
+ }
}
LocationSummary* LoadUntaggedInstr::MakeLocationSummary(Zone* zone,
@@ -1831,11 +1841,7 @@
ASSERT(representation() == kTagged);
ASSERT((class_id() == kArrayCid) || (class_id() == kImmutableArrayCid) ||
(class_id() == kTypeArgumentsCid));
- if (class_id() == kTypeArgumentsCid) {
- __ LoadCompressed(result, element_address);
- } else {
- __ ldr(result, element_address);
- }
+ __ LoadCompressed(result, element_address);
break;
}
}
@@ -1977,7 +1983,7 @@
Smi::Cast(index.constant()).Value());
}
const Register value = locs()->in(2).reg();
- __ StoreIntoArray(array, temp, value, CanValueBeSmi());
+ __ StoreCompressedIntoArray(array, temp, value, CanValueBeSmi());
return;
}
@@ -1994,10 +2000,10 @@
ASSERT(!ShouldEmitStoreBarrier()); // Specially treated above.
if (locs()->in(2).IsConstant()) {
const Object& constant = locs()->in(2).constant();
- __ StoreIntoObjectNoBarrier(array, element_address, constant);
+ __ StoreCompressedIntoObjectNoBarrier(array, element_address, constant);
} else {
const Register value = locs()->in(2).reg();
- __ StoreIntoObjectNoBarrier(array, element_address, value);
+ __ StoreCompressedIntoObjectNoBarrier(array, element_address, value);
}
break;
case kTypedDataInt8ArrayCid:
@@ -2365,13 +2371,13 @@
intptr_t offset,
Register temp) {
compiler::Label done;
- __ LoadFieldFromOffset(box_reg, instance_reg, offset);
+ __ LoadCompressedFieldFromOffset(box_reg, instance_reg, offset);
__ CompareObject(box_reg, Object::null_object());
__ b(&done, NE);
BoxAllocationSlowPath::Allocate(compiler, instruction, cls, box_reg, temp);
__ MoveRegister(temp, box_reg);
- __ StoreIntoObjectOffset(instance_reg, offset, temp,
- compiler::Assembler::kValueIsNotSmi);
+ __ StoreCompressedIntoObjectOffset(instance_reg, offset, temp,
+ compiler::Assembler::kValueIsNotSmi);
__ Bind(&done);
}
@@ -2481,10 +2487,10 @@
BoxAllocationSlowPath::Allocate(compiler, this, *cls, temp, temp2);
__ MoveRegister(temp2, temp);
- __ StoreIntoObjectOffset(instance_reg, offset_in_bytes, temp2,
- compiler::Assembler::kValueIsNotSmi);
+ __ StoreCompressedIntoObjectOffset(instance_reg, offset_in_bytes, temp2,
+ compiler::Assembler::kValueIsNotSmi);
} else {
- __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes);
+ __ LoadCompressedFieldFromOffset(temp, instance_reg, offset_in_bytes);
}
switch (cid) {
case kDoubleCid:
@@ -2698,17 +2704,18 @@
// R3: new object end address.
// Store the type argument field.
- __ StoreIntoObjectNoBarrier(
+ __ StoreCompressedIntoObjectNoBarrier(
AllocateArrayABI::kResultReg,
compiler::FieldAddress(AllocateArrayABI::kResultReg,
- Array::type_arguments_offset()),
+ Array::type_arguments_offset(),
+ compiler::kObjectBytes),
AllocateArrayABI::kTypeArgumentsReg);
// Set the length field.
- __ StoreIntoObjectNoBarrier(
+ __ StoreCompressedIntoObjectNoBarrier(
AllocateArrayABI::kResultReg,
compiler::FieldAddress(AllocateArrayABI::kResultReg,
- Array::length_offset()),
+ Array::length_offset(), compiler::kObjectBytes),
AllocateArrayABI::kLengthReg);
// TODO(zra): Use stp once added.
@@ -2723,19 +2730,20 @@
__ LoadObject(R6, Object::null_object());
__ AddImmediate(R8, AllocateArrayABI::kResultReg,
sizeof(UntaggedArray) - kHeapObjectTag);
- if (array_size < (kInlineArraySize * kWordSize)) {
+ if (array_size < (kInlineArraySize * kCompressedWordSize)) {
intptr_t current_offset = 0;
while (current_offset < array_size) {
- __ str(R6, compiler::Address(R8, current_offset));
- current_offset += kWordSize;
+ __ str(R6, compiler::Address(R8, current_offset),
+ compiler::kObjectBytes);
+ current_offset += kCompressedWordSize;
}
} else {
compiler::Label end_loop, init_loop;
__ Bind(&init_loop);
__ CompareRegisters(R8, R3);
__ b(&end_loop, CS);
- __ str(R6, compiler::Address(R8));
- __ AddImmediate(R8, kWordSize);
+ __ str(R6, compiler::Address(R8), compiler::kObjectBytes);
+ __ AddImmediate(R8, kCompressedWordSize);
__ b(&init_loop);
__ Bind(&end_loop);
}
@@ -2886,7 +2894,7 @@
const Register temp = locs()->temp(0).reg();
- __ LoadFieldFromOffset(temp, instance_reg, OffsetInBytes());
+ __ LoadCompressedFieldFromOffset(temp, instance_reg, OffsetInBytes());
switch (cid) {
case kDoubleCid:
__ Comment("UnboxedDoubleLoadFieldInstr");
@@ -2948,7 +2956,7 @@
__ Bind(&load_double);
BoxAllocationSlowPath::Allocate(compiler, this, compiler->double_class(),
result_reg, temp);
- __ LoadFieldFromOffset(temp, instance_reg, OffsetInBytes());
+ __ LoadCompressedFieldFromOffset(temp, instance_reg, OffsetInBytes());
__ LoadDFieldFromOffset(VTMP, temp, Double::value_offset());
__ StoreDFieldToOffset(VTMP, result_reg, Double::value_offset());
__ b(&done);
@@ -2958,7 +2966,7 @@
__ Bind(&load_float32x4);
BoxAllocationSlowPath::Allocate(
compiler, this, compiler->float32x4_class(), result_reg, temp);
- __ LoadFieldFromOffset(temp, instance_reg, OffsetInBytes());
+ __ LoadCompressedFieldFromOffset(temp, instance_reg, OffsetInBytes());
__ LoadQFieldFromOffset(VTMP, temp, Float32x4::value_offset());
__ StoreQFieldToOffset(VTMP, result_reg, Float32x4::value_offset());
__ b(&done);
@@ -2968,7 +2976,7 @@
__ Bind(&load_float64x2);
BoxAllocationSlowPath::Allocate(
compiler, this, compiler->float64x2_class(), result_reg, temp);
- __ LoadFieldFromOffset(temp, instance_reg, OffsetInBytes());
+ __ LoadCompressedFieldFromOffset(temp, instance_reg, OffsetInBytes());
__ LoadQFieldFromOffset(VTMP, temp, Float64x2::value_offset());
__ StoreQFieldToOffset(VTMP, result_reg, Float64x2::value_offset());
__ b(&done);
@@ -3921,27 +3929,6 @@
__ Bind(&done);
}
-LocationSummary* BoxUint8Instr::MakeLocationSummary(Zone* zone,
- bool opt) const {
- ASSERT(from_representation() == kUnboxedUint8);
- const intptr_t kNumInputs = 1;
- const intptr_t kNumTemps = 0;
- LocationSummary* summary = new (zone)
- LocationSummary(zone, kNumInputs, kNumTemps, LocationSummary::kNoCall);
- summary->set_in(0, Location::RequiresRegister());
- summary->set_out(0, Location::RequiresRegister());
- return summary;
-}
-
-void BoxUint8Instr::EmitNativeCode(FlowGraphCompiler* compiler) {
- const Register value = locs()->in(0).reg();
- const Register out = locs()->out(0).reg();
- ASSERT(value != out);
-
- ASSERT(compiler::target::kSmiBits >= 8);
- __ ubfiz(out, value, kSmiTagSize, 8);
-}
-
LocationSummary* BoxInteger32Instr::MakeLocationSummary(Zone* zone,
bool opt) const {
ASSERT((from_representation() == kUnboxedInt32) ||
diff --git a/runtime/vm/compiler/backend/il_ia32.cc b/runtime/vm/compiler/backend/il_ia32.cc
index d51429f..d16dab4 100644
--- a/runtime/vm/compiler/backend/il_ia32.cc
+++ b/runtime/vm/compiler/backend/il_ia32.cc
@@ -3914,28 +3914,6 @@
__ Bind(&done);
}
-LocationSummary* BoxUint8Instr::MakeLocationSummary(Zone* zone,
- bool opt) const {
- ASSERT(from_representation() == kUnboxedUint8);
- const intptr_t kNumInputs = 1;
- const intptr_t kNumTemps = 0;
- LocationSummary* summary = new (zone)
- LocationSummary(zone, kNumInputs, kNumTemps, LocationSummary::kNoCall);
- summary->set_in(0, Location::RequiresRegister());
- summary->set_out(0, Location::RequiresRegister());
- return summary;
-}
-
-void BoxUint8Instr::EmitNativeCode(FlowGraphCompiler* compiler) {
- const Register value = locs()->in(0).reg();
- const Register out = locs()->out(0).reg();
- ASSERT(value != out);
-
- __ MoveRegister(out, value);
- __ andl(out, compiler::Immediate(0xff));
- __ SmiTag(out);
-}
-
LocationSummary* BoxInteger32Instr::MakeLocationSummary(Zone* zone,
bool opt) const {
const intptr_t kNumInputs = 1;
diff --git a/runtime/vm/compiler/backend/il_printer.cc b/runtime/vm/compiler/backend/il_printer.cc
index 1bfffd0..1c8b3f8 100644
--- a/runtime/vm/compiler/backend/il_printer.cc
+++ b/runtime/vm/compiler/backend/il_printer.cc
@@ -936,6 +936,8 @@
return "float";
case kUnboxedUint8:
return "uint8";
+ case kUnboxedUint16:
+ return "uint16";
case kUnboxedInt32:
return "int32";
case kUnboxedUint32:
diff --git a/runtime/vm/compiler/backend/il_x64.cc b/runtime/vm/compiler/backend/il_x64.cc
index 0fb1260..ca85ce7 100644
--- a/runtime/vm/compiler/backend/il_x64.cc
+++ b/runtime/vm/compiler/backend/il_x64.cc
@@ -1661,8 +1661,13 @@
decoder_reg = decoder_location.reg();
}
const auto scan_flags_field_offset = scan_flags_field_.offset_in_bytes();
- __ orq(compiler::FieldAddress(decoder_reg, scan_flags_field_offset),
- flags_reg);
+ if (scan_flags_field_.is_compressed() && !IsScanFlagsUnboxed()) {
+ __ OBJ(or)(compiler::FieldAddress(decoder_reg, scan_flags_field_offset),
+ flags_reg);
+ } else {
+ __ orq(compiler::FieldAddress(decoder_reg, scan_flags_field_offset),
+ flags_reg);
+ }
}
LocationSummary* LoadUntaggedInstr::MakeLocationSummary(Zone* zone,
@@ -1809,11 +1814,7 @@
ASSERT(representation() == kTagged);
ASSERT((class_id() == kArrayCid) || (class_id() == kImmutableArrayCid) ||
(class_id() == kTypeArgumentsCid));
- if (class_id() == kTypeArgumentsCid) {
- __ LoadCompressed(result, element_address);
- } else {
- __ movq(result, element_address);
- }
+ __ LoadCompressed(result, element_address);
break;
}
}
@@ -2007,13 +2008,13 @@
Register value = locs()->in(2).reg();
Register slot = locs()->temp(0).reg();
__ leaq(slot, element_address);
- __ StoreIntoArray(array, slot, value, CanValueBeSmi());
+ __ StoreCompressedIntoArray(array, slot, value, CanValueBeSmi());
} else if (locs()->in(2).IsConstant()) {
const Object& constant = locs()->in(2).constant();
- __ StoreIntoObjectNoBarrier(array, element_address, constant);
+ __ StoreCompressedIntoObjectNoBarrier(array, element_address, constant);
} else {
Register value = locs()->in(2).reg();
- __ StoreIntoObjectNoBarrier(array, element_address, value);
+ __ StoreCompressedIntoObjectNoBarrier(array, element_address, value);
}
break;
case kOneByteStringCid:
@@ -2482,13 +2483,14 @@
intptr_t offset,
Register temp) {
compiler::Label done;
- __ movq(box_reg, compiler::FieldAddress(instance_reg, offset));
+ __ LoadCompressed(box_reg, compiler::FieldAddress(instance_reg, offset));
__ CompareObject(box_reg, Object::null_object());
__ j(NOT_EQUAL, &done);
BoxAllocationSlowPath::Allocate(compiler, instruction, cls, box_reg, temp);
__ movq(temp, box_reg);
- __ StoreIntoObject(instance_reg, compiler::FieldAddress(instance_reg, offset),
- temp, compiler::Assembler::kValueIsNotSmi);
+ __ StoreCompressedIntoObject(instance_reg,
+ compiler::FieldAddress(instance_reg, offset),
+ temp, compiler::Assembler::kValueIsNotSmi);
__ Bind(&done);
}
@@ -2562,11 +2564,12 @@
BoxAllocationSlowPath::Allocate(compiler, this, *cls, temp, temp2);
__ movq(temp2, temp);
- __ StoreIntoObject(instance_reg,
- compiler::FieldAddress(instance_reg, offset_in_bytes),
- temp2, compiler::Assembler::kValueIsNotSmi);
+ __ StoreCompressedIntoObject(
+ instance_reg, compiler::FieldAddress(instance_reg, offset_in_bytes),
+ temp2, compiler::Assembler::kValueIsNotSmi);
} else {
- __ movq(temp, compiler::FieldAddress(instance_reg, offset_in_bytes));
+ __ LoadCompressed(temp,
+ compiler::FieldAddress(instance_reg, offset_in_bytes));
}
switch (cid) {
case kDoubleCid:
@@ -2793,14 +2796,14 @@
// RAX: new object start as a tagged pointer.
// Store the type argument field.
- __ StoreIntoObjectNoBarrier(
+ __ StoreCompressedIntoObjectNoBarrier(
AllocateArrayABI::kResultReg,
compiler::FieldAddress(AllocateArrayABI::kResultReg,
Array::type_arguments_offset()),
AllocateArrayABI::kTypeArgumentsReg);
// Set the length field.
- __ StoreIntoObjectNoBarrier(
+ __ StoreCompressedIntoObjectNoBarrier(
AllocateArrayABI::kResultReg,
compiler::FieldAddress(AllocateArrayABI::kResultReg,
Array::length_offset()),
@@ -2816,20 +2819,20 @@
__ LoadObject(R12, Object::null_object());
__ leaq(RDI, compiler::FieldAddress(AllocateArrayABI::kResultReg,
sizeof(UntaggedArray)));
- if (array_size < (kInlineArraySize * kWordSize)) {
+ if (array_size < (kInlineArraySize * kCompressedWordSize)) {
intptr_t current_offset = 0;
while (current_offset < array_size) {
- __ StoreIntoObjectNoBarrier(AllocateArrayABI::kResultReg,
- compiler::Address(RDI, current_offset),
- R12);
- current_offset += kWordSize;
+ __ StoreCompressedIntoObjectNoBarrier(
+ AllocateArrayABI::kResultReg,
+ compiler::Address(RDI, current_offset), R12);
+ current_offset += kCompressedWordSize;
}
} else {
compiler::Label init_loop;
__ Bind(&init_loop);
- __ StoreIntoObjectNoBarrier(AllocateArrayABI::kResultReg,
- compiler::Address(RDI, 0), R12);
- __ addq(RDI, compiler::Immediate(kWordSize));
+ __ StoreCompressedIntoObjectNoBarrier(AllocateArrayABI::kResultReg,
+ compiler::Address(RDI, 0), R12);
+ __ addq(RDI, compiler::Immediate(kCompressedWordSize));
__ cmpq(RDI, RCX);
__ j(BELOW, &init_loop, compiler::Assembler::kNearJump);
}
@@ -2986,7 +2989,8 @@
}
Register temp = locs()->temp(0).reg();
- __ movq(temp, compiler::FieldAddress(instance_reg, OffsetInBytes()));
+ __ LoadCompressed(temp,
+ compiler::FieldAddress(instance_reg, OffsetInBytes()));
switch (cid) {
case kDoubleCid:
__ Comment("UnboxedDoubleLoadFieldInstr");
@@ -3049,7 +3053,8 @@
__ Bind(&load_double);
BoxAllocationSlowPath::Allocate(compiler, this, compiler->double_class(),
result, temp);
- __ movq(temp, compiler::FieldAddress(instance_reg, OffsetInBytes()));
+ __ LoadCompressed(temp,
+ compiler::FieldAddress(instance_reg, OffsetInBytes()));
__ movsd(value, compiler::FieldAddress(temp, Double::value_offset()));
__ movsd(compiler::FieldAddress(result, Double::value_offset()), value);
__ jmp(&done);
@@ -3059,7 +3064,8 @@
__ Bind(&load_float32x4);
BoxAllocationSlowPath::Allocate(
compiler, this, compiler->float32x4_class(), result, temp);
- __ movq(temp, compiler::FieldAddress(instance_reg, OffsetInBytes()));
+ __ LoadCompressed(temp,
+ compiler::FieldAddress(instance_reg, OffsetInBytes()));
__ movups(value, compiler::FieldAddress(temp, Float32x4::value_offset()));
__ movups(compiler::FieldAddress(result, Float32x4::value_offset()),
value);
@@ -3070,7 +3076,8 @@
__ Bind(&load_float64x2);
BoxAllocationSlowPath::Allocate(
compiler, this, compiler->float64x2_class(), result, temp);
- __ movq(temp, compiler::FieldAddress(instance_reg, OffsetInBytes()));
+ __ LoadCompressed(temp,
+ compiler::FieldAddress(instance_reg, OffsetInBytes()));
__ movups(value, compiler::FieldAddress(temp, Float64x2::value_offset()));
__ movups(compiler::FieldAddress(result, Float64x2::value_offset()),
value);
@@ -4327,27 +4334,6 @@
}
}
-LocationSummary* BoxUint8Instr::MakeLocationSummary(Zone* zone,
- bool opt) const {
- ASSERT(from_representation() == kUnboxedUint8);
- const intptr_t kNumInputs = 1;
- const intptr_t kNumTemps = 0;
- LocationSummary* summary = new (zone)
- LocationSummary(zone, kNumInputs, kNumTemps, LocationSummary::kNoCall);
- summary->set_in(0, Location::RequiresRegister());
- summary->set_out(0, Location::RequiresRegister());
- return summary;
-}
-
-void BoxUint8Instr::EmitNativeCode(FlowGraphCompiler* compiler) {
- const Register value = locs()->in(0).reg();
- const Register out = locs()->out(0).reg();
- ASSERT(value != out);
-
- __ movzxb(out, value);
- __ SmiTag(out);
-}
-
LocationSummary* BoxInteger32Instr::MakeLocationSummary(Zone* zone,
bool opt) const {
ASSERT((from_representation() == kUnboxedInt32) ||
diff --git a/runtime/vm/compiler/backend/inliner.cc b/runtime/vm/compiler/backend/inliner.cc
index eb206db..1b2d1d4 100644
--- a/runtime/vm/compiler/backend/inliner.cc
+++ b/runtime/vm/compiler/backend/inliner.cc
@@ -2655,13 +2655,8 @@
source, new (Z) Value(stored_value), new (Z) Value(dst_type),
new (Z) Value(type_args), new (Z) Value(function_type_args),
Symbols::Value(), call->deopt_id());
- cursor = flow_graph->AppendTo(cursor, assert_value, call->env(),
- FlowGraph::kValue);
- // The environment is that of the InstanceCall([]=, ..., <env>).
- // A lazy-deopt of the inserted AssertAssignable must continue in
- // unoptimzed code.
- // => We will re-try this []= call in unoptimized code.
- assert_value->env()->MarkAsLazyDeoptToBeforeDeoptId();
+ cursor = flow_graph->AppendSpeculativeTo(cursor, assert_value,
+ call->env(), FlowGraph::kValue);
}
}
diff --git a/runtime/vm/compiler/backend/locations.cc b/runtime/vm/compiler/backend/locations.cc
index abcbf3f..56640f8 100644
--- a/runtime/vm/compiler/backend/locations.cc
+++ b/runtime/vm/compiler/backend/locations.cc
@@ -66,15 +66,18 @@
ASSERT(IsUnboxedInteger(rep));
switch (ValueSize(rep)) {
case 8:
+ ASSERT(!IsUnsigned(rep));
ASSERT_EQUAL(compiler::target::kWordSize, 8);
return compiler::kEightBytes;
case 4:
return IsUnsigned(rep) ? compiler::kUnsignedFourBytes
: compiler::kFourBytes;
case 2:
- // No kUnboxed{Uint,Int}16 yet.
- UNIMPLEMENTED();
- break;
+ // No kUnboxedInt16 yet.
+ if (!IsUnsigned(rep)) {
+ UNIMPLEMENTED();
+ }
+ return compiler::kUnsignedTwoBytes;
case 1:
if (!IsUnsigned(rep)) {
// No kUnboxedInt8 yet.
diff --git a/runtime/vm/compiler/backend/locations.h b/runtime/vm/compiler/backend/locations.h
index dcac605..22b858f 100644
--- a/runtime/vm/compiler/backend/locations.h
+++ b/runtime/vm/compiler/backend/locations.h
@@ -28,6 +28,7 @@
// Format: (representation name, is unsigned, value type)
#define FOR_EACH_INTEGER_REPRESENTATION_KIND(M) \
M(UnboxedUint8, true, uint8_t) \
+ M(UnboxedUint16, true, uint16_t) \
M(UnboxedInt32, false, int32_t) \
M(UnboxedUint32, true, uint32_t) \
M(UnboxedInt64, false, int64_t)
diff --git a/runtime/vm/compiler/backend/range_analysis.cc b/runtime/vm/compiler/backend/range_analysis.cc
index cfc40b2..3807b9a 100644
--- a/runtime/vm/compiler/backend/range_analysis.cc
+++ b/runtime/vm/compiler/backend/range_analysis.cc
@@ -2745,6 +2745,7 @@
case kUnboxedUint8: // Overapproximate Uint8 as Int16.
return RangeBoundary::kRangeBoundaryInt16;
case kUnboxedInt32:
+ case kUnboxedUint16: // Overapproximate Uint16 as Int32.
return RangeBoundary::kRangeBoundaryInt32;
case kUnboxedInt64:
case kUnboxedUint32: // Overapproximate Uint32 as Int64.
diff --git a/runtime/vm/compiler/call_specializer.cc b/runtime/vm/compiler/call_specializer.cc
index 4a737b1..d386935 100644
--- a/runtime/vm/compiler/call_specializer.cc
+++ b/runtime/vm/compiler/call_specializer.cc
@@ -837,15 +837,15 @@
if (IG->use_field_guards()) {
if (field.guarded_cid() != kDynamicCid) {
- InsertBefore(instr,
- new (Z)
- GuardFieldClassInstr(new (Z) Value(instr->ArgumentAt(1)),
- field, instr->deopt_id()),
- instr->env(), FlowGraph::kEffect);
+ InsertSpeculativeBefore(
+ instr,
+ new (Z) GuardFieldClassInstr(new (Z) Value(instr->ArgumentAt(1)),
+ field, instr->deopt_id()),
+ instr->env(), FlowGraph::kEffect);
}
if (field.needs_length_check()) {
- InsertBefore(
+ InsertSpeculativeBefore(
instr,
new (Z) GuardFieldLengthInstr(new (Z) Value(instr->ArgumentAt(1)),
field, instr->deopt_id()),
@@ -853,11 +853,11 @@
}
if (field.static_type_exactness_state().NeedsFieldGuard()) {
- InsertBefore(instr,
- new (Z)
- GuardFieldTypeInstr(new (Z) Value(instr->ArgumentAt(1)),
- field, instr->deopt_id()),
- instr->env(), FlowGraph::kEffect);
+ InsertSpeculativeBefore(
+ instr,
+ new (Z) GuardFieldTypeInstr(new (Z) Value(instr->ArgumentAt(1)),
+ field, instr->deopt_id()),
+ instr->env(), FlowGraph::kEffect);
}
}
@@ -896,20 +896,19 @@
instantiator_type_args = new (Z) LoadFieldInstr(
new (Z) Value(instr->ArgumentAt(0)),
Slot::GetTypeArgumentsSlotFor(thread(), owner), instr->source());
- InsertBefore(instr, instantiator_type_args, instr->env(),
- FlowGraph::kValue);
+ InsertSpeculativeBefore(instr, instantiator_type_args, instr->env(),
+ FlowGraph::kValue);
}
}
- InsertBefore(
- instr,
- new (Z) AssertAssignableInstr(
- instr->source(), new (Z) Value(instr->ArgumentAt(1)),
- new (Z) Value(flow_graph_->GetConstant(dst_type)),
- new (Z) Value(instantiator_type_args),
- new (Z) Value(function_type_args),
- String::ZoneHandle(zone(), field.name()), instr->deopt_id()),
- instr->env(), FlowGraph::kEffect);
+ auto assert_assignable = new (Z) AssertAssignableInstr(
+ instr->source(), new (Z) Value(instr->ArgumentAt(1)),
+ new (Z) Value(flow_graph_->GetConstant(dst_type)),
+ new (Z) Value(instantiator_type_args),
+ new (Z) Value(function_type_args),
+ String::ZoneHandle(zone(), field.name()), instr->deopt_id());
+ InsertSpeculativeBefore(instr, assert_assignable, instr->env(),
+ FlowGraph::kEffect);
}
}
diff --git a/runtime/vm/compiler/call_specializer.h b/runtime/vm/compiler/call_specializer.h
index c0ad62d..e82a69c 100644
--- a/runtime/vm/compiler/call_specializer.h
+++ b/runtime/vm/compiler/call_specializer.h
@@ -64,6 +64,12 @@
FlowGraph::UseKind use_kind) {
flow_graph_->InsertBefore(next, instr, env, use_kind);
}
+ void InsertSpeculativeBefore(Instruction* next,
+ Instruction* instr,
+ Environment* env,
+ FlowGraph::UseKind use_kind) {
+ flow_graph_->InsertSpeculativeBefore(next, instr, env, use_kind);
+ }
virtual void VisitStaticCall(StaticCallInstr* instr);
diff --git a/runtime/vm/compiler/frontend/base_flow_graph_builder.cc b/runtime/vm/compiler/frontend/base_flow_graph_builder.cc
index 7160b17..f750101 100644
--- a/runtime/vm/compiler/frontend/base_flow_graph_builder.cc
+++ b/runtime/vm/compiler/frontend/base_flow_graph_builder.cc
@@ -617,7 +617,6 @@
StoreIndexedInstr* store = new (Z) StoreIndexedInstr(
Pop(), // Array.
index, value, emit_store_barrier, /*index_unboxed=*/false,
-
compiler::target::Instance::ElementSizeFor(class_id), class_id,
kAlignedAccess, DeoptId::kNone, InstructionSource());
return Fragment(store);
diff --git a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
index 226faffb..f27b185 100644
--- a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
+++ b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
@@ -1115,8 +1115,6 @@
return BuildVariableSet(position);
case kSpecializedVariableSet:
return BuildVariableSet(payload, position);
- case kPropertyGet:
- return BuildPropertyGet(position);
case kInstanceGet:
return BuildInstanceGet(position);
case kDynamicGet:
@@ -1125,8 +1123,6 @@
return BuildInstanceTearOff(position);
case kFunctionTearOff:
return BuildFunctionTearOff(position);
- case kPropertySet:
- return BuildPropertySet(position);
case kInstanceSet:
return BuildInstanceSet(position);
case kDynamicSet:
@@ -1139,10 +1135,10 @@
return BuildStaticGet(position);
case kStaticSet:
return BuildStaticSet(position);
- case kMethodInvocation:
case kInstanceInvocation:
+ return BuildMethodInvocation(position, /*is_dynamic=*/false);
case kDynamicInvocation:
- return BuildMethodInvocation(position, tag);
+ return BuildMethodInvocation(position, /*is_dynamic=*/true);
case kLocalFunctionInvocation:
return BuildLocalFunctionInvocation(position);
case kFunctionInvocation:
@@ -2173,78 +2169,6 @@
return instructions;
}
-Fragment StreamingFlowGraphBuilder::BuildPropertyGet(TokenPosition* p) {
- const intptr_t offset = ReaderOffset() - 1; // Include the tag.
- const TokenPosition position = ReadPosition(); // read position.
- if (p != NULL) *p = position;
-
- const DirectCallMetadata direct_call =
- direct_call_metadata_helper_.GetDirectTargetForPropertyGet(offset);
- const InferredTypeMetadata result_type =
- inferred_type_metadata_helper_.GetInferredType(offset);
-
- Fragment instructions = BuildExpression(); // read receiver.
-
- LocalVariable* receiver = NULL;
- if (direct_call.check_receiver_for_null_) {
- // Duplicate receiver for CheckNull before it is consumed by PushArgument.
- receiver = MakeTemporary();
- instructions += LoadLocal(receiver);
- }
-
- const String& getter_name = ReadNameAsGetterName(); // read name.
-
- const Function* interface_target = &Function::null_function();
- const Function* tearoff_interface_target = &Function::null_function();
- const NameIndex itarget_name =
- ReadInterfaceMemberNameReference(); // read interface_target_reference.
- if (!H.IsRoot(itarget_name) && H.IsGetter(itarget_name)) {
- interface_target = &Function::ZoneHandle(
- Z,
- H.LookupMethodByMember(itarget_name, H.DartGetterName(itarget_name)));
- ASSERT(getter_name.ptr() == interface_target->name());
- } else if (!H.IsRoot(itarget_name) && H.IsMethod(itarget_name)) {
- tearoff_interface_target = &Function::ZoneHandle(
- Z,
- H.LookupMethodByMember(itarget_name, H.DartMethodName(itarget_name)));
- }
-
- if (direct_call.check_receiver_for_null_) {
- instructions += CheckNull(position, receiver, getter_name);
- }
-
- const String* mangled_name = &getter_name;
- const Function* direct_call_target = &direct_call.target_;
- if (H.IsRoot(itarget_name)) {
- mangled_name = &String::ZoneHandle(
- Z, Function::CreateDynamicInvocationForwarderName(getter_name));
- if (!direct_call_target->IsNull()) {
- direct_call_target = &Function::ZoneHandle(
- direct_call.target_.GetDynamicInvocationForwarder(*mangled_name));
- }
- }
-
- if (!direct_call_target->IsNull()) {
- ASSERT(CompilerState::Current().is_aot());
- instructions +=
- StaticCall(position, *direct_call_target, 1, Array::null_array(),
- ICData::kNoRebind, &result_type);
- } else {
- const intptr_t kTypeArgsLen = 0;
- const intptr_t kNumArgsChecked = 1;
- instructions +=
- InstanceCall(position, *mangled_name, Token::kGET, kTypeArgsLen, 1,
- Array::null_array(), kNumArgsChecked, *interface_target,
- *tearoff_interface_target, &result_type);
- }
-
- if (direct_call.check_receiver_for_null_) {
- instructions += DropTempsPreserveTop(1); // Drop receiver, preserve result.
- }
-
- return instructions;
-}
-
Fragment StreamingFlowGraphBuilder::BuildInstanceGet(TokenPosition* p) {
const intptr_t offset = ReaderOffset() - 1; // Include the tag.
ReadByte(); // read kind.
@@ -2451,101 +2375,6 @@
return instructions;
}
-Fragment StreamingFlowGraphBuilder::BuildPropertySet(TokenPosition* p) {
- const intptr_t offset = ReaderOffset() - 1; // Include the tag.
-
- const DirectCallMetadata direct_call =
- direct_call_metadata_helper_.GetDirectTargetForPropertySet(offset);
- const CallSiteAttributesMetadata call_site_attributes =
- call_site_attributes_metadata_helper_.GetCallSiteAttributes(offset);
- const InferredTypeMetadata inferred_type =
- inferred_type_metadata_helper_.GetInferredType(offset);
-
- // True if callee can skip argument type checks.
- bool is_unchecked_call = inferred_type.IsSkipCheck();
- if (call_site_attributes.receiver_type != nullptr &&
- call_site_attributes.receiver_type->HasTypeClass() &&
- !Class::Handle(call_site_attributes.receiver_type->type_class())
- .IsGeneric()) {
- is_unchecked_call = true;
- }
-
- Fragment instructions(MakeTemp());
- LocalVariable* variable = MakeTemporary();
-
- const TokenPosition position = ReadPosition(); // read position.
- if (p != nullptr) *p = position;
-
- if (PeekTag() == kThisExpression) {
- is_unchecked_call = true;
- }
- instructions += BuildExpression(); // read receiver.
-
- LocalVariable* receiver = nullptr;
- if (direct_call.check_receiver_for_null_) {
- // Duplicate receiver for CheckNull before it is consumed by PushArgument.
- receiver = MakeTemporary();
- instructions += LoadLocal(receiver);
- }
-
- const String& setter_name = ReadNameAsSetterName(); // read name.
-
- instructions += BuildExpression(); // read value.
- instructions += StoreLocal(TokenPosition::kNoSource, variable);
-
- const Function* interface_target = &Function::null_function();
- const NameIndex itarget_name =
- ReadInterfaceMemberNameReference(); // read interface_target_reference.
- if (!H.IsRoot(itarget_name)) {
- interface_target = &Function::ZoneHandle(
- Z,
- H.LookupMethodByMember(itarget_name, H.DartSetterName(itarget_name)));
- ASSERT(setter_name.ptr() == interface_target->name());
- }
-
- if (direct_call.check_receiver_for_null_) {
- instructions += CheckNull(position, receiver, setter_name);
- }
-
- const String* mangled_name = &setter_name;
- const Function* direct_call_target = &direct_call.target_;
- if (H.IsRoot(itarget_name)) {
- mangled_name = &String::ZoneHandle(
- Z, Function::CreateDynamicInvocationForwarderName(setter_name));
- if (!direct_call_target->IsNull()) {
- direct_call_target = &Function::ZoneHandle(
- direct_call.target_.GetDynamicInvocationForwarder(*mangled_name));
- }
- }
-
- if (!direct_call_target->IsNull()) {
- ASSERT(CompilerState::Current().is_aot());
- instructions +=
- StaticCall(position, *direct_call_target, 2, Array::null_array(),
- ICData::kNoRebind, /*result_type=*/nullptr,
- /*type_args_count=*/0,
- /*use_unchecked_entry=*/is_unchecked_call);
- } else {
- const intptr_t kTypeArgsLen = 0;
- const intptr_t kNumArgsChecked = 1;
-
- instructions += InstanceCall(
- position, *mangled_name, Token::kSET, kTypeArgsLen, 2,
- Array::null_array(), kNumArgsChecked, *interface_target,
- Function::null_function(),
- /*result_type=*/nullptr,
- /*use_unchecked_entry=*/is_unchecked_call, &call_site_attributes);
- }
-
- instructions += Drop(); // Drop result of the setter invocation.
-
- if (direct_call.check_receiver_for_null_) {
- instructions += Drop(); // Drop receiver.
- }
-
- return instructions;
-}
-
Fragment StreamingFlowGraphBuilder::BuildInstanceSet(TokenPosition* p) {
const intptr_t offset = ReaderOffset() - 1; // Include the tag.
ReadByte(); // read kind.
@@ -3020,19 +2849,12 @@
}
Fragment StreamingFlowGraphBuilder::BuildMethodInvocation(TokenPosition* p,
- Tag tag) {
- ASSERT((tag == kMethodInvocation) || (tag == kInstanceInvocation) ||
- (tag == kDynamicInvocation));
+ bool is_dynamic) {
const intptr_t offset = ReaderOffset() - 1; // Include the tag.
-
- if ((tag == kInstanceInvocation) || (tag == kDynamicInvocation)) {
- ReadByte(); // read kind.
- }
+ ReadByte(); // read kind.
// read flags.
- const uint8_t flags =
- ((tag == kMethodInvocation) || (tag == kInstanceInvocation)) ? ReadFlags()
- : 0;
+ const uint8_t flags = is_dynamic ? 0 : ReadFlags();
const bool is_invariant = (flags & kMethodInvocationFlagInvariant) != 0;
const TokenPosition position = ReadPosition(); // read position.
@@ -3047,22 +2869,13 @@
const Tag receiver_tag = PeekTag(); // peek tag for receiver.
- bool is_unchecked_closure_call = false;
bool is_unchecked_call = is_invariant || result_type.IsSkipCheck();
- if (call_site_attributes.receiver_type != nullptr) {
- if ((tag == kMethodInvocation) &&
- call_site_attributes.receiver_type->IsFunctionType()) {
- AlternativeReadingScope alt(&reader_);
- SkipExpression(); // skip receiver
- is_unchecked_closure_call =
- ReadNameAsMethodName().Equals(Symbols::Call());
- } else if ((tag != kDynamicInvocation) &&
- call_site_attributes.receiver_type->HasTypeClass() &&
- !call_site_attributes.receiver_type->IsDynamicType() &&
- !Class::Handle(call_site_attributes.receiver_type->type_class())
- .IsGeneric()) {
- is_unchecked_call = true;
- }
+ if (!is_dynamic && (call_site_attributes.receiver_type != nullptr) &&
+ call_site_attributes.receiver_type->HasTypeClass() &&
+ !call_site_attributes.receiver_type->IsDynamicType() &&
+ !Class::Handle(call_site_attributes.receiver_type->type_class())
+ .IsGeneric()) {
+ is_unchecked_call = true;
}
Fragment instructions;
@@ -3079,7 +2892,7 @@
const TypeArguments& type_arguments =
T.BuildTypeArguments(list_length); // read types.
instructions += TranslateInstantiatedTypeArguments(type_arguments);
- if (direct_call.check_receiver_for_null_ || is_unchecked_closure_call) {
+ if (direct_call.check_receiver_for_null_) {
// Don't yet push type arguments if we need to check receiver for null.
// In this case receiver will be duplicated so instead of pushing
// type arguments here we need to push it between receiver_temp
@@ -3092,7 +2905,7 @@
// Take note of whether the invocation is against the receiver of the current
// function: in this case, we may skip some type checks in the callee.
- if ((PeekTag() == kThisExpression) && (tag != kDynamicInvocation)) {
+ if ((PeekTag() == kThisExpression) && !is_dynamic) {
is_unchecked_call = true;
}
instructions += BuildExpression(); // read receiver.
@@ -3119,7 +2932,7 @@
}
LocalVariable* receiver_temp = NULL;
- if (direct_call.check_receiver_for_null_ || is_unchecked_closure_call) {
+ if (direct_call.check_receiver_for_null_) {
// Duplicate receiver for CheckNull before it is consumed by PushArgument.
receiver_temp = MakeTemporary();
if (type_arguments_temp != NULL) {
@@ -3149,15 +2962,14 @@
checked_argument_count = argument_count;
}
- if (tag == kInstanceInvocation) {
+ if (!is_dynamic) {
SkipDartType(); // read function_type.
}
const Function* interface_target = &Function::null_function();
+ // read interface_target_reference.
const NameIndex itarget_name =
- ((tag == kMethodInvocation) || (tag == kInstanceInvocation))
- ? ReadInterfaceMemberNameReference()
- : NameIndex(); // read interface_target_reference.
+ is_dynamic ? NameIndex() : ReadInterfaceMemberNameReference();
// TODO(dartbug.com/34497): Once front-end desugars calls via
// fields/getters, filtering of field and getter interface targets here
// can be turned into assertions.
@@ -3169,12 +2981,9 @@
ASSERT(!interface_target->IsGetterFunction());
}
- // TODO(sjindel): Avoid the check for null on unchecked closure calls if TFA
- // allows.
- if (direct_call.check_receiver_for_null_ || is_unchecked_closure_call) {
- // Receiver temp is needed to load the function to call from the closure.
+ if (direct_call.check_receiver_for_null_) {
instructions += CheckNull(position, receiver_temp, name,
- /*clear_temp=*/!is_unchecked_closure_call);
+ /*clear_temp=*/true);
}
const String* mangled_name = &name;
@@ -3193,19 +3002,7 @@
}
}
- if (is_unchecked_closure_call) {
- // Lookup the function in the closure.
- instructions += LoadLocal(receiver_temp);
- if (!FLAG_precompiled_mode || !FLAG_use_bare_instructions) {
- instructions += LoadNativeField(Slot::Closure_function());
- }
- if (parsed_function()->function().is_debuggable()) {
- ASSERT(!parsed_function()->function().is_native());
- instructions += DebugStepCheck(position);
- }
- instructions +=
- B->ClosureCall(position, type_args_len, argument_count, argument_names);
- } else if (!direct_call_target->IsNull()) {
+ if (!direct_call_target->IsNull()) {
// Even if TFA infers a concrete receiver type, the static type of the
// call-site may still be dynamic and we need to call the dynamic invocation
// forwarder to ensure type-checks are performed.
diff --git a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.h b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.h
index 36cae3e..7160201 100644
--- a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.h
+++ b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.h
@@ -274,12 +274,10 @@
Fragment BuildVariableSet(uint8_t payload, TokenPosition* position);
Fragment BuildVariableSetImpl(TokenPosition position,
intptr_t variable_kernel_position);
- Fragment BuildPropertyGet(TokenPosition* position);
Fragment BuildInstanceGet(TokenPosition* position);
Fragment BuildDynamicGet(TokenPosition* position);
Fragment BuildInstanceTearOff(TokenPosition* position);
Fragment BuildFunctionTearOff(TokenPosition* position);
- Fragment BuildPropertySet(TokenPosition* position);
Fragment BuildInstanceSet(TokenPosition* position);
Fragment BuildDynamicSet(TokenPosition* position);
Fragment BuildAllocateInvocationMirrorCall(TokenPosition position,
@@ -293,7 +291,7 @@
Fragment BuildSuperPropertySet(TokenPosition* position);
Fragment BuildStaticGet(TokenPosition* position);
Fragment BuildStaticSet(TokenPosition* position);
- Fragment BuildMethodInvocation(TokenPosition* position, Tag tag);
+ Fragment BuildMethodInvocation(TokenPosition* position, bool is_dynamic);
Fragment BuildLocalFunctionInvocation(TokenPosition* position);
Fragment BuildFunctionInvocation(TokenPosition* position);
Fragment BuildEqualsCall(TokenPosition* position);
diff --git a/runtime/vm/compiler/frontend/kernel_fingerprints.cc b/runtime/vm/compiler/frontend/kernel_fingerprints.cc
index 4b0426c..805e37d 100644
--- a/runtime/vm/compiler/frontend/kernel_fingerprints.cc
+++ b/runtime/vm/compiler/frontend/kernel_fingerprints.cc
@@ -373,12 +373,6 @@
ReadUInt(); // read kernel position.
CalculateExpressionFingerprint(); // read expression.
return;
- case kPropertyGet:
- ReadPosition(); // read position.
- CalculateExpressionFingerprint(); // read receiver.
- BuildHash(ReadNameAsGetterName().Hash()); // read name.
- CalculateGetterNameFingerprint(); // read interface_target_reference.
- return;
case kInstanceGet:
ReadByte(); // read kind.
ReadPosition(); // read position.
@@ -405,13 +399,6 @@
ReadPosition(); // read position.
CalculateExpressionFingerprint(); // read receiver.
return;
- case kPropertySet:
- ReadPosition(); // read position.
- CalculateExpressionFingerprint(); // read receiver.
- BuildHash(ReadNameAsSetterName().Hash()); // read name.
- CalculateExpressionFingerprint(); // read value.
- CalculateSetterNameFingerprint(); // read interface_target_reference.
- return;
case kInstanceSet:
ReadByte(); // read kind.
ReadPosition(); // read position.
@@ -447,14 +434,6 @@
CalculateCanonicalNameFingerprint(); // read target_reference.
CalculateExpressionFingerprint(); // read expression.
return;
- case kMethodInvocation:
- ReadFlags(); // read flags.
- ReadPosition(); // read position.
- CalculateExpressionFingerprint(); // read receiver.
- BuildHash(ReadNameAsMethodName().Hash()); // read name.
- CalculateArgumentsFingerprint(); // read arguments.
- CalculateMethodNameFingerprint(); // read interface_target_reference.
- return;
case kInstanceInvocation:
ReadByte(); // read kind.
ReadFlags(); // read flags.
diff --git a/runtime/vm/compiler/frontend/kernel_to_il.cc b/runtime/vm/compiler/frontend/kernel_to_il.cc
index a09201c..8c9be51 100644
--- a/runtime/vm/compiler/frontend/kernel_to_il.cc
+++ b/runtime/vm/compiler/frontend/kernel_to_il.cc
@@ -2177,7 +2177,8 @@
check_required.current = valid_index;
check_required += LoadLocal(info.parameter_names);
check_required += LoadLocal(flags_index);
- check_required += LoadIndexed(kArrayCid);
+ check_required += LoadIndexed(
+ kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
check_required += LoadLocal(opt_index);
check_required +=
IntConstant(compiler::target::kNumParameterFlagsPerElement - 1);
@@ -2375,7 +2376,8 @@
// First load the name we need to check against.
loop_body += LoadLocal(info.parameter_names);
loop_body += LoadLocal(info.vars->current_param_index);
- loop_body += LoadIndexed(kArrayCid);
+ loop_body += LoadIndexed(
+ kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
LocalVariable* param_name = MakeTemporary("param_name"); // Read only.
// One additional local value on the stack within the loop body (param_name)
@@ -2598,7 +2600,8 @@
loop_test_flag += LoadLocal(info.vars->current_param_index);
loop_test_flag += IntConstant(TypeParameters::kFlagsPerSmiShift);
loop_test_flag += SmiBinaryOp(Token::kSHR);
- loop_test_flag += LoadIndexed(kArrayCid);
+ loop_test_flag += LoadIndexed(
+ kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
loop_test_flag += LoadLocal(info.vars->current_param_index);
loop_test_flag += IntConstant(TypeParameters::kFlagsPerSmiMask);
loop_test_flag += SmiBinaryOp(Token::kBIT_AND);
@@ -2659,7 +2662,8 @@
loop_call_check += LoadLocal(info.type_parameters);
loop_call_check += LoadNativeField(Slot::TypeParameters_names());
loop_call_check += LoadLocal(info.vars->current_param_index);
- loop_call_check += LoadIndexed(kArrayCid);
+ loop_call_check += LoadIndexed(
+ kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
// Assert that the passed-in type argument is consistent with the bound of
// the corresponding type parameter.
loop_call_check += AssertSubtype(TokenPosition::kNoSource);
@@ -2688,7 +2692,8 @@
// Load destination type.
instructions += LoadLocal(info.parameter_types);
instructions += LoadLocal(param_index);
- instructions += LoadIndexed(kArrayCid);
+ instructions += LoadIndexed(
+ kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
// Load instantiator type arguments.
instructions += LoadLocal(info.instantiator_type_args);
// Load the full set of function type arguments.
diff --git a/runtime/vm/compiler/frontend/kernel_translation_helper.cc b/runtime/vm/compiler/frontend/kernel_translation_helper.cc
index d8ce563..1b5a858 100644
--- a/runtime/vm/compiler/frontend/kernel_translation_helper.cc
+++ b/runtime/vm/compiler/frontend/kernel_translation_helper.cc
@@ -2320,12 +2320,6 @@
ReadUInt(); // read kernel position.
SkipExpression(); // read expression.
return;
- case kPropertyGet:
- ReadPosition(); // read position.
- SkipExpression(); // read receiver.
- SkipName(); // read name.
- SkipInterfaceMemberNameReference(); // read interface_target_reference.
- return;
case kInstanceGet:
ReadByte(); // read kind.
ReadPosition(); // read position.
@@ -2352,13 +2346,6 @@
ReadPosition(); // read position.
SkipExpression(); // read receiver.
return;
- case kPropertySet:
- ReadPosition(); // read position.
- SkipExpression(); // read receiver.
- SkipName(); // read name.
- SkipExpression(); // read value.
- SkipInterfaceMemberNameReference(); // read interface_target_reference.
- return;
case kInstanceSet:
ReadByte(); // read kind.
ReadPosition(); // read position.
@@ -2394,14 +2381,6 @@
SkipCanonicalNameReference(); // read target_reference.
SkipExpression(); // read expression.
return;
- case kMethodInvocation:
- ReadFlags(); // read flags.
- ReadPosition(); // read position.
- SkipExpression(); // read receiver.
- SkipName(); // read name.
- SkipArguments(); // read arguments.
- SkipInterfaceMemberNameReference(); // read interface_target_reference.
- return;
case kInstanceInvocation:
ReadByte(); // read kind.
ReadFlags(); // read flags.
diff --git a/runtime/vm/compiler/frontend/prologue_builder.cc b/runtime/vm/compiler/frontend/prologue_builder.cc
index 8125ba0..035bf63 100644
--- a/runtime/vm/compiler/frontend/prologue_builder.cc
+++ b/runtime/vm/compiler/frontend/prologue_builder.cc
@@ -227,7 +227,7 @@
for (intptr_t i = 0; param < num_params; ++param, ++i) {
copy_args_prologue += IntConstant(
compiler::target::ArgumentsDescriptor::named_entry_size() /
- compiler::target::kWordSize);
+ compiler::target::kCompressedWordSize);
copy_args_prologue += LoadLocal(optional_count_vars_processed);
copy_args_prologue += SmiBinaryOp(Token::kMUL, /* truncate= */ true);
LocalVariable* tuple_diff = MakeTemporary();
@@ -247,10 +247,11 @@
good += IntConstant(
(first_name_offset +
compiler::target::ArgumentsDescriptor::position_offset()) /
- compiler::target::kWordSize);
+ compiler::target::kCompressedWordSize);
good += LoadLocal(tuple_diff);
good += SmiBinaryOp(Token::kADD, /* truncate= */ true);
- good += LoadIndexed(kArrayCid);
+ good += LoadIndexed(
+ kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
}
good += SmiBinaryOp(Token::kSUB, /* truncate= */ true);
good += LoadFpRelativeSlot(
@@ -283,10 +284,11 @@
copy_args_prologue +=
IntConstant((first_name_offset +
compiler::target::ArgumentsDescriptor::name_offset()) /
- compiler::target::kWordSize);
+ compiler::target::kCompressedWordSize);
copy_args_prologue += LoadLocal(tuple_diff);
copy_args_prologue += SmiBinaryOp(Token::kADD, /* truncate= */ true);
- copy_args_prologue += LoadIndexed(kArrayCid);
+ copy_args_prologue += LoadIndexed(
+ kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
// first name in sorted list of all names
const String& param_name = String::ZoneHandle(
diff --git a/runtime/vm/compiler/frontend/scope_builder.cc b/runtime/vm/compiler/frontend/scope_builder.cc
index 27be5e5..36fbbbb 100644
--- a/runtime/vm/compiler/frontend/scope_builder.cc
+++ b/runtime/vm/compiler/frontend/scope_builder.cc
@@ -686,13 +686,6 @@
VisitExpression(); // read expression.
return;
}
- case kPropertyGet:
- helper_.ReadPosition(); // read position.
- VisitExpression(); // read receiver.
- helper_.SkipName(); // read name.
- // read interface_target_reference.
- helper_.SkipInterfaceMemberNameReference();
- return;
case kInstanceGet:
helper_.ReadByte(); // read kind.
helper_.ReadPosition(); // read position.
@@ -721,14 +714,6 @@
helper_.ReadPosition(); // read position.
VisitExpression(); // read receiver.
return;
- case kPropertySet:
- helper_.ReadPosition(); // read position.
- VisitExpression(); // read receiver.
- helper_.SkipName(); // read name.
- VisitExpression(); // read value.
- // read interface_target_reference.
- helper_.SkipInterfaceMemberNameReference();
- return;
case kInstanceSet:
helper_.ReadByte(); // read kind.
helper_.ReadPosition(); // read position.
@@ -767,15 +752,6 @@
helper_.SkipCanonicalNameReference(); // read target_reference.
VisitExpression(); // read expression.
return;
- case kMethodInvocation:
- helper_.ReadFlags(); // read flags.
- helper_.ReadPosition(); // read position.
- VisitExpression(); // read receiver.
- helper_.SkipName(); // read name.
- VisitArguments(); // read arguments.
- // read interface_target_reference.
- helper_.SkipInterfaceMemberNameReference();
- return;
case kInstanceInvocation:
helper_.ReadByte(); // read kind.
helper_.ReadFlags(); // read flags.
diff --git a/runtime/vm/compiler/runtime_api.cc b/runtime/vm/compiler/runtime_api.cc
index ecca290..c289e46 100644
--- a/runtime/vm/compiler/runtime_api.cc
+++ b/runtime/vm/compiler/runtime_api.cc
@@ -508,7 +508,7 @@
switch (cid) {
case kArrayCid:
case kImmutableArrayCid:
- return kWordSize;
+ return kCompressedWordSize;
case kTypeArgumentsCid:
return kCompressedWordSize;
case kOneByteStringCid:
diff --git a/runtime/vm/compiler/runtime_api.h b/runtime/vm/compiler/runtime_api.h
index 7b746fe..ce683c3 100644
--- a/runtime/vm/compiler/runtime_api.h
+++ b/runtime/vm/compiler/runtime_api.h
@@ -298,15 +298,17 @@
"Host word size smaller than target word size");
#endif
+#if defined(DART_COMPRESSED_POINTERS)
+static constexpr int kCompressedWordSize = kInt32Size;
+static constexpr int kCompressedWordSizeLog2 = kInt32SizeLog2;
+#else
+static constexpr int kCompressedWordSize = kWordSize;
+static constexpr int kCompressedWordSizeLog2 = kWordSizeLog2;
+#endif
+
static constexpr word kBitsPerWordLog2 = kWordSizeLog2 + kBitsPerByteLog2;
static constexpr word kBitsPerWord = 1 << kBitsPerWordLog2;
-#if !defined(DART_COMPRESSED_POINTERS)
-static constexpr int kCompressedWordSize = kWordSize;
-#else
-static constexpr int kCompressedWordSize = sizeof(uint32_t);
-#endif
-
using ObjectAlignment = dart::ObjectAlignment<kWordSize, kWordSizeLog2>;
constexpr word kWordMax = (static_cast<uword>(1) << (kBitsPerWord - 1)) - 1;
diff --git a/runtime/vm/compiler/runtime_offsets_extracted.h b/runtime/vm/compiler/runtime_offsets_extracted.h
index 8465181..99fcc93 100644
--- a/runtime/vm/compiler/runtime_offsets_extracted.h
+++ b/runtime/vm/compiler/runtime_offsets_extracted.h
@@ -2190,8 +2190,8 @@
84;
static constexpr dart::compiler::target::word
ICData_receivers_static_type_offset = 32;
-static constexpr dart::compiler::target::word Array_elements_start_offset = 24;
-static constexpr dart::compiler::target::word Array_element_size = 8;
+static constexpr dart::compiler::target::word Array_elements_start_offset = 16;
+static constexpr dart::compiler::target::word Array_element_size = 4;
static constexpr dart::compiler::target::word ClassTable_elements_start_offset =
0;
static constexpr dart::compiler::target::word ClassTable_element_size = 1;
@@ -2223,9 +2223,9 @@
static constexpr dart::compiler::target::word
TwoByteString_elements_start_offset = 16;
static constexpr dart::compiler::target::word TwoByteString_element_size = 2;
-static constexpr dart::compiler::target::word Array_kMaxElements = 134217727;
+static constexpr dart::compiler::target::word Array_kMaxElements = 268435455;
static constexpr dart::compiler::target::word Array_kMaxNewSpaceElements =
- 32765;
+ 65532;
static constexpr dart::compiler::target::word
Instructions_kMonomorphicEntryOffsetJIT = 8;
static constexpr dart::compiler::target::word
@@ -2238,7 +2238,7 @@
Instructions_kBarePayloadAlignment = 4;
static constexpr dart::compiler::target::word
Instructions_kNonBarePayloadAlignment = 8;
-static constexpr dart::compiler::target::word OldPage_kBytesPerCardLog2 = 10;
+static constexpr dart::compiler::target::word OldPage_kBytesPerCardLog2 = 9;
static constexpr dart::compiler::target::word
NativeEntry_kNumCallWrapperArguments = 2;
static constexpr dart::compiler::target::word String_kMaxElements = 536870911;
@@ -2264,23 +2264,23 @@
static constexpr dart::compiler::target::word
AbstractType_type_test_stub_entry_point_offset = 8;
static constexpr dart::compiler::target::word ArgumentsDescriptor_count_offset =
- 32;
+ 20;
static constexpr dart::compiler::target::word ArgumentsDescriptor_size_offset =
- 40;
+ 24;
static constexpr dart::compiler::target::word
- ArgumentsDescriptor_first_named_entry_offset = 56;
+ ArgumentsDescriptor_first_named_entry_offset = 32;
static constexpr dart::compiler::target::word
- ArgumentsDescriptor_named_entry_size = 16;
+ ArgumentsDescriptor_named_entry_size = 8;
static constexpr dart::compiler::target::word ArgumentsDescriptor_name_offset =
0;
static constexpr dart::compiler::target::word
- ArgumentsDescriptor_position_offset = 8;
+ ArgumentsDescriptor_position_offset = 4;
static constexpr dart::compiler::target::word
- ArgumentsDescriptor_positional_count_offset = 48;
+ ArgumentsDescriptor_positional_count_offset = 28;
static constexpr dart::compiler::target::word
- ArgumentsDescriptor_type_args_len_offset = 24;
-static constexpr dart::compiler::target::word Array_data_offset = 24;
-static constexpr dart::compiler::target::word Array_length_offset = 16;
+ ArgumentsDescriptor_type_args_len_offset = 16;
+static constexpr dart::compiler::target::word Array_data_offset = 16;
+static constexpr dart::compiler::target::word Array_length_offset = 12;
static constexpr dart::compiler::target::word Array_tags_offset = 0;
static constexpr dart::compiler::target::word Array_type_arguments_offset = 8;
static constexpr dart::compiler::target::word Class_declaration_type_offset =
@@ -2292,13 +2292,13 @@
Class_host_type_arguments_field_offset_in_words_offset = 104;
static constexpr dart::compiler::target::word
SharedClassTable_class_heap_stats_table_offset = 0;
-static constexpr dart::compiler::target::word Closure_context_offset = 40;
+static constexpr dart::compiler::target::word Closure_context_offset = 24;
static constexpr dart::compiler::target::word
- Closure_delayed_type_arguments_offset = 24;
-static constexpr dart::compiler::target::word Closure_function_offset = 32;
+ Closure_delayed_type_arguments_offset = 16;
+static constexpr dart::compiler::target::word Closure_function_offset = 20;
static constexpr dart::compiler::target::word
- Closure_function_type_arguments_offset = 16;
-static constexpr dart::compiler::target::word Closure_hash_offset = 48;
+ Closure_function_type_arguments_offset = 12;
+static constexpr dart::compiler::target::word Closure_hash_offset = 28;
static constexpr dart::compiler::target::word
Closure_instantiator_type_arguments_offset = 8;
static constexpr dart::compiler::target::word
@@ -2338,9 +2338,9 @@
static constexpr dart::compiler::target::word FutureOr_type_arguments_offset =
8;
static constexpr dart::compiler::target::word GrowableObjectArray_data_offset =
- 24;
+ 16;
static constexpr dart::compiler::target::word
- GrowableObjectArray_length_offset = 16;
+ GrowableObjectArray_length_offset = 12;
static constexpr dart::compiler::target::word
GrowableObjectArray_type_arguments_offset = 8;
static constexpr dart::compiler::target::word OldPage_card_table_offset = 40;
@@ -2363,16 +2363,16 @@
IsolateGroup_cached_class_table_table_offset = 32;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 72;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 32;
-static constexpr dart::compiler::target::word LinkedHashMap_data_offset = 32;
+static constexpr dart::compiler::target::word LinkedHashMap_data_offset = 20;
static constexpr dart::compiler::target::word
- LinkedHashMap_deleted_keys_offset = 48;
+ LinkedHashMap_deleted_keys_offset = 28;
static constexpr dart::compiler::target::word LinkedHashMap_hash_mask_offset =
- 24;
-static constexpr dart::compiler::target::word LinkedHashMap_index_offset = 16;
+ 16;
+static constexpr dart::compiler::target::word LinkedHashMap_index_offset = 12;
static constexpr dart::compiler::target::word
LinkedHashMap_type_arguments_offset = 8;
static constexpr dart::compiler::target::word LinkedHashMap_used_data_offset =
- 40;
+ 24;
static constexpr dart::compiler::target::word LocalHandle_ptr_offset = 0;
static constexpr dart::compiler::target::word
MarkingStackBlock_pointers_offset = 16;
@@ -2635,7 +2635,7 @@
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_target_offset = 8;
static constexpr dart::compiler::target::word WeakProperty_key_offset = 8;
-static constexpr dart::compiler::target::word WeakProperty_value_offset = 16;
+static constexpr dart::compiler::target::word WeakProperty_value_offset = 12;
static constexpr dart::compiler::target::word Code_entry_point_offset[] = {
8, 24, 16, 32};
static constexpr dart::compiler::target::word
@@ -2644,11 +2644,11 @@
1424, 1432, 1440, -1, 1448, 1456, -1, -1};
static constexpr dart::compiler::target::word AbstractType_InstanceSize = 24;
static constexpr dart::compiler::target::word ApiError_InstanceSize = 16;
-static constexpr dart::compiler::target::word Array_header_size = 24;
+static constexpr dart::compiler::target::word Array_header_size = 16;
static constexpr dart::compiler::target::word Bool_InstanceSize = 16;
static constexpr dart::compiler::target::word Capability_InstanceSize = 16;
static constexpr dart::compiler::target::word Class_InstanceSize = 120;
-static constexpr dart::compiler::target::word Closure_InstanceSize = 56;
+static constexpr dart::compiler::target::word Closure_InstanceSize = 32;
static constexpr dart::compiler::target::word ClosureData_InstanceSize = 24;
static constexpr dart::compiler::target::word CodeSourceMap_HeaderSize = 16;
static constexpr dart::compiler::target::word CompressedStackMaps_HeaderSize =
@@ -2671,7 +2671,7 @@
static constexpr dart::compiler::target::word FunctionType_InstanceSize = 48;
static constexpr dart::compiler::target::word FutureOr_InstanceSize = 16;
static constexpr dart::compiler::target::word GrowableObjectArray_InstanceSize =
- 32;
+ 24;
static constexpr dart::compiler::target::word ICData_InstanceSize = 56;
static constexpr dart::compiler::target::word Instance_InstanceSize = 8;
static constexpr dart::compiler::target::word Instructions_UnalignedHeaderSize =
@@ -2685,7 +2685,7 @@
static constexpr dart::compiler::target::word LanguageError_InstanceSize = 32;
static constexpr dart::compiler::target::word Library_InstanceSize = 112;
static constexpr dart::compiler::target::word LibraryPrefix_InstanceSize = 24;
-static constexpr dart::compiler::target::word LinkedHashMap_InstanceSize = 56;
+static constexpr dart::compiler::target::word LinkedHashMap_InstanceSize = 32;
static constexpr dart::compiler::target::word MegamorphicCache_InstanceSize =
48;
static constexpr dart::compiler::target::word Mint_InstanceSize = 16;
@@ -2725,7 +2725,7 @@
static constexpr dart::compiler::target::word UnlinkedCall_InstanceSize = 32;
static constexpr dart::compiler::target::word UnwindError_InstanceSize = 16;
static constexpr dart::compiler::target::word UserTag_InstanceSize = 24;
-static constexpr dart::compiler::target::word WeakProperty_InstanceSize = 32;
+static constexpr dart::compiler::target::word WeakProperty_InstanceSize = 24;
static constexpr dart::compiler::target::word
WeakSerializationReference_InstanceSize = 16;
#endif // defined(TARGET_ARCH_X64) && defined(DART_COMPRESSED_POINTERS)
@@ -2735,8 +2735,8 @@
84;
static constexpr dart::compiler::target::word
ICData_receivers_static_type_offset = 32;
-static constexpr dart::compiler::target::word Array_elements_start_offset = 24;
-static constexpr dart::compiler::target::word Array_element_size = 8;
+static constexpr dart::compiler::target::word Array_elements_start_offset = 16;
+static constexpr dart::compiler::target::word Array_element_size = 4;
static constexpr dart::compiler::target::word ClassTable_elements_start_offset =
0;
static constexpr dart::compiler::target::word ClassTable_element_size = 1;
@@ -2768,9 +2768,9 @@
static constexpr dart::compiler::target::word
TwoByteString_elements_start_offset = 16;
static constexpr dart::compiler::target::word TwoByteString_element_size = 2;
-static constexpr dart::compiler::target::word Array_kMaxElements = 134217727;
+static constexpr dart::compiler::target::word Array_kMaxElements = 268435455;
static constexpr dart::compiler::target::word Array_kMaxNewSpaceElements =
- 32765;
+ 65532;
static constexpr dart::compiler::target::word
Instructions_kMonomorphicEntryOffsetJIT = 8;
static constexpr dart::compiler::target::word
@@ -2783,7 +2783,7 @@
Instructions_kBarePayloadAlignment = 4;
static constexpr dart::compiler::target::word
Instructions_kNonBarePayloadAlignment = 8;
-static constexpr dart::compiler::target::word OldPage_kBytesPerCardLog2 = 10;
+static constexpr dart::compiler::target::word OldPage_kBytesPerCardLog2 = 9;
static constexpr dart::compiler::target::word
NativeEntry_kNumCallWrapperArguments = 2;
static constexpr dart::compiler::target::word String_kMaxElements = 536870911;
@@ -2809,23 +2809,23 @@
static constexpr dart::compiler::target::word
AbstractType_type_test_stub_entry_point_offset = 8;
static constexpr dart::compiler::target::word ArgumentsDescriptor_count_offset =
- 32;
+ 20;
static constexpr dart::compiler::target::word ArgumentsDescriptor_size_offset =
- 40;
+ 24;
static constexpr dart::compiler::target::word
- ArgumentsDescriptor_first_named_entry_offset = 56;
+ ArgumentsDescriptor_first_named_entry_offset = 32;
static constexpr dart::compiler::target::word
- ArgumentsDescriptor_named_entry_size = 16;
+ ArgumentsDescriptor_named_entry_size = 8;
static constexpr dart::compiler::target::word ArgumentsDescriptor_name_offset =
0;
static constexpr dart::compiler::target::word
- ArgumentsDescriptor_position_offset = 8;
+ ArgumentsDescriptor_position_offset = 4;
static constexpr dart::compiler::target::word
- ArgumentsDescriptor_positional_count_offset = 48;
+ ArgumentsDescriptor_positional_count_offset = 28;
static constexpr dart::compiler::target::word
- ArgumentsDescriptor_type_args_len_offset = 24;
-static constexpr dart::compiler::target::word Array_data_offset = 24;
-static constexpr dart::compiler::target::word Array_length_offset = 16;
+ ArgumentsDescriptor_type_args_len_offset = 16;
+static constexpr dart::compiler::target::word Array_data_offset = 16;
+static constexpr dart::compiler::target::word Array_length_offset = 12;
static constexpr dart::compiler::target::word Array_tags_offset = 0;
static constexpr dart::compiler::target::word Array_type_arguments_offset = 8;
static constexpr dart::compiler::target::word Class_declaration_type_offset =
@@ -2837,13 +2837,13 @@
Class_host_type_arguments_field_offset_in_words_offset = 104;
static constexpr dart::compiler::target::word
SharedClassTable_class_heap_stats_table_offset = 0;
-static constexpr dart::compiler::target::word Closure_context_offset = 40;
+static constexpr dart::compiler::target::word Closure_context_offset = 24;
static constexpr dart::compiler::target::word
- Closure_delayed_type_arguments_offset = 24;
-static constexpr dart::compiler::target::word Closure_function_offset = 32;
+ Closure_delayed_type_arguments_offset = 16;
+static constexpr dart::compiler::target::word Closure_function_offset = 20;
static constexpr dart::compiler::target::word
- Closure_function_type_arguments_offset = 16;
-static constexpr dart::compiler::target::word Closure_hash_offset = 48;
+ Closure_function_type_arguments_offset = 12;
+static constexpr dart::compiler::target::word Closure_hash_offset = 28;
static constexpr dart::compiler::target::word
Closure_instantiator_type_arguments_offset = 8;
static constexpr dart::compiler::target::word
@@ -2883,9 +2883,9 @@
static constexpr dart::compiler::target::word FutureOr_type_arguments_offset =
8;
static constexpr dart::compiler::target::word GrowableObjectArray_data_offset =
- 24;
+ 16;
static constexpr dart::compiler::target::word
- GrowableObjectArray_length_offset = 16;
+ GrowableObjectArray_length_offset = 12;
static constexpr dart::compiler::target::word
GrowableObjectArray_type_arguments_offset = 8;
static constexpr dart::compiler::target::word OldPage_card_table_offset = 40;
@@ -2908,16 +2908,16 @@
IsolateGroup_cached_class_table_table_offset = 32;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 72;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 32;
-static constexpr dart::compiler::target::word LinkedHashMap_data_offset = 32;
+static constexpr dart::compiler::target::word LinkedHashMap_data_offset = 20;
static constexpr dart::compiler::target::word
- LinkedHashMap_deleted_keys_offset = 48;
+ LinkedHashMap_deleted_keys_offset = 28;
static constexpr dart::compiler::target::word LinkedHashMap_hash_mask_offset =
- 24;
-static constexpr dart::compiler::target::word LinkedHashMap_index_offset = 16;
+ 16;
+static constexpr dart::compiler::target::word LinkedHashMap_index_offset = 12;
static constexpr dart::compiler::target::word
LinkedHashMap_type_arguments_offset = 8;
static constexpr dart::compiler::target::word LinkedHashMap_used_data_offset =
- 40;
+ 24;
static constexpr dart::compiler::target::word LocalHandle_ptr_offset = 0;
static constexpr dart::compiler::target::word
MarkingStackBlock_pointers_offset = 16;
@@ -3180,7 +3180,7 @@
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_target_offset = 8;
static constexpr dart::compiler::target::word WeakProperty_key_offset = 8;
-static constexpr dart::compiler::target::word WeakProperty_value_offset = 16;
+static constexpr dart::compiler::target::word WeakProperty_value_offset = 12;
static constexpr dart::compiler::target::word Code_entry_point_offset[] = {
8, 24, 16, 32};
static constexpr dart::compiler::target::word
@@ -3190,11 +3190,11 @@
-1, 1512, 1520, 1528, -1, -1, -1, -1, -1, -1};
static constexpr dart::compiler::target::word AbstractType_InstanceSize = 24;
static constexpr dart::compiler::target::word ApiError_InstanceSize = 16;
-static constexpr dart::compiler::target::word Array_header_size = 24;
+static constexpr dart::compiler::target::word Array_header_size = 16;
static constexpr dart::compiler::target::word Bool_InstanceSize = 16;
static constexpr dart::compiler::target::word Capability_InstanceSize = 16;
static constexpr dart::compiler::target::word Class_InstanceSize = 120;
-static constexpr dart::compiler::target::word Closure_InstanceSize = 56;
+static constexpr dart::compiler::target::word Closure_InstanceSize = 32;
static constexpr dart::compiler::target::word ClosureData_InstanceSize = 24;
static constexpr dart::compiler::target::word CodeSourceMap_HeaderSize = 16;
static constexpr dart::compiler::target::word CompressedStackMaps_HeaderSize =
@@ -3217,7 +3217,7 @@
static constexpr dart::compiler::target::word FunctionType_InstanceSize = 48;
static constexpr dart::compiler::target::word FutureOr_InstanceSize = 16;
static constexpr dart::compiler::target::word GrowableObjectArray_InstanceSize =
- 32;
+ 24;
static constexpr dart::compiler::target::word ICData_InstanceSize = 56;
static constexpr dart::compiler::target::word Instance_InstanceSize = 8;
static constexpr dart::compiler::target::word Instructions_UnalignedHeaderSize =
@@ -3231,7 +3231,7 @@
static constexpr dart::compiler::target::word LanguageError_InstanceSize = 32;
static constexpr dart::compiler::target::word Library_InstanceSize = 112;
static constexpr dart::compiler::target::word LibraryPrefix_InstanceSize = 24;
-static constexpr dart::compiler::target::word LinkedHashMap_InstanceSize = 56;
+static constexpr dart::compiler::target::word LinkedHashMap_InstanceSize = 32;
static constexpr dart::compiler::target::word MegamorphicCache_InstanceSize =
48;
static constexpr dart::compiler::target::word Mint_InstanceSize = 16;
@@ -3271,7 +3271,7 @@
static constexpr dart::compiler::target::word UnlinkedCall_InstanceSize = 32;
static constexpr dart::compiler::target::word UnwindError_InstanceSize = 16;
static constexpr dart::compiler::target::word UserTag_InstanceSize = 24;
-static constexpr dart::compiler::target::word WeakProperty_InstanceSize = 32;
+static constexpr dart::compiler::target::word WeakProperty_InstanceSize = 24;
static constexpr dart::compiler::target::word
WeakSerializationReference_InstanceSize = 16;
#endif // defined(TARGET_ARCH_ARM64) && defined(DART_COMPRESSED_POINTERS)
@@ -5427,8 +5427,8 @@
84;
static constexpr dart::compiler::target::word
ICData_receivers_static_type_offset = 32;
-static constexpr dart::compiler::target::word Array_elements_start_offset = 24;
-static constexpr dart::compiler::target::word Array_element_size = 8;
+static constexpr dart::compiler::target::word Array_elements_start_offset = 16;
+static constexpr dart::compiler::target::word Array_element_size = 4;
static constexpr dart::compiler::target::word Code_elements_start_offset = 144;
static constexpr dart::compiler::target::word Code_element_size = 4;
static constexpr dart::compiler::target::word Context_elements_start_offset =
@@ -5457,9 +5457,9 @@
static constexpr dart::compiler::target::word
TwoByteString_elements_start_offset = 16;
static constexpr dart::compiler::target::word TwoByteString_element_size = 2;
-static constexpr dart::compiler::target::word Array_kMaxElements = 134217727;
+static constexpr dart::compiler::target::word Array_kMaxElements = 268435455;
static constexpr dart::compiler::target::word Array_kMaxNewSpaceElements =
- 32765;
+ 65532;
static constexpr dart::compiler::target::word
Instructions_kMonomorphicEntryOffsetJIT = 8;
static constexpr dart::compiler::target::word
@@ -5472,7 +5472,7 @@
Instructions_kBarePayloadAlignment = 4;
static constexpr dart::compiler::target::word
Instructions_kNonBarePayloadAlignment = 8;
-static constexpr dart::compiler::target::word OldPage_kBytesPerCardLog2 = 10;
+static constexpr dart::compiler::target::word OldPage_kBytesPerCardLog2 = 9;
static constexpr dart::compiler::target::word
NativeEntry_kNumCallWrapperArguments = 2;
static constexpr dart::compiler::target::word String_kMaxElements = 536870911;
@@ -5498,23 +5498,23 @@
static constexpr dart::compiler::target::word
AbstractType_type_test_stub_entry_point_offset = 8;
static constexpr dart::compiler::target::word ArgumentsDescriptor_count_offset =
- 32;
+ 20;
static constexpr dart::compiler::target::word ArgumentsDescriptor_size_offset =
- 40;
+ 24;
static constexpr dart::compiler::target::word
- ArgumentsDescriptor_first_named_entry_offset = 56;
+ ArgumentsDescriptor_first_named_entry_offset = 32;
static constexpr dart::compiler::target::word
- ArgumentsDescriptor_named_entry_size = 16;
+ ArgumentsDescriptor_named_entry_size = 8;
static constexpr dart::compiler::target::word ArgumentsDescriptor_name_offset =
0;
static constexpr dart::compiler::target::word
- ArgumentsDescriptor_position_offset = 8;
+ ArgumentsDescriptor_position_offset = 4;
static constexpr dart::compiler::target::word
- ArgumentsDescriptor_positional_count_offset = 48;
+ ArgumentsDescriptor_positional_count_offset = 28;
static constexpr dart::compiler::target::word
- ArgumentsDescriptor_type_args_len_offset = 24;
-static constexpr dart::compiler::target::word Array_data_offset = 24;
-static constexpr dart::compiler::target::word Array_length_offset = 16;
+ ArgumentsDescriptor_type_args_len_offset = 16;
+static constexpr dart::compiler::target::word Array_data_offset = 16;
+static constexpr dart::compiler::target::word Array_length_offset = 12;
static constexpr dart::compiler::target::word Array_tags_offset = 0;
static constexpr dart::compiler::target::word Array_type_arguments_offset = 8;
static constexpr dart::compiler::target::word Class_declaration_type_offset =
@@ -5524,13 +5524,13 @@
static constexpr dart::compiler::target::word Class_super_type_offset = 44;
static constexpr dart::compiler::target::word
Class_host_type_arguments_field_offset_in_words_offset = 100;
-static constexpr dart::compiler::target::word Closure_context_offset = 40;
+static constexpr dart::compiler::target::word Closure_context_offset = 24;
static constexpr dart::compiler::target::word
- Closure_delayed_type_arguments_offset = 24;
-static constexpr dart::compiler::target::word Closure_function_offset = 32;
+ Closure_delayed_type_arguments_offset = 16;
+static constexpr dart::compiler::target::word Closure_function_offset = 20;
static constexpr dart::compiler::target::word
- Closure_function_type_arguments_offset = 16;
-static constexpr dart::compiler::target::word Closure_hash_offset = 48;
+ Closure_function_type_arguments_offset = 12;
+static constexpr dart::compiler::target::word Closure_hash_offset = 28;
static constexpr dart::compiler::target::word
Closure_instantiator_type_arguments_offset = 8;
static constexpr dart::compiler::target::word
@@ -5570,9 +5570,9 @@
static constexpr dart::compiler::target::word FutureOr_type_arguments_offset =
8;
static constexpr dart::compiler::target::word GrowableObjectArray_data_offset =
- 24;
+ 16;
static constexpr dart::compiler::target::word
- GrowableObjectArray_length_offset = 16;
+ GrowableObjectArray_length_offset = 12;
static constexpr dart::compiler::target::word
GrowableObjectArray_type_arguments_offset = 8;
static constexpr dart::compiler::target::word OldPage_card_table_offset = 40;
@@ -5594,16 +5594,16 @@
static constexpr dart::compiler::target::word
IsolateGroup_cached_class_table_table_offset = 32;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 32;
-static constexpr dart::compiler::target::word LinkedHashMap_data_offset = 32;
+static constexpr dart::compiler::target::word LinkedHashMap_data_offset = 20;
static constexpr dart::compiler::target::word
- LinkedHashMap_deleted_keys_offset = 48;
+ LinkedHashMap_deleted_keys_offset = 28;
static constexpr dart::compiler::target::word LinkedHashMap_hash_mask_offset =
- 24;
-static constexpr dart::compiler::target::word LinkedHashMap_index_offset = 16;
+ 16;
+static constexpr dart::compiler::target::word LinkedHashMap_index_offset = 12;
static constexpr dart::compiler::target::word
LinkedHashMap_type_arguments_offset = 8;
static constexpr dart::compiler::target::word LinkedHashMap_used_data_offset =
- 40;
+ 24;
static constexpr dart::compiler::target::word LocalHandle_ptr_offset = 0;
static constexpr dart::compiler::target::word
MarkingStackBlock_pointers_offset = 16;
@@ -5866,7 +5866,7 @@
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_target_offset = 8;
static constexpr dart::compiler::target::word WeakProperty_key_offset = 8;
-static constexpr dart::compiler::target::word WeakProperty_value_offset = 16;
+static constexpr dart::compiler::target::word WeakProperty_value_offset = 12;
static constexpr dart::compiler::target::word Code_entry_point_offset[] = {
8, 24, 16, 32};
static constexpr dart::compiler::target::word
@@ -5875,11 +5875,11 @@
1424, 1432, 1440, -1, 1448, 1456, -1, -1};
static constexpr dart::compiler::target::word AbstractType_InstanceSize = 24;
static constexpr dart::compiler::target::word ApiError_InstanceSize = 16;
-static constexpr dart::compiler::target::word Array_header_size = 24;
+static constexpr dart::compiler::target::word Array_header_size = 16;
static constexpr dart::compiler::target::word Bool_InstanceSize = 16;
static constexpr dart::compiler::target::word Capability_InstanceSize = 16;
static constexpr dart::compiler::target::word Class_InstanceSize = 112;
-static constexpr dart::compiler::target::word Closure_InstanceSize = 56;
+static constexpr dart::compiler::target::word Closure_InstanceSize = 32;
static constexpr dart::compiler::target::word ClosureData_InstanceSize = 24;
static constexpr dart::compiler::target::word CodeSourceMap_HeaderSize = 16;
static constexpr dart::compiler::target::word CompressedStackMaps_HeaderSize =
@@ -5902,7 +5902,7 @@
static constexpr dart::compiler::target::word FunctionType_InstanceSize = 48;
static constexpr dart::compiler::target::word FutureOr_InstanceSize = 16;
static constexpr dart::compiler::target::word GrowableObjectArray_InstanceSize =
- 32;
+ 24;
static constexpr dart::compiler::target::word ICData_InstanceSize = 56;
static constexpr dart::compiler::target::word Instance_InstanceSize = 8;
static constexpr dart::compiler::target::word Instructions_UnalignedHeaderSize =
@@ -5916,7 +5916,7 @@
static constexpr dart::compiler::target::word LanguageError_InstanceSize = 32;
static constexpr dart::compiler::target::word Library_InstanceSize = 112;
static constexpr dart::compiler::target::word LibraryPrefix_InstanceSize = 24;
-static constexpr dart::compiler::target::word LinkedHashMap_InstanceSize = 56;
+static constexpr dart::compiler::target::word LinkedHashMap_InstanceSize = 32;
static constexpr dart::compiler::target::word MegamorphicCache_InstanceSize =
48;
static constexpr dart::compiler::target::word Mint_InstanceSize = 16;
@@ -5956,7 +5956,7 @@
static constexpr dart::compiler::target::word UnlinkedCall_InstanceSize = 32;
static constexpr dart::compiler::target::word UnwindError_InstanceSize = 16;
static constexpr dart::compiler::target::word UserTag_InstanceSize = 24;
-static constexpr dart::compiler::target::word WeakProperty_InstanceSize = 32;
+static constexpr dart::compiler::target::word WeakProperty_InstanceSize = 24;
static constexpr dart::compiler::target::word
WeakSerializationReference_InstanceSize = 16;
#endif // defined(TARGET_ARCH_X64) && defined(DART_COMPRESSED_POINTERS)
@@ -5966,8 +5966,8 @@
84;
static constexpr dart::compiler::target::word
ICData_receivers_static_type_offset = 32;
-static constexpr dart::compiler::target::word Array_elements_start_offset = 24;
-static constexpr dart::compiler::target::word Array_element_size = 8;
+static constexpr dart::compiler::target::word Array_elements_start_offset = 16;
+static constexpr dart::compiler::target::word Array_element_size = 4;
static constexpr dart::compiler::target::word Code_elements_start_offset = 144;
static constexpr dart::compiler::target::word Code_element_size = 4;
static constexpr dart::compiler::target::word Context_elements_start_offset =
@@ -5996,9 +5996,9 @@
static constexpr dart::compiler::target::word
TwoByteString_elements_start_offset = 16;
static constexpr dart::compiler::target::word TwoByteString_element_size = 2;
-static constexpr dart::compiler::target::word Array_kMaxElements = 134217727;
+static constexpr dart::compiler::target::word Array_kMaxElements = 268435455;
static constexpr dart::compiler::target::word Array_kMaxNewSpaceElements =
- 32765;
+ 65532;
static constexpr dart::compiler::target::word
Instructions_kMonomorphicEntryOffsetJIT = 8;
static constexpr dart::compiler::target::word
@@ -6011,7 +6011,7 @@
Instructions_kBarePayloadAlignment = 4;
static constexpr dart::compiler::target::word
Instructions_kNonBarePayloadAlignment = 8;
-static constexpr dart::compiler::target::word OldPage_kBytesPerCardLog2 = 10;
+static constexpr dart::compiler::target::word OldPage_kBytesPerCardLog2 = 9;
static constexpr dart::compiler::target::word
NativeEntry_kNumCallWrapperArguments = 2;
static constexpr dart::compiler::target::word String_kMaxElements = 536870911;
@@ -6037,23 +6037,23 @@
static constexpr dart::compiler::target::word
AbstractType_type_test_stub_entry_point_offset = 8;
static constexpr dart::compiler::target::word ArgumentsDescriptor_count_offset =
- 32;
+ 20;
static constexpr dart::compiler::target::word ArgumentsDescriptor_size_offset =
- 40;
+ 24;
static constexpr dart::compiler::target::word
- ArgumentsDescriptor_first_named_entry_offset = 56;
+ ArgumentsDescriptor_first_named_entry_offset = 32;
static constexpr dart::compiler::target::word
- ArgumentsDescriptor_named_entry_size = 16;
+ ArgumentsDescriptor_named_entry_size = 8;
static constexpr dart::compiler::target::word ArgumentsDescriptor_name_offset =
0;
static constexpr dart::compiler::target::word
- ArgumentsDescriptor_position_offset = 8;
+ ArgumentsDescriptor_position_offset = 4;
static constexpr dart::compiler::target::word
- ArgumentsDescriptor_positional_count_offset = 48;
+ ArgumentsDescriptor_positional_count_offset = 28;
static constexpr dart::compiler::target::word
- ArgumentsDescriptor_type_args_len_offset = 24;
-static constexpr dart::compiler::target::word Array_data_offset = 24;
-static constexpr dart::compiler::target::word Array_length_offset = 16;
+ ArgumentsDescriptor_type_args_len_offset = 16;
+static constexpr dart::compiler::target::word Array_data_offset = 16;
+static constexpr dart::compiler::target::word Array_length_offset = 12;
static constexpr dart::compiler::target::word Array_tags_offset = 0;
static constexpr dart::compiler::target::word Array_type_arguments_offset = 8;
static constexpr dart::compiler::target::word Class_declaration_type_offset =
@@ -6063,13 +6063,13 @@
static constexpr dart::compiler::target::word Class_super_type_offset = 44;
static constexpr dart::compiler::target::word
Class_host_type_arguments_field_offset_in_words_offset = 100;
-static constexpr dart::compiler::target::word Closure_context_offset = 40;
+static constexpr dart::compiler::target::word Closure_context_offset = 24;
static constexpr dart::compiler::target::word
- Closure_delayed_type_arguments_offset = 24;
-static constexpr dart::compiler::target::word Closure_function_offset = 32;
+ Closure_delayed_type_arguments_offset = 16;
+static constexpr dart::compiler::target::word Closure_function_offset = 20;
static constexpr dart::compiler::target::word
- Closure_function_type_arguments_offset = 16;
-static constexpr dart::compiler::target::word Closure_hash_offset = 48;
+ Closure_function_type_arguments_offset = 12;
+static constexpr dart::compiler::target::word Closure_hash_offset = 28;
static constexpr dart::compiler::target::word
Closure_instantiator_type_arguments_offset = 8;
static constexpr dart::compiler::target::word
@@ -6109,9 +6109,9 @@
static constexpr dart::compiler::target::word FutureOr_type_arguments_offset =
8;
static constexpr dart::compiler::target::word GrowableObjectArray_data_offset =
- 24;
+ 16;
static constexpr dart::compiler::target::word
- GrowableObjectArray_length_offset = 16;
+ GrowableObjectArray_length_offset = 12;
static constexpr dart::compiler::target::word
GrowableObjectArray_type_arguments_offset = 8;
static constexpr dart::compiler::target::word OldPage_card_table_offset = 40;
@@ -6133,16 +6133,16 @@
static constexpr dart::compiler::target::word
IsolateGroup_cached_class_table_table_offset = 32;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 32;
-static constexpr dart::compiler::target::word LinkedHashMap_data_offset = 32;
+static constexpr dart::compiler::target::word LinkedHashMap_data_offset = 20;
static constexpr dart::compiler::target::word
- LinkedHashMap_deleted_keys_offset = 48;
+ LinkedHashMap_deleted_keys_offset = 28;
static constexpr dart::compiler::target::word LinkedHashMap_hash_mask_offset =
- 24;
-static constexpr dart::compiler::target::word LinkedHashMap_index_offset = 16;
+ 16;
+static constexpr dart::compiler::target::word LinkedHashMap_index_offset = 12;
static constexpr dart::compiler::target::word
LinkedHashMap_type_arguments_offset = 8;
static constexpr dart::compiler::target::word LinkedHashMap_used_data_offset =
- 40;
+ 24;
static constexpr dart::compiler::target::word LocalHandle_ptr_offset = 0;
static constexpr dart::compiler::target::word
MarkingStackBlock_pointers_offset = 16;
@@ -6405,7 +6405,7 @@
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_target_offset = 8;
static constexpr dart::compiler::target::word WeakProperty_key_offset = 8;
-static constexpr dart::compiler::target::word WeakProperty_value_offset = 16;
+static constexpr dart::compiler::target::word WeakProperty_value_offset = 12;
static constexpr dart::compiler::target::word Code_entry_point_offset[] = {
8, 24, 16, 32};
static constexpr dart::compiler::target::word
@@ -6415,11 +6415,11 @@
-1, 1512, 1520, 1528, -1, -1, -1, -1, -1, -1};
static constexpr dart::compiler::target::word AbstractType_InstanceSize = 24;
static constexpr dart::compiler::target::word ApiError_InstanceSize = 16;
-static constexpr dart::compiler::target::word Array_header_size = 24;
+static constexpr dart::compiler::target::word Array_header_size = 16;
static constexpr dart::compiler::target::word Bool_InstanceSize = 16;
static constexpr dart::compiler::target::word Capability_InstanceSize = 16;
static constexpr dart::compiler::target::word Class_InstanceSize = 112;
-static constexpr dart::compiler::target::word Closure_InstanceSize = 56;
+static constexpr dart::compiler::target::word Closure_InstanceSize = 32;
static constexpr dart::compiler::target::word ClosureData_InstanceSize = 24;
static constexpr dart::compiler::target::word CodeSourceMap_HeaderSize = 16;
static constexpr dart::compiler::target::word CompressedStackMaps_HeaderSize =
@@ -6442,7 +6442,7 @@
static constexpr dart::compiler::target::word FunctionType_InstanceSize = 48;
static constexpr dart::compiler::target::word FutureOr_InstanceSize = 16;
static constexpr dart::compiler::target::word GrowableObjectArray_InstanceSize =
- 32;
+ 24;
static constexpr dart::compiler::target::word ICData_InstanceSize = 56;
static constexpr dart::compiler::target::word Instance_InstanceSize = 8;
static constexpr dart::compiler::target::word Instructions_UnalignedHeaderSize =
@@ -6456,7 +6456,7 @@
static constexpr dart::compiler::target::word LanguageError_InstanceSize = 32;
static constexpr dart::compiler::target::word Library_InstanceSize = 112;
static constexpr dart::compiler::target::word LibraryPrefix_InstanceSize = 24;
-static constexpr dart::compiler::target::word LinkedHashMap_InstanceSize = 56;
+static constexpr dart::compiler::target::word LinkedHashMap_InstanceSize = 32;
static constexpr dart::compiler::target::word MegamorphicCache_InstanceSize =
48;
static constexpr dart::compiler::target::word Mint_InstanceSize = 16;
@@ -6496,7 +6496,7 @@
static constexpr dart::compiler::target::word UnlinkedCall_InstanceSize = 32;
static constexpr dart::compiler::target::word UnwindError_InstanceSize = 16;
static constexpr dart::compiler::target::word UserTag_InstanceSize = 24;
-static constexpr dart::compiler::target::word WeakProperty_InstanceSize = 32;
+static constexpr dart::compiler::target::word WeakProperty_InstanceSize = 24;
static constexpr dart::compiler::target::word
WeakSerializationReference_InstanceSize = 16;
#endif // defined(TARGET_ARCH_ARM64) && defined(DART_COMPRESSED_POINTERS)
@@ -8331,10 +8331,10 @@
#if defined(TARGET_ARCH_X64) && defined(DART_COMPRESSED_POINTERS)
static constexpr dart::compiler::target::word AOT_Closure_entry_point_offset =
- 56;
+ 32;
static constexpr dart::compiler::target::word AOT_Array_elements_start_offset =
- 24;
-static constexpr dart::compiler::target::word AOT_Array_element_size = 8;
+ 16;
+static constexpr dart::compiler::target::word AOT_Array_element_size = 4;
static constexpr dart::compiler::target::word
AOT_ClassTable_elements_start_offset = 0;
static constexpr dart::compiler::target::word AOT_ClassTable_element_size = 1;
@@ -8372,9 +8372,9 @@
static constexpr dart::compiler::target::word AOT_TwoByteString_element_size =
2;
static constexpr dart::compiler::target::word AOT_Array_kMaxElements =
- 134217727;
+ 268435455;
static constexpr dart::compiler::target::word AOT_Array_kMaxNewSpaceElements =
- 32765;
+ 65532;
static constexpr dart::compiler::target::word
AOT_Instructions_kMonomorphicEntryOffsetJIT = 8;
static constexpr dart::compiler::target::word
@@ -8387,8 +8387,7 @@
AOT_Instructions_kBarePayloadAlignment = 4;
static constexpr dart::compiler::target::word
AOT_Instructions_kNonBarePayloadAlignment = 8;
-static constexpr dart::compiler::target::word AOT_OldPage_kBytesPerCardLog2 =
- 10;
+static constexpr dart::compiler::target::word AOT_OldPage_kBytesPerCardLog2 = 9;
static constexpr dart::compiler::target::word
AOT_NativeEntry_kNumCallWrapperArguments = 2;
static constexpr dart::compiler::target::word AOT_String_kMaxElements =
@@ -8416,23 +8415,23 @@
static constexpr dart::compiler::target::word
AOT_AbstractType_type_test_stub_entry_point_offset = 8;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_count_offset = 32;
+ AOT_ArgumentsDescriptor_count_offset = 20;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_size_offset = 40;
+ AOT_ArgumentsDescriptor_size_offset = 24;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_first_named_entry_offset = 56;
+ AOT_ArgumentsDescriptor_first_named_entry_offset = 32;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_named_entry_size = 16;
+ AOT_ArgumentsDescriptor_named_entry_size = 8;
static constexpr dart::compiler::target::word
AOT_ArgumentsDescriptor_name_offset = 0;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_position_offset = 8;
+ AOT_ArgumentsDescriptor_position_offset = 4;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_positional_count_offset = 48;
+ AOT_ArgumentsDescriptor_positional_count_offset = 28;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_type_args_len_offset = 24;
-static constexpr dart::compiler::target::word AOT_Array_data_offset = 24;
-static constexpr dart::compiler::target::word AOT_Array_length_offset = 16;
+ AOT_ArgumentsDescriptor_type_args_len_offset = 16;
+static constexpr dart::compiler::target::word AOT_Array_data_offset = 16;
+static constexpr dart::compiler::target::word AOT_Array_length_offset = 12;
static constexpr dart::compiler::target::word AOT_Array_tags_offset = 0;
static constexpr dart::compiler::target::word AOT_Array_type_arguments_offset =
8;
@@ -8445,13 +8444,13 @@
AOT_Class_host_type_arguments_field_offset_in_words_offset = 88;
static constexpr dart::compiler::target::word
AOT_SharedClassTable_class_heap_stats_table_offset = 0;
-static constexpr dart::compiler::target::word AOT_Closure_context_offset = 40;
+static constexpr dart::compiler::target::word AOT_Closure_context_offset = 24;
static constexpr dart::compiler::target::word
- AOT_Closure_delayed_type_arguments_offset = 24;
-static constexpr dart::compiler::target::word AOT_Closure_function_offset = 32;
+ AOT_Closure_delayed_type_arguments_offset = 16;
+static constexpr dart::compiler::target::word AOT_Closure_function_offset = 20;
static constexpr dart::compiler::target::word
- AOT_Closure_function_type_arguments_offset = 16;
-static constexpr dart::compiler::target::word AOT_Closure_hash_offset = 48;
+ AOT_Closure_function_type_arguments_offset = 12;
+static constexpr dart::compiler::target::word AOT_Closure_hash_offset = 28;
static constexpr dart::compiler::target::word
AOT_Closure_instantiator_type_arguments_offset = 8;
static constexpr dart::compiler::target::word
@@ -8493,9 +8492,9 @@
static constexpr dart::compiler::target::word
AOT_FutureOr_type_arguments_offset = 8;
static constexpr dart::compiler::target::word
- AOT_GrowableObjectArray_data_offset = 24;
+ AOT_GrowableObjectArray_data_offset = 16;
static constexpr dart::compiler::target::word
- AOT_GrowableObjectArray_length_offset = 16;
+ AOT_GrowableObjectArray_length_offset = 12;
static constexpr dart::compiler::target::word
AOT_GrowableObjectArray_type_arguments_offset = 8;
static constexpr dart::compiler::target::word AOT_OldPage_card_table_offset =
@@ -8524,17 +8523,17 @@
72;
static constexpr dart::compiler::target::word AOT_Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word AOT_LinkedHashMap_data_offset =
- 32;
+ 20;
static constexpr dart::compiler::target::word
- AOT_LinkedHashMap_deleted_keys_offset = 48;
+ AOT_LinkedHashMap_deleted_keys_offset = 28;
static constexpr dart::compiler::target::word
- AOT_LinkedHashMap_hash_mask_offset = 24;
+ AOT_LinkedHashMap_hash_mask_offset = 16;
static constexpr dart::compiler::target::word AOT_LinkedHashMap_index_offset =
- 16;
+ 12;
static constexpr dart::compiler::target::word
AOT_LinkedHashMap_type_arguments_offset = 8;
static constexpr dart::compiler::target::word
- AOT_LinkedHashMap_used_data_offset = 40;
+ AOT_LinkedHashMap_used_data_offset = 24;
static constexpr dart::compiler::target::word AOT_LocalHandle_ptr_offset = 0;
static constexpr dart::compiler::target::word
AOT_MarkingStackBlock_pointers_offset = 16;
@@ -8828,7 +8827,7 @@
AOT_MonomorphicSmiableCall_target_offset = 8;
static constexpr dart::compiler::target::word AOT_WeakProperty_key_offset = 8;
static constexpr dart::compiler::target::word AOT_WeakProperty_value_offset =
- 16;
+ 12;
static constexpr dart::compiler::target::word AOT_Code_entry_point_offset[] = {
8, 24, 16, 32};
static constexpr dart::compiler::target::word
@@ -8838,11 +8837,11 @@
static constexpr dart::compiler::target::word AOT_AbstractType_InstanceSize =
24;
static constexpr dart::compiler::target::word AOT_ApiError_InstanceSize = 16;
-static constexpr dart::compiler::target::word AOT_Array_header_size = 24;
+static constexpr dart::compiler::target::word AOT_Array_header_size = 16;
static constexpr dart::compiler::target::word AOT_Bool_InstanceSize = 16;
static constexpr dart::compiler::target::word AOT_Capability_InstanceSize = 16;
static constexpr dart::compiler::target::word AOT_Class_InstanceSize = 96;
-static constexpr dart::compiler::target::word AOT_Closure_InstanceSize = 64;
+static constexpr dart::compiler::target::word AOT_Closure_InstanceSize = 40;
static constexpr dart::compiler::target::word AOT_ClosureData_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_CodeSourceMap_HeaderSize = 16;
static constexpr dart::compiler::target::word
@@ -8867,7 +8866,7 @@
48;
static constexpr dart::compiler::target::word AOT_FutureOr_InstanceSize = 16;
static constexpr dart::compiler::target::word
- AOT_GrowableObjectArray_InstanceSize = 32;
+ AOT_GrowableObjectArray_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_ICData_InstanceSize = 48;
static constexpr dart::compiler::target::word AOT_Instance_InstanceSize = 8;
static constexpr dart::compiler::target::word
@@ -8884,7 +8883,7 @@
static constexpr dart::compiler::target::word AOT_LibraryPrefix_InstanceSize =
24;
static constexpr dart::compiler::target::word AOT_LinkedHashMap_InstanceSize =
- 56;
+ 32;
static constexpr dart::compiler::target::word
AOT_MegamorphicCache_InstanceSize = 48;
static constexpr dart::compiler::target::word AOT_Mint_InstanceSize = 16;
@@ -8932,17 +8931,17 @@
static constexpr dart::compiler::target::word AOT_UnwindError_InstanceSize = 16;
static constexpr dart::compiler::target::word AOT_UserTag_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_WeakProperty_InstanceSize =
- 32;
+ 24;
static constexpr dart::compiler::target::word
AOT_WeakSerializationReference_InstanceSize = 16;
#endif // defined(TARGET_ARCH_X64) && defined(DART_COMPRESSED_POINTERS)
#if defined(TARGET_ARCH_ARM64) && defined(DART_COMPRESSED_POINTERS)
static constexpr dart::compiler::target::word AOT_Closure_entry_point_offset =
- 56;
+ 32;
static constexpr dart::compiler::target::word AOT_Array_elements_start_offset =
- 24;
-static constexpr dart::compiler::target::word AOT_Array_element_size = 8;
+ 16;
+static constexpr dart::compiler::target::word AOT_Array_element_size = 4;
static constexpr dart::compiler::target::word
AOT_ClassTable_elements_start_offset = 0;
static constexpr dart::compiler::target::word AOT_ClassTable_element_size = 1;
@@ -8980,9 +8979,9 @@
static constexpr dart::compiler::target::word AOT_TwoByteString_element_size =
2;
static constexpr dart::compiler::target::word AOT_Array_kMaxElements =
- 134217727;
+ 268435455;
static constexpr dart::compiler::target::word AOT_Array_kMaxNewSpaceElements =
- 32765;
+ 65532;
static constexpr dart::compiler::target::word
AOT_Instructions_kMonomorphicEntryOffsetJIT = 8;
static constexpr dart::compiler::target::word
@@ -8995,8 +8994,7 @@
AOT_Instructions_kBarePayloadAlignment = 4;
static constexpr dart::compiler::target::word
AOT_Instructions_kNonBarePayloadAlignment = 8;
-static constexpr dart::compiler::target::word AOT_OldPage_kBytesPerCardLog2 =
- 10;
+static constexpr dart::compiler::target::word AOT_OldPage_kBytesPerCardLog2 = 9;
static constexpr dart::compiler::target::word
AOT_NativeEntry_kNumCallWrapperArguments = 2;
static constexpr dart::compiler::target::word AOT_String_kMaxElements =
@@ -9024,23 +9022,23 @@
static constexpr dart::compiler::target::word
AOT_AbstractType_type_test_stub_entry_point_offset = 8;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_count_offset = 32;
+ AOT_ArgumentsDescriptor_count_offset = 20;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_size_offset = 40;
+ AOT_ArgumentsDescriptor_size_offset = 24;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_first_named_entry_offset = 56;
+ AOT_ArgumentsDescriptor_first_named_entry_offset = 32;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_named_entry_size = 16;
+ AOT_ArgumentsDescriptor_named_entry_size = 8;
static constexpr dart::compiler::target::word
AOT_ArgumentsDescriptor_name_offset = 0;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_position_offset = 8;
+ AOT_ArgumentsDescriptor_position_offset = 4;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_positional_count_offset = 48;
+ AOT_ArgumentsDescriptor_positional_count_offset = 28;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_type_args_len_offset = 24;
-static constexpr dart::compiler::target::word AOT_Array_data_offset = 24;
-static constexpr dart::compiler::target::word AOT_Array_length_offset = 16;
+ AOT_ArgumentsDescriptor_type_args_len_offset = 16;
+static constexpr dart::compiler::target::word AOT_Array_data_offset = 16;
+static constexpr dart::compiler::target::word AOT_Array_length_offset = 12;
static constexpr dart::compiler::target::word AOT_Array_tags_offset = 0;
static constexpr dart::compiler::target::word AOT_Array_type_arguments_offset =
8;
@@ -9053,13 +9051,13 @@
AOT_Class_host_type_arguments_field_offset_in_words_offset = 88;
static constexpr dart::compiler::target::word
AOT_SharedClassTable_class_heap_stats_table_offset = 0;
-static constexpr dart::compiler::target::word AOT_Closure_context_offset = 40;
+static constexpr dart::compiler::target::word AOT_Closure_context_offset = 24;
static constexpr dart::compiler::target::word
- AOT_Closure_delayed_type_arguments_offset = 24;
-static constexpr dart::compiler::target::word AOT_Closure_function_offset = 32;
+ AOT_Closure_delayed_type_arguments_offset = 16;
+static constexpr dart::compiler::target::word AOT_Closure_function_offset = 20;
static constexpr dart::compiler::target::word
- AOT_Closure_function_type_arguments_offset = 16;
-static constexpr dart::compiler::target::word AOT_Closure_hash_offset = 48;
+ AOT_Closure_function_type_arguments_offset = 12;
+static constexpr dart::compiler::target::word AOT_Closure_hash_offset = 28;
static constexpr dart::compiler::target::word
AOT_Closure_instantiator_type_arguments_offset = 8;
static constexpr dart::compiler::target::word
@@ -9101,9 +9099,9 @@
static constexpr dart::compiler::target::word
AOT_FutureOr_type_arguments_offset = 8;
static constexpr dart::compiler::target::word
- AOT_GrowableObjectArray_data_offset = 24;
+ AOT_GrowableObjectArray_data_offset = 16;
static constexpr dart::compiler::target::word
- AOT_GrowableObjectArray_length_offset = 16;
+ AOT_GrowableObjectArray_length_offset = 12;
static constexpr dart::compiler::target::word
AOT_GrowableObjectArray_type_arguments_offset = 8;
static constexpr dart::compiler::target::word AOT_OldPage_card_table_offset =
@@ -9132,17 +9130,17 @@
72;
static constexpr dart::compiler::target::word AOT_Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word AOT_LinkedHashMap_data_offset =
- 32;
+ 20;
static constexpr dart::compiler::target::word
- AOT_LinkedHashMap_deleted_keys_offset = 48;
+ AOT_LinkedHashMap_deleted_keys_offset = 28;
static constexpr dart::compiler::target::word
- AOT_LinkedHashMap_hash_mask_offset = 24;
+ AOT_LinkedHashMap_hash_mask_offset = 16;
static constexpr dart::compiler::target::word AOT_LinkedHashMap_index_offset =
- 16;
+ 12;
static constexpr dart::compiler::target::word
AOT_LinkedHashMap_type_arguments_offset = 8;
static constexpr dart::compiler::target::word
- AOT_LinkedHashMap_used_data_offset = 40;
+ AOT_LinkedHashMap_used_data_offset = 24;
static constexpr dart::compiler::target::word AOT_LocalHandle_ptr_offset = 0;
static constexpr dart::compiler::target::word
AOT_MarkingStackBlock_pointers_offset = 16;
@@ -9436,7 +9434,7 @@
AOT_MonomorphicSmiableCall_target_offset = 8;
static constexpr dart::compiler::target::word AOT_WeakProperty_key_offset = 8;
static constexpr dart::compiler::target::word AOT_WeakProperty_value_offset =
- 16;
+ 12;
static constexpr dart::compiler::target::word AOT_Code_entry_point_offset[] = {
8, 24, 16, 32};
static constexpr dart::compiler::target::word
@@ -9447,11 +9445,11 @@
static constexpr dart::compiler::target::word AOT_AbstractType_InstanceSize =
24;
static constexpr dart::compiler::target::word AOT_ApiError_InstanceSize = 16;
-static constexpr dart::compiler::target::word AOT_Array_header_size = 24;
+static constexpr dart::compiler::target::word AOT_Array_header_size = 16;
static constexpr dart::compiler::target::word AOT_Bool_InstanceSize = 16;
static constexpr dart::compiler::target::word AOT_Capability_InstanceSize = 16;
static constexpr dart::compiler::target::word AOT_Class_InstanceSize = 96;
-static constexpr dart::compiler::target::word AOT_Closure_InstanceSize = 64;
+static constexpr dart::compiler::target::word AOT_Closure_InstanceSize = 40;
static constexpr dart::compiler::target::word AOT_ClosureData_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_CodeSourceMap_HeaderSize = 16;
static constexpr dart::compiler::target::word
@@ -9476,7 +9474,7 @@
48;
static constexpr dart::compiler::target::word AOT_FutureOr_InstanceSize = 16;
static constexpr dart::compiler::target::word
- AOT_GrowableObjectArray_InstanceSize = 32;
+ AOT_GrowableObjectArray_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_ICData_InstanceSize = 48;
static constexpr dart::compiler::target::word AOT_Instance_InstanceSize = 8;
static constexpr dart::compiler::target::word
@@ -9493,7 +9491,7 @@
static constexpr dart::compiler::target::word AOT_LibraryPrefix_InstanceSize =
24;
static constexpr dart::compiler::target::word AOT_LinkedHashMap_InstanceSize =
- 56;
+ 32;
static constexpr dart::compiler::target::word
AOT_MegamorphicCache_InstanceSize = 48;
static constexpr dart::compiler::target::word AOT_Mint_InstanceSize = 16;
@@ -9541,7 +9539,7 @@
static constexpr dart::compiler::target::word AOT_UnwindError_InstanceSize = 16;
static constexpr dart::compiler::target::word AOT_UserTag_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_WeakProperty_InstanceSize =
- 32;
+ 24;
static constexpr dart::compiler::target::word
AOT_WeakSerializationReference_InstanceSize = 16;
#endif // defined(TARGET_ARCH_ARM64) && defined(DART_COMPRESSED_POINTERS)
@@ -11353,10 +11351,10 @@
#if defined(TARGET_ARCH_X64) && defined(DART_COMPRESSED_POINTERS)
static constexpr dart::compiler::target::word AOT_Closure_entry_point_offset =
- 56;
+ 32;
static constexpr dart::compiler::target::word AOT_Array_elements_start_offset =
- 24;
-static constexpr dart::compiler::target::word AOT_Array_element_size = 8;
+ 16;
+static constexpr dart::compiler::target::word AOT_Array_element_size = 4;
static constexpr dart::compiler::target::word AOT_Code_elements_start_offset =
120;
static constexpr dart::compiler::target::word AOT_Code_element_size = 4;
@@ -11391,9 +11389,9 @@
static constexpr dart::compiler::target::word AOT_TwoByteString_element_size =
2;
static constexpr dart::compiler::target::word AOT_Array_kMaxElements =
- 134217727;
+ 268435455;
static constexpr dart::compiler::target::word AOT_Array_kMaxNewSpaceElements =
- 32765;
+ 65532;
static constexpr dart::compiler::target::word
AOT_Instructions_kMonomorphicEntryOffsetJIT = 8;
static constexpr dart::compiler::target::word
@@ -11406,8 +11404,7 @@
AOT_Instructions_kBarePayloadAlignment = 4;
static constexpr dart::compiler::target::word
AOT_Instructions_kNonBarePayloadAlignment = 8;
-static constexpr dart::compiler::target::word AOT_OldPage_kBytesPerCardLog2 =
- 10;
+static constexpr dart::compiler::target::word AOT_OldPage_kBytesPerCardLog2 = 9;
static constexpr dart::compiler::target::word
AOT_NativeEntry_kNumCallWrapperArguments = 2;
static constexpr dart::compiler::target::word AOT_String_kMaxElements =
@@ -11435,23 +11432,23 @@
static constexpr dart::compiler::target::word
AOT_AbstractType_type_test_stub_entry_point_offset = 8;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_count_offset = 32;
+ AOT_ArgumentsDescriptor_count_offset = 20;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_size_offset = 40;
+ AOT_ArgumentsDescriptor_size_offset = 24;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_first_named_entry_offset = 56;
+ AOT_ArgumentsDescriptor_first_named_entry_offset = 32;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_named_entry_size = 16;
+ AOT_ArgumentsDescriptor_named_entry_size = 8;
static constexpr dart::compiler::target::word
AOT_ArgumentsDescriptor_name_offset = 0;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_position_offset = 8;
+ AOT_ArgumentsDescriptor_position_offset = 4;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_positional_count_offset = 48;
+ AOT_ArgumentsDescriptor_positional_count_offset = 28;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_type_args_len_offset = 24;
-static constexpr dart::compiler::target::word AOT_Array_data_offset = 24;
-static constexpr dart::compiler::target::word AOT_Array_length_offset = 16;
+ AOT_ArgumentsDescriptor_type_args_len_offset = 16;
+static constexpr dart::compiler::target::word AOT_Array_data_offset = 16;
+static constexpr dart::compiler::target::word AOT_Array_length_offset = 12;
static constexpr dart::compiler::target::word AOT_Array_tags_offset = 0;
static constexpr dart::compiler::target::word AOT_Array_type_arguments_offset =
8;
@@ -11462,13 +11459,13 @@
static constexpr dart::compiler::target::word AOT_Class_super_type_offset = 44;
static constexpr dart::compiler::target::word
AOT_Class_host_type_arguments_field_offset_in_words_offset = 76;
-static constexpr dart::compiler::target::word AOT_Closure_context_offset = 40;
+static constexpr dart::compiler::target::word AOT_Closure_context_offset = 24;
static constexpr dart::compiler::target::word
- AOT_Closure_delayed_type_arguments_offset = 24;
-static constexpr dart::compiler::target::word AOT_Closure_function_offset = 32;
+ AOT_Closure_delayed_type_arguments_offset = 16;
+static constexpr dart::compiler::target::word AOT_Closure_function_offset = 20;
static constexpr dart::compiler::target::word
- AOT_Closure_function_type_arguments_offset = 16;
-static constexpr dart::compiler::target::word AOT_Closure_hash_offset = 48;
+ AOT_Closure_function_type_arguments_offset = 12;
+static constexpr dart::compiler::target::word AOT_Closure_hash_offset = 28;
static constexpr dart::compiler::target::word
AOT_Closure_instantiator_type_arguments_offset = 8;
static constexpr dart::compiler::target::word
@@ -11510,9 +11507,9 @@
static constexpr dart::compiler::target::word
AOT_FutureOr_type_arguments_offset = 8;
static constexpr dart::compiler::target::word
- AOT_GrowableObjectArray_data_offset = 24;
+ AOT_GrowableObjectArray_data_offset = 16;
static constexpr dart::compiler::target::word
- AOT_GrowableObjectArray_length_offset = 16;
+ AOT_GrowableObjectArray_length_offset = 12;
static constexpr dart::compiler::target::word
AOT_GrowableObjectArray_type_arguments_offset = 8;
static constexpr dart::compiler::target::word AOT_OldPage_card_table_offset =
@@ -11539,17 +11536,17 @@
AOT_IsolateGroup_cached_class_table_table_offset = 32;
static constexpr dart::compiler::target::word AOT_Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word AOT_LinkedHashMap_data_offset =
- 32;
+ 20;
static constexpr dart::compiler::target::word
- AOT_LinkedHashMap_deleted_keys_offset = 48;
+ AOT_LinkedHashMap_deleted_keys_offset = 28;
static constexpr dart::compiler::target::word
- AOT_LinkedHashMap_hash_mask_offset = 24;
+ AOT_LinkedHashMap_hash_mask_offset = 16;
static constexpr dart::compiler::target::word AOT_LinkedHashMap_index_offset =
- 16;
+ 12;
static constexpr dart::compiler::target::word
AOT_LinkedHashMap_type_arguments_offset = 8;
static constexpr dart::compiler::target::word
- AOT_LinkedHashMap_used_data_offset = 40;
+ AOT_LinkedHashMap_used_data_offset = 24;
static constexpr dart::compiler::target::word AOT_LocalHandle_ptr_offset = 0;
static constexpr dart::compiler::target::word
AOT_MarkingStackBlock_pointers_offset = 16;
@@ -11843,7 +11840,7 @@
AOT_MonomorphicSmiableCall_target_offset = 8;
static constexpr dart::compiler::target::word AOT_WeakProperty_key_offset = 8;
static constexpr dart::compiler::target::word AOT_WeakProperty_value_offset =
- 16;
+ 12;
static constexpr dart::compiler::target::word AOT_Code_entry_point_offset[] = {
8, 24, 16, 32};
static constexpr dart::compiler::target::word
@@ -11853,11 +11850,11 @@
static constexpr dart::compiler::target::word AOT_AbstractType_InstanceSize =
24;
static constexpr dart::compiler::target::word AOT_ApiError_InstanceSize = 16;
-static constexpr dart::compiler::target::word AOT_Array_header_size = 24;
+static constexpr dart::compiler::target::word AOT_Array_header_size = 16;
static constexpr dart::compiler::target::word AOT_Bool_InstanceSize = 16;
static constexpr dart::compiler::target::word AOT_Capability_InstanceSize = 16;
static constexpr dart::compiler::target::word AOT_Class_InstanceSize = 88;
-static constexpr dart::compiler::target::word AOT_Closure_InstanceSize = 64;
+static constexpr dart::compiler::target::word AOT_Closure_InstanceSize = 40;
static constexpr dart::compiler::target::word AOT_ClosureData_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_CodeSourceMap_HeaderSize = 16;
static constexpr dart::compiler::target::word
@@ -11882,7 +11879,7 @@
48;
static constexpr dart::compiler::target::word AOT_FutureOr_InstanceSize = 16;
static constexpr dart::compiler::target::word
- AOT_GrowableObjectArray_InstanceSize = 32;
+ AOT_GrowableObjectArray_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_ICData_InstanceSize = 48;
static constexpr dart::compiler::target::word AOT_Instance_InstanceSize = 8;
static constexpr dart::compiler::target::word
@@ -11899,7 +11896,7 @@
static constexpr dart::compiler::target::word AOT_LibraryPrefix_InstanceSize =
24;
static constexpr dart::compiler::target::word AOT_LinkedHashMap_InstanceSize =
- 56;
+ 32;
static constexpr dart::compiler::target::word
AOT_MegamorphicCache_InstanceSize = 48;
static constexpr dart::compiler::target::word AOT_Mint_InstanceSize = 16;
@@ -11947,17 +11944,17 @@
static constexpr dart::compiler::target::word AOT_UnwindError_InstanceSize = 16;
static constexpr dart::compiler::target::word AOT_UserTag_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_WeakProperty_InstanceSize =
- 32;
+ 24;
static constexpr dart::compiler::target::word
AOT_WeakSerializationReference_InstanceSize = 16;
#endif // defined(TARGET_ARCH_X64) && defined(DART_COMPRESSED_POINTERS)
#if defined(TARGET_ARCH_ARM64) && defined(DART_COMPRESSED_POINTERS)
static constexpr dart::compiler::target::word AOT_Closure_entry_point_offset =
- 56;
+ 32;
static constexpr dart::compiler::target::word AOT_Array_elements_start_offset =
- 24;
-static constexpr dart::compiler::target::word AOT_Array_element_size = 8;
+ 16;
+static constexpr dart::compiler::target::word AOT_Array_element_size = 4;
static constexpr dart::compiler::target::word AOT_Code_elements_start_offset =
120;
static constexpr dart::compiler::target::word AOT_Code_element_size = 4;
@@ -11992,9 +11989,9 @@
static constexpr dart::compiler::target::word AOT_TwoByteString_element_size =
2;
static constexpr dart::compiler::target::word AOT_Array_kMaxElements =
- 134217727;
+ 268435455;
static constexpr dart::compiler::target::word AOT_Array_kMaxNewSpaceElements =
- 32765;
+ 65532;
static constexpr dart::compiler::target::word
AOT_Instructions_kMonomorphicEntryOffsetJIT = 8;
static constexpr dart::compiler::target::word
@@ -12007,8 +12004,7 @@
AOT_Instructions_kBarePayloadAlignment = 4;
static constexpr dart::compiler::target::word
AOT_Instructions_kNonBarePayloadAlignment = 8;
-static constexpr dart::compiler::target::word AOT_OldPage_kBytesPerCardLog2 =
- 10;
+static constexpr dart::compiler::target::word AOT_OldPage_kBytesPerCardLog2 = 9;
static constexpr dart::compiler::target::word
AOT_NativeEntry_kNumCallWrapperArguments = 2;
static constexpr dart::compiler::target::word AOT_String_kMaxElements =
@@ -12036,23 +12032,23 @@
static constexpr dart::compiler::target::word
AOT_AbstractType_type_test_stub_entry_point_offset = 8;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_count_offset = 32;
+ AOT_ArgumentsDescriptor_count_offset = 20;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_size_offset = 40;
+ AOT_ArgumentsDescriptor_size_offset = 24;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_first_named_entry_offset = 56;
+ AOT_ArgumentsDescriptor_first_named_entry_offset = 32;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_named_entry_size = 16;
+ AOT_ArgumentsDescriptor_named_entry_size = 8;
static constexpr dart::compiler::target::word
AOT_ArgumentsDescriptor_name_offset = 0;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_position_offset = 8;
+ AOT_ArgumentsDescriptor_position_offset = 4;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_positional_count_offset = 48;
+ AOT_ArgumentsDescriptor_positional_count_offset = 28;
static constexpr dart::compiler::target::word
- AOT_ArgumentsDescriptor_type_args_len_offset = 24;
-static constexpr dart::compiler::target::word AOT_Array_data_offset = 24;
-static constexpr dart::compiler::target::word AOT_Array_length_offset = 16;
+ AOT_ArgumentsDescriptor_type_args_len_offset = 16;
+static constexpr dart::compiler::target::word AOT_Array_data_offset = 16;
+static constexpr dart::compiler::target::word AOT_Array_length_offset = 12;
static constexpr dart::compiler::target::word AOT_Array_tags_offset = 0;
static constexpr dart::compiler::target::word AOT_Array_type_arguments_offset =
8;
@@ -12063,13 +12059,13 @@
static constexpr dart::compiler::target::word AOT_Class_super_type_offset = 44;
static constexpr dart::compiler::target::word
AOT_Class_host_type_arguments_field_offset_in_words_offset = 76;
-static constexpr dart::compiler::target::word AOT_Closure_context_offset = 40;
+static constexpr dart::compiler::target::word AOT_Closure_context_offset = 24;
static constexpr dart::compiler::target::word
- AOT_Closure_delayed_type_arguments_offset = 24;
-static constexpr dart::compiler::target::word AOT_Closure_function_offset = 32;
+ AOT_Closure_delayed_type_arguments_offset = 16;
+static constexpr dart::compiler::target::word AOT_Closure_function_offset = 20;
static constexpr dart::compiler::target::word
- AOT_Closure_function_type_arguments_offset = 16;
-static constexpr dart::compiler::target::word AOT_Closure_hash_offset = 48;
+ AOT_Closure_function_type_arguments_offset = 12;
+static constexpr dart::compiler::target::word AOT_Closure_hash_offset = 28;
static constexpr dart::compiler::target::word
AOT_Closure_instantiator_type_arguments_offset = 8;
static constexpr dart::compiler::target::word
@@ -12111,9 +12107,9 @@
static constexpr dart::compiler::target::word
AOT_FutureOr_type_arguments_offset = 8;
static constexpr dart::compiler::target::word
- AOT_GrowableObjectArray_data_offset = 24;
+ AOT_GrowableObjectArray_data_offset = 16;
static constexpr dart::compiler::target::word
- AOT_GrowableObjectArray_length_offset = 16;
+ AOT_GrowableObjectArray_length_offset = 12;
static constexpr dart::compiler::target::word
AOT_GrowableObjectArray_type_arguments_offset = 8;
static constexpr dart::compiler::target::word AOT_OldPage_card_table_offset =
@@ -12140,17 +12136,17 @@
AOT_IsolateGroup_cached_class_table_table_offset = 32;
static constexpr dart::compiler::target::word AOT_Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word AOT_LinkedHashMap_data_offset =
- 32;
+ 20;
static constexpr dart::compiler::target::word
- AOT_LinkedHashMap_deleted_keys_offset = 48;
+ AOT_LinkedHashMap_deleted_keys_offset = 28;
static constexpr dart::compiler::target::word
- AOT_LinkedHashMap_hash_mask_offset = 24;
+ AOT_LinkedHashMap_hash_mask_offset = 16;
static constexpr dart::compiler::target::word AOT_LinkedHashMap_index_offset =
- 16;
+ 12;
static constexpr dart::compiler::target::word
AOT_LinkedHashMap_type_arguments_offset = 8;
static constexpr dart::compiler::target::word
- AOT_LinkedHashMap_used_data_offset = 40;
+ AOT_LinkedHashMap_used_data_offset = 24;
static constexpr dart::compiler::target::word AOT_LocalHandle_ptr_offset = 0;
static constexpr dart::compiler::target::word
AOT_MarkingStackBlock_pointers_offset = 16;
@@ -12444,7 +12440,7 @@
AOT_MonomorphicSmiableCall_target_offset = 8;
static constexpr dart::compiler::target::word AOT_WeakProperty_key_offset = 8;
static constexpr dart::compiler::target::word AOT_WeakProperty_value_offset =
- 16;
+ 12;
static constexpr dart::compiler::target::word AOT_Code_entry_point_offset[] = {
8, 24, 16, 32};
static constexpr dart::compiler::target::word
@@ -12455,11 +12451,11 @@
static constexpr dart::compiler::target::word AOT_AbstractType_InstanceSize =
24;
static constexpr dart::compiler::target::word AOT_ApiError_InstanceSize = 16;
-static constexpr dart::compiler::target::word AOT_Array_header_size = 24;
+static constexpr dart::compiler::target::word AOT_Array_header_size = 16;
static constexpr dart::compiler::target::word AOT_Bool_InstanceSize = 16;
static constexpr dart::compiler::target::word AOT_Capability_InstanceSize = 16;
static constexpr dart::compiler::target::word AOT_Class_InstanceSize = 88;
-static constexpr dart::compiler::target::word AOT_Closure_InstanceSize = 64;
+static constexpr dart::compiler::target::word AOT_Closure_InstanceSize = 40;
static constexpr dart::compiler::target::word AOT_ClosureData_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_CodeSourceMap_HeaderSize = 16;
static constexpr dart::compiler::target::word
@@ -12484,7 +12480,7 @@
48;
static constexpr dart::compiler::target::word AOT_FutureOr_InstanceSize = 16;
static constexpr dart::compiler::target::word
- AOT_GrowableObjectArray_InstanceSize = 32;
+ AOT_GrowableObjectArray_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_ICData_InstanceSize = 48;
static constexpr dart::compiler::target::word AOT_Instance_InstanceSize = 8;
static constexpr dart::compiler::target::word
@@ -12501,7 +12497,7 @@
static constexpr dart::compiler::target::word AOT_LibraryPrefix_InstanceSize =
24;
static constexpr dart::compiler::target::word AOT_LinkedHashMap_InstanceSize =
- 56;
+ 32;
static constexpr dart::compiler::target::word
AOT_MegamorphicCache_InstanceSize = 48;
static constexpr dart::compiler::target::word AOT_Mint_InstanceSize = 16;
@@ -12549,7 +12545,7 @@
static constexpr dart::compiler::target::word AOT_UnwindError_InstanceSize = 16;
static constexpr dart::compiler::target::word AOT_UserTag_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_WeakProperty_InstanceSize =
- 32;
+ 24;
static constexpr dart::compiler::target::word
AOT_WeakSerializationReference_InstanceSize = 16;
#endif // defined(TARGET_ARCH_ARM64) && defined(DART_COMPRESSED_POINTERS)
diff --git a/runtime/vm/compiler/stub_code_compiler.cc b/runtime/vm/compiler/stub_code_compiler.cc
index 53c2924..64edf34 100644
--- a/runtime/vm/compiler/stub_code_compiler.cc
+++ b/runtime/vm/compiler/stub_code_compiler.cc
@@ -100,11 +100,12 @@
__ SmiUntag(kScratchReg);
__ SmiTag(kScratchReg);
#endif
- __ LoadFieldAddressForRegOffset(kAddressReg, kInstanceReg, kScratchReg);
+ __ LoadCompressedFieldAddressForRegOffset(kAddressReg, kInstanceReg,
+ kScratchReg);
Label throw_exception;
if (is_final) {
- __ LoadMemoryValue(kScratchReg, kAddressReg, 0);
+ __ LoadCompressed(kScratchReg, Address(kAddressReg, 0));
__ CompareObject(kScratchReg, SentinelObject());
__ BranchIf(NOT_EQUAL, &throw_exception);
}
@@ -115,8 +116,8 @@
__ MoveRegister(kScratchReg, InitInstanceFieldABI::kResultReg);
__ StoreIntoObject(kInstanceReg, Address(kAddressReg, 0), kScratchReg);
#else
- __ StoreIntoObject(kInstanceReg, Address(kAddressReg, 0),
- InitInstanceFieldABI::kResultReg);
+ __ StoreCompressedIntoObject(kInstanceReg, Address(kAddressReg, 0),
+ InitInstanceFieldABI::kResultReg);
#endif // defined(TARGET_ARCH_IA32)
__ LeaveStubFrame();
diff --git a/runtime/vm/compiler/stub_code_compiler_arm64.cc b/runtime/vm/compiler/stub_code_compiler_arm64.cc
index 5626553..e3238dc 100644
--- a/runtime/vm/compiler/stub_code_compiler_arm64.cc
+++ b/runtime/vm/compiler/stub_code_compiler_arm64.cc
@@ -417,14 +417,16 @@
__ LoadFromOffset(R10, THR, compiler::target::Thread::callback_code_offset());
__ LoadFieldFromOffset(R10, R10,
compiler::target::GrowableObjectArray::data_offset());
- __ ldr(R10, __ ElementAddressForRegIndex(
- /*external=*/false,
- /*array_cid=*/kArrayCid,
- /*index, smi-tagged=*/compiler::target::kWordSize * 2,
- /*index_unboxed=*/false,
- /*array=*/R10,
- /*index=*/R9,
- /*temp=*/TMP));
+ __ LoadCompressed(
+ R10,
+ __ ElementAddressForRegIndex(
+ /*external=*/false,
+ /*array_cid=*/kArrayCid,
+ /*index_scale, smi-tagged=*/compiler::target::kCompressedWordSize * 2,
+ /*index_unboxed=*/false,
+ /*array=*/R10,
+ /*index=*/R9,
+ /*temp=*/TMP));
__ LoadFieldFromOffset(R10, R10,
compiler::target::Code::entry_point_offset());
@@ -523,13 +525,13 @@
// Populate closure object.
__ Pop(AllocateClosureABI::kScratchReg); // Pop type arguments.
- __ StoreIntoObjectNoBarrier(
+ __ StoreCompressedIntoObjectNoBarrier(
AllocateClosureABI::kResultReg,
FieldAddress(AllocateClosureABI::kResultReg,
target::Closure::instantiator_type_arguments_offset()),
AllocateClosureABI::kScratchReg);
__ LoadObject(AllocateClosureABI::kScratchReg, EmptyTypeArguments());
- __ StoreIntoObjectNoBarrier(
+ __ StoreCompressedIntoObjectNoBarrier(
AllocateClosureABI::kResultReg,
FieldAddress(AllocateClosureABI::kResultReg,
target::Closure::delayed_type_arguments_offset()),
@@ -852,7 +854,8 @@
// R0: newly allocated array.
// R2: smi-tagged argument count, may be zero (was preserved by the stub).
__ Push(R0); // Array is in R0 and on top of stack.
- __ add(R1, FP, Operand(R2, LSL, 2));
+ __ SmiUntag(R2);
+ __ add(R1, FP, Operand(R2, LSL, target::kWordSizeLog2));
__ AddImmediate(R1,
target::frame_layout.param_end_from_fp * target::kWordSize);
__ AddImmediate(R3, R0, target::Array::data_offset() - kHeapObjectTag);
@@ -865,9 +868,10 @@
__ b(&loop_exit, LE);
__ ldr(R7, Address(R1));
__ AddImmediate(R1, -target::kWordSize);
- __ AddImmediate(R3, target::kWordSize);
- __ AddImmediate(R2, R2, -target::ToRawSmi(1));
- __ StoreIntoObject(R0, Address(R3, -target::kWordSize), R7);
+ __ AddImmediate(R3, target::kCompressedWordSize);
+ __ AddImmediate(R2, R2, -1);
+ __ StoreCompressedIntoObject(R0, Address(R3, -target::kCompressedWordSize),
+ R7);
__ b(&loop);
__ Bind(&loop_exit);
}
@@ -1070,8 +1074,9 @@
FieldAddress(R5, target::CallSiteData::arguments_descriptor_offset()));
// Load the receiver.
- __ LoadFieldFromOffset(R2, R4, target::ArgumentsDescriptor::size_offset());
- __ add(TMP, FP, Operand(R2, LSL, 2)); // R2 is Smi.
+ __ LoadCompressedSmiFieldFromOffset(
+ R2, R4, target::ArgumentsDescriptor::size_offset());
+ __ add(TMP, FP, Operand(R2, LSL, target::kWordSizeLog2 - 1)); // R2 is Smi.
__ LoadFromOffset(R6, TMP,
target::frame_layout.param_end_from_fp * target::kWordSize);
__ Push(ZR); // Result slot.
@@ -1080,11 +1085,12 @@
__ Push(R4); // Arguments descriptor.
// Adjust arguments count.
- __ LoadFieldFromOffset(R3, R4,
- target::ArgumentsDescriptor::type_args_len_offset());
- __ AddImmediate(TMP, R2, 1); // Include the type arguments.
- __ cmp(R3, Operand(0));
- __ csinc(R2, R2, TMP, EQ); // R2 <- (R3 == 0) ? R2 : TMP + 1 (R2 : R2 + 2).
+ __ LoadCompressedSmiFieldFromOffset(
+ R3, R4, target::ArgumentsDescriptor::type_args_len_offset());
+ __ AddImmediate(TMP, R2, 1, kObjectBytes); // Include the type arguments.
+ __ cmp(R3, Operand(0), kObjectBytes);
+ // R2 <- (R3 == 0) ? R2 : TMP + 1 (R2 : R2 + 2).
+ __ csinc(R2, R2, TMP, EQ, kObjectBytes);
// R2: Smi-tagged arguments array length.
PushArrayOfArguments(assembler);
@@ -1130,7 +1136,7 @@
// Compute the size to be allocated, it is based on the array length
// and is computed as:
// RoundedAllocationSize(
- // (array_length * kwordSize) + target::Array::header_size()).
+ // (array_length * kCompressedWordSize) + target::Array::header_size()).
// Check that length is a Smi.
__ BranchIfNotSmi(AllocateArrayABI::kLengthReg, &slow_case);
@@ -1153,8 +1159,12 @@
target::Array::header_size() +
target::ObjectAlignment::kObjectAlignment - 1;
__ LoadImmediate(R3, fixed_size_plus_alignment_padding);
- __ add(R3, R3, Operand(AllocateArrayABI::kLengthReg, LSL, 2),
- kObjectBytes); // R2 is Smi.
+// AllocateArrayABI::kLengthReg is Smi.
+#if defined(DART_COMPRESSED_POINTERS)
+ __ add(R3, R3, Operand(AllocateArrayABI::kLengthReg, LSL, 1), kObjectBytes);
+#else
+ __ add(R3, R3, Operand(AllocateArrayABI::kLengthReg, LSL, 2), kObjectBytes);
+#endif
ASSERT(kSmiTagShift == 1);
__ andi(R3, R3,
Immediate(~(target::ObjectAlignment::kObjectAlignment - 1)));
@@ -1189,14 +1199,14 @@
// R7: new object end address.
// Store the type argument field.
- __ StoreIntoObjectOffsetNoBarrier(AllocateArrayABI::kResultReg,
- target::Array::type_arguments_offset(),
- AllocateArrayABI::kTypeArgumentsReg);
+ __ StoreCompressedIntoObjectOffsetNoBarrier(
+ AllocateArrayABI::kResultReg, target::Array::type_arguments_offset(),
+ AllocateArrayABI::kTypeArgumentsReg);
// Set the length field.
- __ StoreIntoObjectOffsetNoBarrier(AllocateArrayABI::kResultReg,
- target::Array::length_offset(),
- AllocateArrayABI::kLengthReg);
+ __ StoreCompressedIntoObjectOffsetNoBarrier(AllocateArrayABI::kResultReg,
+ target::Array::length_offset(),
+ AllocateArrayABI::kLengthReg);
// Calculate the size tag.
// AllocateArrayABI::kResultReg: new object start as a tagged pointer.
@@ -1233,8 +1243,8 @@
// TODO(cshapiro): StoreIntoObjectNoBarrier
__ CompareRegisters(R3, R7);
__ b(&done, CS);
- __ str(NULL_REG, Address(R3)); // Store if unsigned lower.
- __ AddImmediate(R3, target::kWordSize);
+ __ str(NULL_REG, Address(R3), kObjectBytes); // Store if unsigned lower.
+ __ AddImmediate(R3, target::kCompressedWordSize);
__ b(&loop); // Loop until R3 == R7.
__ Bind(&done);
@@ -1384,13 +1394,14 @@
__ LoadFromOffset(R4, R1, VMHandles::kOffsetOfRawPtrInHandle);
// Load number of arguments into R5 and adjust count for type arguments.
- __ LoadFieldFromOffset(R5, R4, target::ArgumentsDescriptor::count_offset());
- __ LoadFieldFromOffset(R3, R4,
- target::ArgumentsDescriptor::type_args_len_offset());
- __ AddImmediate(TMP, R5, 1); // Include the type arguments.
- __ cmp(R3, Operand(0));
- __ csinc(R5, R5, TMP, EQ); // R5 <- (R3 == 0) ? R5 : TMP + 1 (R5 : R5 + 2).
+ __ LoadCompressedSmiFieldFromOffset(
+ R5, R4, target::ArgumentsDescriptor::count_offset());
+ __ LoadCompressedSmiFieldFromOffset(
+ R3, R4, target::ArgumentsDescriptor::type_args_len_offset());
__ SmiUntag(R5);
+ // Include the type arguments.
+ __ cmp(R3, Operand(0), kObjectBytes);
+ __ csinc(R5, R5, R5, EQ); // R5 <- (R3 == 0) ? R5 : R5 + 1
// Compute address of 'arguments array' data area into R2.
__ LoadFromOffset(R2, R2, VMHandles::kOffsetOfRawPtrInHandle);
@@ -1403,10 +1414,10 @@
__ b(&done_push_arguments, EQ); // check if there are arguments.
__ LoadImmediate(R1, 0);
__ Bind(&push_arguments);
- __ ldr(R3, Address(R2));
+ __ LoadCompressed(R3, Address(R2));
__ Push(R3);
__ add(R1, R1, Operand(1));
- __ add(R2, R2, Operand(target::kWordSize));
+ __ add(R2, R2, Operand(target::kCompressedWordSize));
__ cmp(R1, Operand(R5));
__ b(&push_arguments, LT);
__ Bind(&done_push_arguments);
@@ -1907,8 +1918,10 @@
__ Bind(&init_loop);
__ CompareRegisters(kFieldReg, kNewTopReg);
__ b(&done, UNSIGNED_GREATER_EQUAL);
- __ str(NULL_REG,
- Address(kFieldReg, target::kWordSize, Address::PostIndex));
+ __ str(
+ NULL_REG,
+ Address(kFieldReg, target::kCompressedWordSize, Address::PostIndex),
+ kObjectBytes);
__ b(&init_loop);
__ Bind(&done);
@@ -1932,7 +1945,7 @@
kFourBytes);
// Set the type arguments in the new object.
- __ StoreIntoObjectNoBarrier(
+ __ StoreCompressedIntoObjectNoBarrier(
AllocateObjectABI::kResultReg,
Address(AllocateObjectABI::kResultReg, kTypeOffestReg, UXTX,
Address::Scaled),
@@ -2088,13 +2101,14 @@
__ EnterStubFrame();
// Load the receiver.
- __ LoadFieldFromOffset(R2, R4, target::ArgumentsDescriptor::size_offset());
- __ add(TMP, FP, Operand(R2, LSL, 2)); // R2 is Smi.
+ __ LoadCompressedSmiFieldFromOffset(
+ R2, R4, target::ArgumentsDescriptor::size_offset());
+ __ add(TMP, FP, Operand(R2, LSL, target::kWordSizeLog2 - 1));
__ LoadFromOffset(R6, TMP,
target::frame_layout.param_end_from_fp * target::kWordSize);
// Load the function.
- __ LoadFieldFromOffset(TMP, R6, target::Closure::function_offset());
+ __ LoadCompressedFieldFromOffset(TMP, R6, target::Closure::function_offset());
__ Push(ZR); // Result slot.
__ Push(R6); // Receiver.
@@ -2102,11 +2116,12 @@
__ Push(R4); // Arguments descriptor.
// Adjust arguments count.
- __ LoadFieldFromOffset(R3, R4,
- target::ArgumentsDescriptor::type_args_len_offset());
- __ AddImmediate(TMP, R2, 1); // Include the type arguments.
- __ cmp(R3, Operand(0));
- __ csinc(R2, R2, TMP, EQ); // R2 <- (R3 == 0) ? R2 : TMP + 1 (R2 : R2 + 2).
+ __ LoadCompressedSmiFieldFromOffset(
+ R3, R4, target::ArgumentsDescriptor::type_args_len_offset());
+ __ AddImmediate(TMP, R2, 1, kObjectBytes); // Include the type arguments.
+ __ cmp(R3, Operand(0), kObjectBytes);
+ // R2 <- (R3 == 0) ? R2 : TMP + 1 (R2 : R2 + 2).
+ __ csinc(R2, R2, TMP, EQ, kObjectBytes);
// R2: Smi-tagged arguments array length.
PushArrayOfArguments(assembler);
@@ -2215,11 +2230,11 @@
// Check that first entry is for Smi/Smi.
Label error, ok;
const intptr_t imm_smi_cid = target::ToRawSmi(kSmiCid);
- __ ldr(R1, Address(R6, 0));
- __ CompareImmediate(R1, imm_smi_cid);
+ __ LoadCompressedSmiFromOffset(R1, R6, 0);
+ __ CompareImmediate(R1, imm_smi_cid, kObjectBytes);
__ b(&error, NE);
- __ ldr(R1, Address(R6, target::kWordSize));
- __ CompareImmediate(R1, imm_smi_cid);
+ __ LoadCompressedSmiFromOffset(R1, R6, target::kCompressedWordSize);
+ __ CompareImmediate(R1, imm_smi_cid, kObjectBytes);
__ b(&ok, EQ);
__ Bind(&error);
__ Stop("Incorrect IC data");
@@ -2227,11 +2242,11 @@
#endif
if (FLAG_optimization_counter_threshold >= 0) {
const intptr_t count_offset =
- target::ICData::CountIndexFor(num_args) * target::kWordSize;
+ target::ICData::CountIndexFor(num_args) * target::kCompressedWordSize;
// Update counter, ignore overflow.
- __ LoadFromOffset(R1, R6, count_offset);
- __ adds(R1, R1, Operand(target::ToRawSmi(1)));
- __ StoreToOffset(R1, R6, count_offset);
+ __ LoadCompressedSmiFromOffset(R1, R6, count_offset);
+ __ adds(R1, R1, Operand(target::ToRawSmi(1)), kObjectBytes);
+ __ StoreToOffset(R1, R6, count_offset, kObjectBytes);
}
__ ret();
@@ -2337,8 +2352,8 @@
__ LoadFieldFromOffset(R4, R5,
target::CallSiteData::arguments_descriptor_offset());
if (num_args == 2) {
- __ LoadFieldFromOffset(R7, R4,
- target::ArgumentsDescriptor::count_offset());
+ __ LoadCompressedSmiFieldFromOffset(
+ R7, R4, target::ArgumentsDescriptor::count_offset());
__ SmiUntag(R7); // Untag so we can use the LSL 3 addressing mode.
__ sub(R7, R7, Operand(2));
// R1 <- [SP + (R1 << 3)]
@@ -2350,7 +2365,8 @@
target::CallSiteData::arguments_descriptor_offset());
// Get the receiver's class ID (first read number of arguments from
// arguments descriptor array and then access the receiver from the stack).
- __ LoadFieldFromOffset(R7, R4, target::ArgumentsDescriptor::count_offset());
+ __ LoadCompressedSmiFieldFromOffset(
+ R7, R4, target::ArgumentsDescriptor::count_offset());
__ SmiUntag(R7); // Untag so we can use the LSL 3 addressing mode.
__ sub(R7, R7, Operand(1));
// R0 <- [SP + (R7 << 3)]
@@ -2378,12 +2394,12 @@
for (int unroll = optimize ? 4 : 2; unroll >= 0; unroll--) {
Label update;
- __ LoadFromOffset(R2, R6, 0);
- __ CompareRegisters(R0, R2); // Class id match?
+ __ LoadCompressedSmiFromOffset(R2, R6, 0);
+ __ CompareObjectRegisters(R0, R2); // Class id match?
if (num_args == 2) {
__ b(&update, NE); // Continue.
- __ LoadFromOffset(R2, R6, target::kWordSize);
- __ CompareRegisters(R1, R2); // Class id match?
+ __ LoadCompressedSmiFromOffset(R2, R6, target::kCompressedWordSize);
+ __ CompareObjectRegisters(R1, R2); // Class id match?
}
__ b(&found, EQ); // Break.
@@ -2391,7 +2407,7 @@
const intptr_t entry_size = target::ICData::TestEntryLengthFor(
num_args, exactness == kCheckExactness) *
- target::kWordSize;
+ target::kCompressedWordSize;
__ AddImmediate(R6, entry_size); // Next entry.
__ CompareImmediate(R2, target::ToRawSmi(kIllegalCid)); // Done?
@@ -2406,7 +2422,8 @@
__ Comment("IC miss");
// Compute address of arguments.
- __ LoadFieldFromOffset(R7, R4, target::ArgumentsDescriptor::count_offset());
+ __ LoadCompressedSmiFieldFromOffset(
+ R7, R4, target::ArgumentsDescriptor::count_offset());
__ SmiUntag(R7); // Untag so we can use the LSL 3 addressing mode.
__ sub(R7, R7, Operand(1));
// R7: argument_count - 1 (untagged).
@@ -2458,16 +2475,16 @@
__ Comment("Update caller's counter");
// R6: pointer to an IC data check group.
const intptr_t target_offset =
- target::ICData::TargetIndexFor(num_args) * target::kWordSize;
+ target::ICData::TargetIndexFor(num_args) * target::kCompressedWordSize;
const intptr_t count_offset =
- target::ICData::CountIndexFor(num_args) * target::kWordSize;
- __ LoadFromOffset(R0, R6, target_offset);
+ target::ICData::CountIndexFor(num_args) * target::kCompressedWordSize;
+ __ LoadCompressedFromOffset(R0, R6, target_offset);
if (FLAG_optimization_counter_threshold >= 0) {
// Update counter, ignore overflow.
- __ LoadFromOffset(R1, R6, count_offset);
- __ adds(R1, R1, Operand(target::ToRawSmi(1)));
- __ StoreToOffset(R1, R6, count_offset);
+ __ LoadCompressedSmiFromOffset(R1, R6, count_offset);
+ __ adds(R1, R1, Operand(target::ToRawSmi(1)), kObjectBytes);
+ __ StoreToOffset(R1, R6, count_offset, kObjectBytes);
}
__ Comment("Call target");
@@ -2638,15 +2655,15 @@
__ AddImmediate(R6, target::Array::data_offset() - kHeapObjectTag);
// R6: points directly to the first ic data array element.
const intptr_t target_offset =
- target::ICData::TargetIndexFor(0) * target::kWordSize;
+ target::ICData::TargetIndexFor(0) * target::kCompressedWordSize;
const intptr_t count_offset =
- target::ICData::CountIndexFor(0) * target::kWordSize;
+ target::ICData::CountIndexFor(0) * target::kCompressedWordSize;
if (FLAG_optimization_counter_threshold >= 0) {
// Increment count for this call, ignore overflow.
- __ LoadFromOffset(R1, R6, count_offset);
- __ adds(R1, R1, Operand(target::ToRawSmi(1)));
- __ StoreToOffset(R1, R6, count_offset);
+ __ LoadCompressedSmiFromOffset(R1, R6, count_offset);
+ __ adds(R1, R1, Operand(target::ToRawSmi(1)), kObjectBytes);
+ __ StoreToOffset(R1, R6, count_offset, kObjectBytes);
}
// Load arguments descriptor into R4.
@@ -2654,7 +2671,7 @@
target::CallSiteData::arguments_descriptor_offset());
// Get function and call it, if possible.
- __ LoadFromOffset(R0, R6, target_offset);
+ __ LoadCompressedFromOffset(R0, R6, target_offset);
__ LoadCompressedFieldFromOffset(CODE_REG, R0,
target::Function::code_offset());
__ add(R2, R0, Operand(R8));
@@ -2843,24 +2860,32 @@
// Closure handling.
{
- __ ldr(STCInternalRegs::kInstanceCidOrSignatureReg,
- FieldAddress(TypeTestABI::kInstanceReg,
- target::Closure::function_offset()));
- __ LoadCompressed(STCInternalRegs::kInstanceCidOrSignatureReg,
- FieldAddress(STCInternalRegs::kInstanceCidOrSignatureReg,
- target::Function::signature_offset()));
+ __ Comment("Closure");
+ __ LoadCompressed(
+ STCInternalRegs::kInstanceCidOrSignatureReg,
+ FieldAddress(TypeTestABI::kInstanceReg,
+ target::Closure::function_offset(), kObjectBytes));
+ __ LoadCompressed(
+ STCInternalRegs::kInstanceCidOrSignatureReg,
+ FieldAddress(STCInternalRegs::kInstanceCidOrSignatureReg,
+ target::Function::signature_offset(), kObjectBytes));
if (n >= 3) {
- __ ldr(
+ __ LoadCompressed(
STCInternalRegs::kInstanceInstantiatorTypeArgumentsReg,
FieldAddress(TypeTestABI::kInstanceReg,
- target::Closure::instantiator_type_arguments_offset()));
+ target::Closure::instantiator_type_arguments_offset(),
+ kObjectBytes));
if (n >= 7) {
- __ ldr(STCInternalRegs::kInstanceParentFunctionTypeArgumentsReg,
- FieldAddress(TypeTestABI::kInstanceReg,
- target::Closure::function_type_arguments_offset()));
- __ ldr(STCInternalRegs::kInstanceDelayedFunctionTypeArgumentsReg,
- FieldAddress(TypeTestABI::kInstanceReg,
- target::Closure::delayed_type_arguments_offset()));
+ __ LoadCompressed(
+ STCInternalRegs::kInstanceParentFunctionTypeArgumentsReg,
+ FieldAddress(TypeTestABI::kInstanceReg,
+ target::Closure::function_type_arguments_offset(),
+ kObjectBytes));
+ __ LoadCompressed(
+ STCInternalRegs::kInstanceDelayedFunctionTypeArgumentsReg,
+ FieldAddress(TypeTestABI::kInstanceReg,
+ target::Closure::delayed_type_arguments_offset(),
+ kObjectBytes));
}
}
__ b(&loop);
@@ -2868,6 +2893,7 @@
// Non-Closure handling.
{
+ __ Comment("Non-Closure");
__ Bind(¬_closure);
if (n >= 3) {
Label has_no_type_arguments;
@@ -2881,10 +2907,11 @@
__ CompareImmediate(kScratchReg, target::Class::kNoTypeArguments);
__ b(&has_no_type_arguments, EQ);
__ add(kScratchReg, TypeTestABI::kInstanceReg,
- Operand(kScratchReg, LSL, 3));
- __ ldr(STCInternalRegs::kInstanceInstantiatorTypeArgumentsReg,
- FieldAddress(kScratchReg, 0));
+ Operand(kScratchReg, LSL, kCompressedWordSizeLog2));
+ __ LoadCompressed(STCInternalRegs::kInstanceInstantiatorTypeArgumentsReg,
+ FieldAddress(kScratchReg, 0, kObjectBytes));
__ Bind(&has_no_type_arguments);
+ __ Comment("No type arguments");
if (n >= 7) {
__ mov(STCInternalRegs::kInstanceParentFunctionTypeArgumentsReg,
@@ -2900,43 +2927,54 @@
// Loop header
__ Bind(&loop);
- __ ldr(kScratchReg,
- Address(kCacheArrayReg,
- target::kWordSize *
- target::SubtypeTestCache::kInstanceCidOrSignature));
- __ cmp(kScratchReg, Operand(kNullReg));
+ __ Comment("Loop");
+ __ LoadCompressed(
+ kScratchReg,
+ Address(kCacheArrayReg,
+ target::kCompressedWordSize *
+ target::SubtypeTestCache::kInstanceCidOrSignature,
+ Address::Offset, kObjectBytes));
+ __ CompareObjectRegisters(kScratchReg, kNullReg);
__ b(&done, EQ);
- __ cmp(kScratchReg, Operand(STCInternalRegs::kInstanceCidOrSignatureReg));
+ __ CompareObjectRegisters(kScratchReg,
+ STCInternalRegs::kInstanceCidOrSignatureReg);
if (n == 1) {
__ b(&found, EQ);
} else {
__ b(&next_iteration, NE);
- __ ldr(kScratchReg,
- Address(
- kCacheArrayReg,
- target::kWordSize * target::SubtypeTestCache::kDestinationType));
+ __ LoadCompressed(kScratchReg,
+ Address(kCacheArrayReg,
+ target::kCompressedWordSize *
+ target::SubtypeTestCache::kDestinationType,
+ Address::Offset, kObjectBytes));
__ cmp(kScratchReg, Operand(TypeTestABI::kDstTypeReg));
__ b(&next_iteration, NE);
- __ ldr(kScratchReg,
- Address(kCacheArrayReg,
- target::kWordSize *
- target::SubtypeTestCache::kInstanceTypeArguments));
+ __ LoadCompressed(
+ kScratchReg,
+ Address(kCacheArrayReg,
+ target::kCompressedWordSize *
+ target::SubtypeTestCache::kInstanceTypeArguments,
+ Address::Offset, kObjectBytes));
__ cmp(kScratchReg,
Operand(STCInternalRegs::kInstanceInstantiatorTypeArgumentsReg));
if (n == 3) {
__ b(&found, EQ);
} else {
__ b(&next_iteration, NE);
- __ ldr(kScratchReg,
- Address(kCacheArrayReg,
- target::kWordSize *
- target::SubtypeTestCache::kInstantiatorTypeArguments));
+ __ LoadCompressed(
+ kScratchReg,
+ Address(kCacheArrayReg,
+ target::kCompressedWordSize *
+ target::SubtypeTestCache::kInstantiatorTypeArguments,
+ Address::Offset, kObjectBytes));
__ cmp(kScratchReg, Operand(TypeTestABI::kInstantiatorTypeArgumentsReg));
__ b(&next_iteration, NE);
- __ ldr(kScratchReg,
- Address(kCacheArrayReg,
- target::kWordSize *
- target::SubtypeTestCache::kFunctionTypeArguments));
+ __ LoadCompressed(
+ kScratchReg,
+ Address(kCacheArrayReg,
+ target::kCompressedWordSize *
+ target::SubtypeTestCache::kFunctionTypeArguments,
+ Address::Offset, kObjectBytes));
__ cmp(kScratchReg, Operand(TypeTestABI::kFunctionTypeArgumentsReg));
if (n == 5) {
__ b(&found, EQ);
@@ -2944,21 +2982,23 @@
ASSERT(n == 7);
__ b(&next_iteration, NE);
- __ ldr(kScratchReg,
- Address(kCacheArrayReg,
- target::kWordSize *
- target::SubtypeTestCache::
- kInstanceParentFunctionTypeArguments));
+ __ LoadCompressed(kScratchReg,
+ Address(kCacheArrayReg,
+ target::kCompressedWordSize *
+ target::SubtypeTestCache::
+ kInstanceParentFunctionTypeArguments,
+ Address::Offset, kObjectBytes));
__ cmp(
kScratchReg,
Operand(STCInternalRegs::kInstanceParentFunctionTypeArgumentsReg));
__ b(&next_iteration, NE);
- __ ldr(kScratchReg,
- Address(kCacheArrayReg,
- target::kWordSize *
- target::SubtypeTestCache::
- kInstanceDelayedFunctionTypeArguments));
+ __ LoadCompressed(kScratchReg,
+ Address(kCacheArrayReg,
+ target::kCompressedWordSize *
+ target::SubtypeTestCache::
+ kInstanceDelayedFunctionTypeArguments,
+ Address::Offset, kObjectBytes));
__ cmp(
kScratchReg,
Operand(STCInternalRegs::kInstanceDelayedFunctionTypeArgumentsReg));
@@ -2967,16 +3007,21 @@
}
}
__ Bind(&next_iteration);
+ __ Comment("Next iteration");
__ AddImmediate(
kCacheArrayReg,
- target::kWordSize * target::SubtypeTestCache::kTestEntryLength);
+ target::kCompressedWordSize * target::SubtypeTestCache::kTestEntryLength);
__ b(&loop);
__ Bind(&found);
- __ ldr(TypeTestABI::kSubtypeTestCacheResultReg,
- Address(kCacheArrayReg,
- target::kWordSize * target::SubtypeTestCache::kTestResult));
+ __ Comment("Found");
+ __ LoadCompressed(TypeTestABI::kSubtypeTestCacheResultReg,
+ Address(kCacheArrayReg,
+ target::kCompressedWordSize *
+ target::SubtypeTestCache::kTestResult,
+ Address::Offset, kObjectBytes));
__ Bind(&done);
+ __ Comment("Done");
__ ret();
}
@@ -3252,10 +3297,10 @@
const intptr_t base = target::Array::data_offset();
// R3 is smi tagged, but table entries are 16 bytes, so LSL 3.
- __ add(TMP, R2, Operand(R3, LSL, 3));
- __ ldr(R6, FieldAddress(TMP, base));
+ __ add(TMP, R2, Operand(R3, LSL, kCompressedWordSizeLog2));
+ __ LoadCompressedSmiFieldFromOffset(R6, TMP, base);
Label probe_failed;
- __ CompareRegisters(R6, R8);
+ __ CompareObjectRegisters(R6, R8);
__ b(&probe_failed, NE);
Label load_target;
@@ -3264,14 +3309,15 @@
// proper target for the given name and arguments descriptor. If the
// illegal class id was found, the target is a cache miss handler that can
// be invoked as a normal Dart function.
- const auto target_address = FieldAddress(TMP, base + target::kWordSize);
+ const auto target_address =
+ FieldAddress(TMP, base + target::kCompressedWordSize, kObjectBytes);
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
__ ldr(R1, target_address);
__ ldr(
ARGS_DESC_REG,
FieldAddress(R5, target::CallSiteData::arguments_descriptor_offset()));
} else {
- __ ldr(R0, target_address);
+ __ LoadCompressed(R0, target_address);
__ ldr(R1, FieldAddress(R0, target::Function::entry_point_offset()));
__ ldr(
ARGS_DESC_REG,
@@ -3284,7 +3330,7 @@
// Probe failed, check if it is a miss.
__ Bind(&probe_failed);
ASSERT(kIllegalCid == 0);
- __ tst(R6, Operand(R6));
+ __ tst(R6, Operand(R6), kObjectBytes);
Label miss;
__ b(&miss, EQ); // branch if miss.
@@ -3315,28 +3361,35 @@
// R1: receiver cid as Smi
__ Bind(&loop);
- __ ldr(R2, Address(R8, 0));
- __ cmp(R1, Operand(R2));
+ __ LoadCompressedSmi(R2, Address(R8, 0));
+ __ cmp(R1, Operand(R2), kObjectBytes);
__ b(&found, EQ);
- __ CompareImmediate(R2, target::ToRawSmi(kIllegalCid));
+ __ CompareImmediate(R2, target::ToRawSmi(kIllegalCid), kObjectBytes);
__ b(&miss, EQ);
const intptr_t entry_length =
target::ICData::TestEntryLengthFor(1, /*tracking_exactness=*/false) *
- target::kWordSize;
+ target::kCompressedWordSize;
__ AddImmediate(R8, entry_length); // Next entry.
__ b(&loop);
__ Bind(&found);
const intptr_t code_offset =
- target::ICData::CodeIndexFor(1) * target::kWordSize;
+ target::ICData::CodeIndexFor(1) * target::kCompressedWordSize;
+#if defined(DART_COMPRESSED_POINTERS)
+ __ LoadCompressed(CODE_REG,
+ Address(R8, code_offset, Address::Offset, kObjectBytes));
+ __ ldr(R1, FieldAddress(CODE_REG, target::Code::entry_point_offset()));
+ __ br(R1);
+#else
const intptr_t entry_offset =
- target::ICData::EntryPointIndexFor(1) * target::kWordSize;
+ target::ICData::EntryPointIndexFor(1) * target::kCompressedWordSize;
__ ldr(R1, Address(R8, entry_offset));
if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions)) {
__ ldr(CODE_REG, Address(R8, code_offset));
}
__ br(R1);
+#endif
__ Bind(&miss);
__ LoadIsolate(R2);
@@ -3478,22 +3531,25 @@
// Use load-acquire to test for sentinel, if we found non-sentinel it is safe
// to access the other entries. If we found a sentinel we go to runtime.
- __ LoadAcquire(R5, R0,
- TypeArguments::Instantiation::kInstantiatorTypeArgsIndex *
- target::kWordSize);
- __ CompareImmediate(R5, Smi::RawValue(TypeArguments::kNoInstantiator));
+ __ LoadAcquireCompressed(
+ R5, R0,
+ TypeArguments::Instantiation::kInstantiatorTypeArgsIndex *
+ target::kCompressedWordSize);
+ __ CompareImmediate(R5, Smi::RawValue(TypeArguments::kNoInstantiator),
+ kObjectBytes);
__ b(&call_runtime, EQ);
__ CompareRegisters(R5, InstantiationABI::kInstantiatorTypeArgumentsReg);
__ b(&next, NE);
- __ LoadFromOffset(
+ __ LoadCompressedFromOffset(
R4, R0,
- TypeArguments::Instantiation::kFunctionTypeArgsIndex * target::kWordSize);
+ TypeArguments::Instantiation::kFunctionTypeArgsIndex *
+ target::kCompressedWordSize);
__ CompareRegisters(R4, InstantiationABI::kFunctionTypeArgumentsReg);
__ b(&found, EQ);
__ Bind(&next);
- __ AddImmediate(
- R0, TypeArguments::Instantiation::kSizeInWords * target::kWordSize);
+ __ AddImmediate(R0, TypeArguments::Instantiation::kSizeInWords *
+ target::kCompressedWordSize);
__ b(&loop);
// Instantiate non-null type arguments.
@@ -3510,9 +3566,10 @@
__ Ret();
__ Bind(&found);
- __ LoadFromOffset(InstantiationABI::kResultTypeArgumentsReg, R0,
- TypeArguments::Instantiation::kInstantiatedTypeArgsIndex *
- target::kWordSize);
+ __ LoadCompressedFromOffset(
+ InstantiationABI::kResultTypeArgumentsReg, R0,
+ TypeArguments::Instantiation::kInstantiatedTypeArgsIndex *
+ target::kCompressedWordSize);
__ Ret();
}
diff --git a/runtime/vm/compiler/stub_code_compiler_x64.cc b/runtime/vm/compiler/stub_code_compiler_x64.cc
index c6339b3..ed0be69 100644
--- a/runtime/vm/compiler/stub_code_compiler_x64.cc
+++ b/runtime/vm/compiler/stub_code_compiler_x64.cc
@@ -361,13 +361,15 @@
THR, compiler::target::Thread::callback_code_offset()));
__ movq(TMP, compiler::FieldAddress(
TMP, compiler::target::GrowableObjectArray::data_offset()));
- __ movq(TMP, __ ElementAddressForRegIndex(
- /*external=*/false,
- /*array_cid=*/kArrayCid,
- /*index, smi-tagged=*/compiler::target::kWordSize * 2,
- /*index_unboxed=*/false,
- /*array=*/TMP,
- /*index=*/RAX));
+ __ movq(
+ TMP,
+ __ ElementAddressForRegIndex(
+ /*external=*/false,
+ /*array_cid=*/kArrayCid,
+ /*index_scale, smi-tagged=*/compiler::target::kCompressedWordSize * 2,
+ /*index_unboxed=*/false,
+ /*array=*/TMP,
+ /*index=*/RAX));
__ movq(TMP, compiler::FieldAddress(
TMP, compiler::target::Code::entry_point_offset()));
@@ -464,13 +466,13 @@
// Populate closure object.
__ popq(AllocateClosureABI::kScratchReg); // Pop type argument vector.
- __ StoreIntoObjectNoBarrier(
+ __ StoreCompressedIntoObjectNoBarrier(
AllocateClosureABI::kResultReg,
FieldAddress(AllocateClosureABI::kResultReg,
target::Closure::instantiator_type_arguments_offset()),
AllocateClosureABI::kScratchReg);
__ LoadObject(AllocateClosureABI::kScratchReg, EmptyTypeArguments());
- __ StoreIntoObjectNoBarrier(
+ __ StoreCompressedIntoObjectNoBarrier(
AllocateClosureABI::kResultReg,
FieldAddress(AllocateClosureABI::kResultReg,
target::Closure::delayed_type_arguments_offset()),
@@ -781,8 +783,8 @@
__ Bind(&loop);
__ movq(RDI, Address(R12, 0));
// Generational barrier is needed, array is not necessarily in new space.
- __ StoreIntoObject(RAX, Address(RBX, 0), RDI);
- __ addq(RBX, Immediate(target::kWordSize));
+ __ StoreCompressedIntoObject(RAX, Address(RBX, 0), RDI);
+ __ addq(RBX, Immediate(target::kCompressedWordSize));
__ subq(R12, Immediate(target::kWordSize));
__ Bind(&loop_condition);
__ decq(R10);
@@ -991,13 +993,14 @@
__ pushq(R10); // Arguments descriptor array.
// Adjust arguments count.
- __ cmpq(
+ __ OBJ(cmp)(
FieldAddress(R10, target::ArgumentsDescriptor::type_args_len_offset()),
Immediate(0));
- __ movq(R10, RDI);
+ __ OBJ(mov)(R10, RDI);
Label args_count_ok;
__ j(EQUAL, &args_count_ok, Assembler::kNearJump);
- __ addq(R10, Immediate(target::ToRawSmi(1))); // Include the type arguments.
+ // Include the type arguments.
+ __ OBJ(add)(R10, Immediate(target::ToRawSmi(1)));
__ Bind(&args_count_ok);
// R10: Smi-tagged arguments array length.
@@ -1023,7 +1026,8 @@
__ EnterStubFrame();
// Load the receiver.
- __ movq(RDI, FieldAddress(R10, target::ArgumentsDescriptor::size_offset()));
+ __ OBJ(mov)(RDI,
+ FieldAddress(R10, target::ArgumentsDescriptor::size_offset()));
__ movq(RAX,
Address(RBP, RDI, TIMES_HALF_WORD_SIZE,
target::frame_layout.param_end_from_fp * target::kWordSize));
@@ -1040,7 +1044,8 @@
__ movq(R10, FieldAddress(
RBX, target::CallSiteData::arguments_descriptor_offset()));
- __ movq(RDI, FieldAddress(R10, target::ArgumentsDescriptor::size_offset()));
+ __ OBJ(mov)(RDI,
+ FieldAddress(R10, target::ArgumentsDescriptor::size_offset()));
GenerateNoSuchMethodDispatcherBody(assembler, /*receiver_reg=*/RDX);
}
@@ -1059,7 +1064,8 @@
// Compute the size to be allocated, it is based on the array length
// and is computed as:
// RoundedAllocationSize(
- // (array_length * target::kwordSize) + target::Array::header_size()).
+ // (array_length * target::kCompressedWordSize) +
+ // target::Array::header_size()).
__ movq(RDI, AllocateArrayABI::kLengthReg); // Array Length.
// Check that length is Smi.
__ testq(RDI, Immediate(kSmiTagMask));
@@ -1079,7 +1085,8 @@
target::Array::header_size() +
target::ObjectAlignment::kObjectAlignment - 1;
// RDI is a Smi.
- __ OBJ(lea)(RDI, Address(RDI, TIMES_4, fixed_size_plus_alignment_padding));
+ __ OBJ(lea)(RDI, Address(RDI, TIMES_COMPRESSED_HALF_WORD_SIZE,
+ fixed_size_plus_alignment_padding));
ASSERT(kSmiTagShift == 1);
__ andq(RDI, Immediate(-target::ObjectAlignment::kObjectAlignment));
@@ -1128,17 +1135,18 @@
// AllocateArrayABI::kResultReg: new object start as a tagged pointer.
// Store the type argument field.
// No generational barrier needed, since we store into a new object.
- __ StoreIntoObjectNoBarrier(
+ __ StoreCompressedIntoObjectNoBarrier(
AllocateArrayABI::kResultReg,
FieldAddress(AllocateArrayABI::kResultReg,
target::Array::type_arguments_offset()),
AllocateArrayABI::kTypeArgumentsReg);
// Set the length field.
- __ StoreIntoObjectNoBarrier(AllocateArrayABI::kResultReg,
- FieldAddress(AllocateArrayABI::kResultReg,
- target::Array::length_offset()),
- AllocateArrayABI::kLengthReg);
+ __ StoreCompressedIntoObjectNoBarrier(
+ AllocateArrayABI::kResultReg,
+ FieldAddress(AllocateArrayABI::kResultReg,
+ target::Array::length_offset()),
+ AllocateArrayABI::kLengthReg);
// Initialize all array elements to raw_null.
// AllocateArrayABI::kResultReg: new object start as a tagged pointer.
@@ -1159,9 +1167,9 @@
#endif // DEBUG
__ j(ABOVE_EQUAL, &done, kJumpLength);
// No generational barrier needed, since we are storing null.
- __ StoreIntoObjectNoBarrier(AllocateArrayABI::kResultReg, Address(RDI, 0),
- R12);
- __ addq(RDI, Immediate(target::kWordSize));
+ __ StoreCompressedIntoObjectNoBarrier(AllocateArrayABI::kResultReg,
+ Address(RDI, 0), R12);
+ __ addq(RDI, Immediate(target::kCompressedWordSize));
__ jmp(&init_loop, kJumpLength);
__ Bind(&done);
__ ret();
@@ -1314,8 +1322,9 @@
ASSERT(kTargetReg != RDX);
// Load number of arguments into RBX and adjust count for type arguments.
- __ movq(RBX, FieldAddress(R10, target::ArgumentsDescriptor::count_offset()));
- __ cmpq(
+ __ OBJ(mov)(RBX,
+ FieldAddress(R10, target::ArgumentsDescriptor::count_offset()));
+ __ OBJ(cmp)(
FieldAddress(R10, target::ArgumentsDescriptor::type_args_len_offset()),
Immediate(0));
Label args_count_ok;
@@ -1336,7 +1345,12 @@
__ j(ZERO, &done_push_arguments, Assembler::kNearJump);
__ LoadImmediate(RAX, Immediate(0));
__ Bind(&push_arguments);
+#if defined(DART_COMPRESSED_POINTERS)
+ __ LoadCompressed(TMP, Address(RDX, RAX, TIMES_COMPRESSED_WORD_SIZE, 0));
+ __ pushq(TMP);
+#else
__ pushq(Address(RDX, RAX, TIMES_8, 0));
+#endif
__ incq(RAX);
__ cmpq(RAX, RBX);
__ j(LESS, &push_arguments, Assembler::kNearJump);
@@ -1841,9 +1855,9 @@
static auto const kJumpLength = Assembler::kNearJump;
#endif // DEBUG
__ j(ABOVE_EQUAL, &done, kJumpLength);
- __ StoreIntoObjectNoBarrier(AllocateObjectABI::kResultReg,
- Address(kNextFieldReg, 0), kNullReg);
- __ addq(kNextFieldReg, Immediate(target::kWordSize));
+ __ StoreCompressedIntoObjectNoBarrier(
+ AllocateObjectABI::kResultReg, Address(kNextFieldReg, 0), kNullReg);
+ __ addq(kNextFieldReg, Immediate(target::kCompressedWordSize));
__ jmp(&init_loop, Assembler::kNearJump);
__ Bind(&done);
} // kNextFieldReg = RDI, kNullReg = R10
@@ -1865,10 +1879,11 @@
host_type_arguments_field_offset_in_words_offset()));
// Set the type arguments in the new object.
- __ StoreIntoObject(AllocateObjectABI::kResultReg,
- FieldAddress(AllocateObjectABI::kResultReg,
- kTypeOffsetReg, TIMES_8, 0),
- AllocateObjectABI::kTypeArgumentsReg);
+ __ StoreCompressedIntoObject(
+ AllocateObjectABI::kResultReg,
+ FieldAddress(AllocateObjectABI::kResultReg, kTypeOffsetReg,
+ TIMES_COMPRESSED_WORD_SIZE, 0),
+ AllocateObjectABI::kTypeArgumentsReg);
__ Bind(¬_parameterized_case);
} // kTypeOffsetReg = RDI;
@@ -2021,13 +2036,16 @@
__ EnterStubFrame();
// Load the receiver.
- __ movq(R13, FieldAddress(R10, target::ArgumentsDescriptor::size_offset()));
+ // Note: In compressed pointer mode LoadCompressedSmi zero extends R13,
+ // rather than sign extending it. This is ok since it's an unsigned value.
+ __ LoadCompressedSmi(
+ R13, FieldAddress(R10, target::ArgumentsDescriptor::size_offset()));
__ movq(RAX,
Address(RBP, R13, TIMES_4,
target::frame_layout.param_end_from_fp * target::kWordSize));
// Load the function.
- __ movq(RBX, FieldAddress(RAX, target::Closure::function_offset()));
+ __ LoadCompressed(RBX, FieldAddress(RAX, target::Closure::function_offset()));
__ pushq(Immediate(0)); // Result slot.
__ pushq(RAX); // Receiver.
@@ -2035,7 +2053,7 @@
__ pushq(R10); // Arguments descriptor array.
// Adjust arguments count.
- __ cmpq(
+ __ OBJ(cmp)(
FieldAddress(R10, target::ArgumentsDescriptor::type_args_len_offset()),
Immediate(0));
__ movq(R10, R13);
@@ -2151,9 +2169,9 @@
// Check that first entry is for Smi/Smi.
Label error, ok;
const Immediate& imm_smi_cid = Immediate(target::ToRawSmi(kSmiCid));
- __ cmpq(Address(R13, 0 * target::kWordSize), imm_smi_cid);
+ __ OBJ(cmp)(Address(R13, 0 * target::kCompressedWordSize), imm_smi_cid);
__ j(NOT_EQUAL, &error, Assembler::kNearJump);
- __ cmpq(Address(R13, 1 * target::kWordSize), imm_smi_cid);
+ __ OBJ(cmp)(Address(R13, 1 * target::kCompressedWordSize), imm_smi_cid);
__ j(EQUAL, &ok, Assembler::kNearJump);
__ Bind(&error);
__ Stop("Incorrect IC data");
@@ -2162,9 +2180,9 @@
if (FLAG_optimization_counter_threshold >= 0) {
const intptr_t count_offset =
- target::ICData::CountIndexFor(num_args) * target::kWordSize;
+ target::ICData::CountIndexFor(num_args) * target::kCompressedWordSize;
// Update counter, ignore overflow.
- __ addq(Address(R13, count_offset), Immediate(target::ToRawSmi(1)));
+ __ OBJ(add)(Address(R13, count_offset), Immediate(target::ToRawSmi(1)));
}
__ ret();
@@ -2266,16 +2284,16 @@
__ movq(R10, FieldAddress(
RBX, target::CallSiteData::arguments_descriptor_offset()));
if (num_args == 2) {
- __ movq(RCX,
- FieldAddress(R10, target::ArgumentsDescriptor::count_offset()));
+ __ OBJ(mov)(
+ RCX, FieldAddress(R10, target::ArgumentsDescriptor::count_offset()));
__ movq(R9, Address(RSP, RCX, TIMES_4, -target::kWordSize));
__ LoadTaggedClassIdMayBeSmi(RCX, R9);
}
} else {
__ movq(R10, FieldAddress(
RBX, target::CallSiteData::arguments_descriptor_offset()));
- __ movq(RCX,
- FieldAddress(R10, target::ArgumentsDescriptor::count_offset()));
+ __ OBJ(mov)(RCX,
+ FieldAddress(R10, target::ArgumentsDescriptor::count_offset()));
__ movq(RDX, Address(RSP, RCX, TIMES_4, 0));
__ LoadTaggedClassIdMayBeSmi(RAX, RDX);
if (num_args == 2) {
@@ -2294,20 +2312,20 @@
// We unroll the generic one that is generated once more than the others.
const bool optimize = kind == Token::kILLEGAL;
const intptr_t target_offset =
- target::ICData::TargetIndexFor(num_args) * target::kWordSize;
+ target::ICData::TargetIndexFor(num_args) * target::kCompressedWordSize;
const intptr_t count_offset =
- target::ICData::CountIndexFor(num_args) * target::kWordSize;
+ target::ICData::CountIndexFor(num_args) * target::kCompressedWordSize;
const intptr_t exactness_offset =
- target::ICData::ExactnessIndexFor(num_args) * target::kWordSize;
+ target::ICData::ExactnessIndexFor(num_args) * target::kCompressedWordSize;
__ Bind(&loop);
for (int unroll = optimize ? 4 : 2; unroll >= 0; unroll--) {
Label update;
- __ movq(R9, Address(R13, 0));
+ __ OBJ(mov)(R9, Address(R13, 0));
__ cmpq(RAX, R9); // Class id match?
if (num_args == 2) {
__ j(NOT_EQUAL, &update); // Continue.
- __ movq(R9, Address(R13, target::kWordSize));
+ __ OBJ(mov)(R9, Address(R13, target::kCompressedWordSize));
// R9: next class ID to check (smi).
__ cmpq(RCX, R9); // Class id match?
}
@@ -2317,7 +2335,7 @@
const intptr_t entry_size = target::ICData::TestEntryLengthFor(
num_args, exactness == kCheckExactness) *
- target::kWordSize;
+ target::kCompressedWordSize;
__ addq(R13, Immediate(entry_size)); // Next entry.
__ cmpq(R9, Immediate(target::ToRawSmi(kIllegalCid))); // Done?
@@ -2332,7 +2350,8 @@
__ Comment("IC miss");
// Compute address of arguments (first read number of arguments from
// arguments descriptor array and then compute address on the stack).
- __ movq(RAX, FieldAddress(R10, target::ArgumentsDescriptor::count_offset()));
+ __ OBJ(mov)(RAX,
+ FieldAddress(R10, target::ArgumentsDescriptor::count_offset()));
__ leaq(RAX, Address(RSP, RAX, TIMES_4, 0)); // RAX is Smi.
__ EnterStubFrame();
if (save_entry_point) {
@@ -2375,9 +2394,10 @@
if (exactness == kCheckExactness) {
Label exactness_ok;
ASSERT(num_args == 1);
- __ movq(RAX, Address(R13, exactness_offset));
- __ cmpq(RAX, Immediate(target::ToRawSmi(
- StaticTypeExactnessState::HasExactSuperType().Encode())));
+ __ OBJ(mov)(RAX, Address(R13, exactness_offset));
+ __ OBJ(cmp)(RAX,
+ Immediate(target::ToRawSmi(
+ StaticTypeExactnessState::HasExactSuperType().Encode())));
__ j(LESS, &exactness_ok);
__ j(EQUAL, &call_target_function_through_unchecked_entry);
@@ -2390,21 +2410,25 @@
// RAX contains an offset to type arguments in words as a smi,
// hence TIMES_4. RDX is guaranteed to be non-smi because it is expected
// to have type arguments.
- __ cmpq(RCX, FieldAddress(RDX, RAX, TIMES_4, 0));
+#if defined(DART_COMPRESSED_POINTERS)
+ __ movsxd(RAX, RAX);
+#endif
+ __ OBJ(cmp)(RCX,
+ FieldAddress(RDX, RAX, TIMES_COMPRESSED_HALF_WORD_SIZE, 0));
__ j(EQUAL, &call_target_function_through_unchecked_entry);
// Update exactness state (not-exact anymore).
- __ movq(Address(R13, exactness_offset),
- Immediate(target::ToRawSmi(
- StaticTypeExactnessState::NotExact().Encode())));
+ __ OBJ(mov)(Address(R13, exactness_offset),
+ Immediate(target::ToRawSmi(
+ StaticTypeExactnessState::NotExact().Encode())));
__ Bind(&exactness_ok);
}
- __ movq(RAX, Address(R13, target_offset));
+ __ LoadCompressed(RAX, Address(R13, target_offset));
if (FLAG_optimization_counter_threshold >= 0) {
__ Comment("Update ICData counter");
// Ignore overflow.
- __ addq(Address(R13, count_offset), Immediate(target::ToRawSmi(1)));
+ __ OBJ(add)(Address(R13, count_offset), Immediate(target::ToRawSmi(1)));
}
__ Comment("Call target (via specified entry point)");
@@ -2427,7 +2451,7 @@
__ addq(Address(R13, count_offset), Immediate(target::ToRawSmi(1)));
}
__ Comment("Call target (via unchecked entry point)");
- __ movq(RAX, Address(R13, target_offset));
+ __ LoadCompressed(RAX, Address(R13, target_offset));
__ LoadCompressed(CODE_REG,
FieldAddress(RAX, target::Function::code_offset()));
__ jmp(FieldAddress(
@@ -2595,13 +2619,13 @@
__ leaq(R12, FieldAddress(R12, target::Array::data_offset()));
// R12: points directly to the first ic data array element.
const intptr_t target_offset =
- target::ICData::TargetIndexFor(0) * target::kWordSize;
+ target::ICData::TargetIndexFor(0) * target::kCompressedWordSize;
const intptr_t count_offset =
- target::ICData::CountIndexFor(0) * target::kWordSize;
+ target::ICData::CountIndexFor(0) * target::kCompressedWordSize;
if (FLAG_optimization_counter_threshold >= 0) {
// Increment count for this call, ignore overflow.
- __ addq(Address(R12, count_offset), Immediate(target::ToRawSmi(1)));
+ __ OBJ(add)(Address(R12, count_offset), Immediate(target::ToRawSmi(1)));
}
// Load arguments descriptor into R10.
@@ -2609,7 +2633,7 @@
RBX, target::CallSiteData::arguments_descriptor_offset()));
// Get function and call it, if possible.
- __ movq(RAX, Address(R12, target_offset));
+ __ LoadCompressed(RAX, Address(R12, target_offset));
__ LoadCompressed(CODE_REG,
FieldAddress(RAX, target::Function::code_offset()));
@@ -2809,25 +2833,27 @@
// Closure handling.
{
- __ movq(STCInternalRegs::kInstanceCidOrSignatureReg,
- FieldAddress(TypeTestABI::kInstanceReg,
- target::Closure::function_offset()));
+ __ Comment("Closure");
+ __ LoadCompressed(STCInternalRegs::kInstanceCidOrSignatureReg,
+ FieldAddress(TypeTestABI::kInstanceReg,
+ target::Closure::function_offset()));
__ LoadCompressed(STCInternalRegs::kInstanceCidOrSignatureReg,
FieldAddress(STCInternalRegs::kInstanceCidOrSignatureReg,
target::Function::signature_offset()));
if (n >= 3) {
- __ movq(
+ __ LoadCompressed(
STCInternalRegs::kInstanceInstantiatorTypeArgumentsReg,
FieldAddress(TypeTestABI::kInstanceReg,
target::Closure::instantiator_type_arguments_offset()));
if (n >= 7) {
- __ movq(
+ __ LoadCompressed(
kInstanceParentFunctionTypeArgumentsReg,
FieldAddress(TypeTestABI::kInstanceReg,
target::Closure::function_type_arguments_offset()));
- __ movq(kInstanceDelayedFunctionTypeArgumentsReg,
- FieldAddress(TypeTestABI::kInstanceReg,
- target::Closure::delayed_type_arguments_offset()));
+ __ LoadCompressed(
+ kInstanceDelayedFunctionTypeArgumentsReg,
+ FieldAddress(TypeTestABI::kInstanceReg,
+ target::Closure::delayed_type_arguments_offset()));
}
}
__ jmp(&loop, Assembler::kNearJump);
@@ -2835,6 +2861,7 @@
// Non-Closure handling.
{
+ __ Comment("Non-Closure");
__ Bind(¬_closure);
if (n >= 3) {
Label has_no_type_arguments;
@@ -2849,8 +2876,10 @@
__ cmpl(kScratchReg, Immediate(target::Class::kNoTypeArguments));
__ j(EQUAL, &has_no_type_arguments, Assembler::kNearJump);
__ movq(STCInternalRegs::kInstanceInstantiatorTypeArgumentsReg,
- FieldAddress(TypeTestABI::kInstanceReg, kScratchReg, TIMES_8, 0));
+ FieldAddress(TypeTestABI::kInstanceReg, kScratchReg,
+ TIMES_COMPRESSED_WORD_SIZE, 0));
__ Bind(&has_no_type_arguments);
+ __ Comment("No type arguments");
if (n >= 7) {
__ movq(kInstanceParentFunctionTypeArgumentsReg, kNullReg);
@@ -2864,40 +2893,42 @@
// Loop header.
__ Bind(&loop);
- __ movq(kScratchReg,
- Address(STCInternalRegs::kCacheEntryReg,
- target::kWordSize *
- target::SubtypeTestCache::kInstanceCidOrSignature));
- __ cmpq(kScratchReg, kNullReg);
+ __ Comment("Loop");
+ __ OBJ(mov)(kScratchReg,
+ Address(STCInternalRegs::kCacheEntryReg,
+ target::kCompressedWordSize *
+ target::SubtypeTestCache::kInstanceCidOrSignature));
+ __ OBJ(cmp)(kScratchReg, kNullReg);
__ j(EQUAL, ¬_found, Assembler::kNearJump);
- __ cmpq(kScratchReg, STCInternalRegs::kInstanceCidOrSignatureReg);
+ __ OBJ(cmp)(kScratchReg, STCInternalRegs::kInstanceCidOrSignatureReg);
if (n == 1) {
__ j(EQUAL, &found, Assembler::kNearJump);
} else {
__ j(NOT_EQUAL, &next_iteration, Assembler::kNearJump);
- __ cmpq(TypeTestABI::kDstTypeReg,
- Address(STCInternalRegs::kCacheEntryReg,
- target::kWordSize *
- target::SubtypeTestCache::kDestinationType));
+ __ OBJ(cmp)(TypeTestABI::kDstTypeReg,
+ Address(STCInternalRegs::kCacheEntryReg,
+ target::kCompressedWordSize *
+ target::SubtypeTestCache::kDestinationType));
__ j(NOT_EQUAL, &next_iteration, Assembler::kNearJump);
- __ cmpq(STCInternalRegs::kInstanceInstantiatorTypeArgumentsReg,
- Address(STCInternalRegs::kCacheEntryReg,
- target::kWordSize *
- target::SubtypeTestCache::kInstanceTypeArguments));
+ __ OBJ(cmp)(STCInternalRegs::kInstanceInstantiatorTypeArgumentsReg,
+ Address(STCInternalRegs::kCacheEntryReg,
+ target::kCompressedWordSize *
+ target::SubtypeTestCache::kInstanceTypeArguments));
if (n == 3) {
__ j(EQUAL, &found, Assembler::kNearJump);
} else {
__ j(NOT_EQUAL, &next_iteration, Assembler::kNearJump);
- __ cmpq(
+ __ OBJ(cmp)(
TypeTestABI::kInstantiatorTypeArgumentsReg,
Address(STCInternalRegs::kCacheEntryReg,
- target::kWordSize *
+ target::kCompressedWordSize *
target::SubtypeTestCache::kInstantiatorTypeArguments));
__ j(NOT_EQUAL, &next_iteration, Assembler::kNearJump);
- __ cmpq(TypeTestABI::kFunctionTypeArgumentsReg,
- Address(STCInternalRegs::kCacheEntryReg,
- target::kWordSize *
- target::SubtypeTestCache::kFunctionTypeArguments));
+ __ OBJ(cmp)(
+ TypeTestABI::kFunctionTypeArgumentsReg,
+ Address(STCInternalRegs::kCacheEntryReg,
+ target::kCompressedWordSize *
+ target::SubtypeTestCache::kFunctionTypeArguments));
if (n == 5) {
__ j(EQUAL, &found, Assembler::kNearJump);
@@ -2905,34 +2936,38 @@
ASSERT(n == 7);
__ j(NOT_EQUAL, &next_iteration, Assembler::kNearJump);
- __ cmpq(kInstanceParentFunctionTypeArgumentsReg,
- Address(STCInternalRegs::kCacheEntryReg,
- target::kWordSize *
- target::SubtypeTestCache::
- kInstanceParentFunctionTypeArguments));
+ __ OBJ(cmp)(kInstanceParentFunctionTypeArgumentsReg,
+ Address(STCInternalRegs::kCacheEntryReg,
+ target::kCompressedWordSize *
+ target::SubtypeTestCache::
+ kInstanceParentFunctionTypeArguments));
__ j(NOT_EQUAL, &next_iteration, Assembler::kNearJump);
- __ cmpq(kInstanceDelayedFunctionTypeArgumentsReg,
- Address(STCInternalRegs::kCacheEntryReg,
- target::kWordSize *
- target::SubtypeTestCache::
- kInstanceDelayedFunctionTypeArguments));
+ __ OBJ(cmp)(kInstanceDelayedFunctionTypeArgumentsReg,
+ Address(STCInternalRegs::kCacheEntryReg,
+ target::kCompressedWordSize *
+ target::SubtypeTestCache::
+ kInstanceDelayedFunctionTypeArguments));
__ j(EQUAL, &found, Assembler::kNearJump);
}
}
}
__ Bind(&next_iteration);
+ __ Comment("Next iteration");
__ addq(STCInternalRegs::kCacheEntryReg,
- Immediate(target::kWordSize *
+ Immediate(target::kCompressedWordSize *
target::SubtypeTestCache::kTestEntryLength));
__ jmp(&loop, Assembler::kNearJump);
__ Bind(&found);
- __ movq(TypeTestABI::kSubtypeTestCacheResultReg,
- Address(STCInternalRegs::kCacheEntryReg,
- target::kWordSize * target::SubtypeTestCache::kTestResult));
+ __ Comment("Found");
+ __ LoadCompressed(TypeTestABI::kSubtypeTestCacheResultReg,
+ Address(STCInternalRegs::kCacheEntryReg,
+ target::kCompressedWordSize *
+ target::SubtypeTestCache::kTestResult));
__ Bind(¬_found);
+ __ Comment("Not found");
if (n >= 7) {
__ popq(kInstanceDelayedFunctionTypeArgumentsReg);
__ popq(kInstanceParentFunctionTypeArgumentsReg);
@@ -3207,7 +3242,7 @@
const intptr_t base = target::Array::data_offset();
// RCX is smi tagged, but table entries are two words, so TIMES_8.
Label probe_failed;
- __ cmpq(RAX, FieldAddress(RDI, RCX, TIMES_8, base));
+ __ OBJ(cmp)(RAX, FieldAddress(RDI, RCX, TIMES_COMPRESSED_WORD_SIZE, base));
__ j(NOT_EQUAL, &probe_failed, Assembler::kNearJump);
Label load_target;
@@ -3216,14 +3251,14 @@
// proper target for the given name and arguments descriptor. If the
// illegal class id was found, the target is a cache miss handler that can
// be invoked as a normal Dart function.
- const auto target_address =
- FieldAddress(RDI, RCX, TIMES_8, base + target::kWordSize);
+ const auto target_address = FieldAddress(RDI, RCX, TIMES_COMPRESSED_WORD_SIZE,
+ base + target::kCompressedWordSize);
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
__ movq(R10, FieldAddress(
RBX, target::CallSiteData::arguments_descriptor_offset()));
__ jmp(target_address);
} else {
- __ movq(RAX, target_address);
+ __ LoadCompressed(RAX, target_address);
__ movq(R10, FieldAddress(
RBX, target::CallSiteData::arguments_descriptor_offset()));
__ movq(RCX, FieldAddress(RAX, target::Function::entry_point_offset()));
@@ -3234,8 +3269,8 @@
// Probe failed, check if it is a miss.
__ Bind(&probe_failed);
- __ cmpq(FieldAddress(RDI, RCX, TIMES_8, base),
- Immediate(target::ToRawSmi(kIllegalCid)));
+ __ OBJ(cmp)(FieldAddress(RDI, RCX, TIMES_COMPRESSED_WORD_SIZE, base),
+ Immediate(target::ToRawSmi(kIllegalCid)));
Label miss;
__ j(ZERO, &miss, Assembler::kNearJump);
@@ -3266,29 +3301,34 @@
// RAX: receiver cid as Smi
__ Bind(&loop);
- __ movq(R9, Address(R13, 0));
- __ cmpq(RAX, R9);
+ __ OBJ(mov)(R9, Address(R13, 0));
+ __ OBJ(cmp)(RAX, R9);
__ j(EQUAL, &found, Assembler::kNearJump);
ASSERT(target::ToRawSmi(kIllegalCid) == 0);
- __ testq(R9, R9);
+ __ OBJ(test)(R9, R9);
__ j(ZERO, &miss, Assembler::kNearJump);
const intptr_t entry_length =
target::ICData::TestEntryLengthFor(1, /*tracking_exactness=*/false) *
- target::kWordSize;
- __ addq(R13, Immediate(entry_length)); // Next entry.
+ target::kCompressedWordSize;
+ __ OBJ(add)(R13, Immediate(entry_length)); // Next entry.
__ jmp(&loop);
__ Bind(&found);
const intptr_t code_offset =
- target::ICData::CodeIndexFor(1) * target::kWordSize;
+ target::ICData::CodeIndexFor(1) * target::kCompressedWordSize;
+#if defined(DART_COMPRESSED_POINTERS)
+ __ LoadCompressed(CODE_REG, Address(R13, code_offset));
+ __ jmp(FieldAddress(CODE_REG, target::Code::entry_point_offset()));
+#else
const intptr_t entry_offset =
- target::ICData::EntryPointIndexFor(1) * target::kWordSize;
+ target::ICData::EntryPointIndexFor(1) * target::kCompressedWordSize;
if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions)) {
__ movq(CODE_REG, Address(R13, code_offset));
}
__ jmp(Address(R13, entry_offset));
+#endif
__ Bind(&miss);
__ LoadIsolate(RAX);
@@ -3410,22 +3450,25 @@
// Use load-acquire to test for sentinel, if we found non-sentinel it is safe
// to access the other entries. If we found a sentinel we go to runtime.
- __ LoadAcquire(RDI, RAX,
- TypeArguments::Instantiation::kInstantiatorTypeArgsIndex *
- target::kWordSize);
- __ CompareImmediate(RDI, Smi::RawValue(TypeArguments::kNoInstantiator));
+ __ LoadAcquireCompressed(
+ RDI, RAX,
+ TypeArguments::Instantiation::kInstantiatorTypeArgsIndex *
+ target::kCompressedWordSize);
+ __ CompareImmediate(RDI, Smi::RawValue(TypeArguments::kNoInstantiator),
+ kObjectBytes);
__ j(EQUAL, &call_runtime, compiler::Assembler::kNearJump);
__ cmpq(RDI, InstantiationABI::kInstantiatorTypeArgumentsReg);
__ j(NOT_EQUAL, &next, compiler::Assembler::kNearJump);
- __ movq(R10, compiler::Address(
- RAX, TypeArguments::Instantiation::kFunctionTypeArgsIndex *
- target::kWordSize));
+ __ LoadCompressed(
+ R10, compiler::Address(
+ RAX, TypeArguments::Instantiation::kFunctionTypeArgsIndex *
+ target::kCompressedWordSize));
__ cmpq(R10, InstantiationABI::kFunctionTypeArgumentsReg);
__ j(EQUAL, &found, compiler::Assembler::kNearJump);
__ Bind(&next);
__ addq(RAX, compiler::Immediate(TypeArguments::Instantiation::kSizeInWords *
- target::kWordSize));
+ target::kCompressedWordSize));
__ jmp(&loop);
// Instantiate non-null type arguments.
@@ -3443,10 +3486,11 @@
__ ret();
__ Bind(&found);
- __ movq(InstantiationABI::kResultTypeArgumentsReg,
- compiler::Address(
- RAX, TypeArguments::Instantiation::kInstantiatedTypeArgsIndex *
- target::kWordSize));
+ __ LoadCompressed(
+ InstantiationABI::kResultTypeArgumentsReg,
+ compiler::Address(
+ RAX, TypeArguments::Instantiation::kInstantiatedTypeArgsIndex *
+ target::kCompressedWordSize));
__ ret();
}
diff --git a/runtime/vm/constants_arm.h b/runtime/vm/constants_arm.h
index 620199b..a5f9f94 100644
--- a/runtime/vm/constants_arm.h
+++ b/runtime/vm/constants_arm.h
@@ -738,7 +738,20 @@
kMulRnShift = 12,
kMulRnBits = 4,
- // Div instruction register field encodings.
+ // ldrex/strex register field encodings.
+ kLdrExRnShift = 16,
+ kLdrExRtShift = 12,
+ kStrExRnShift = 16,
+ kStrExRdShift = 12,
+ kStrExRtShift = 0,
+
+ // Media operation field encodings.
+ kMediaOp1Shift = 20,
+ kMediaOp1Bits = 5,
+ kMediaOp2Shift = 5,
+ kMediaOp2Bits = 3,
+
+ // udiv/sdiv instruction register field encodings.
kDivRdShift = 16,
kDivRdBits = 4,
kDivRmShift = 8,
@@ -746,12 +759,13 @@
kDivRnShift = 0,
kDivRnBits = 4,
- // ldrex/strex register field encodings.
- kLdExRnShift = 16,
- kLdExRtShift = 12,
- kStrExRnShift = 16,
- kStrExRdShift = 12,
- kStrExRtShift = 0,
+ // sbfx/ubfx instruction register and immediate field encodings.
+ kBitFieldExtractWidthShift = 16,
+ kBitFieldExtractWidthBits = 5,
+ kBitFieldExtractLSBShift = 7,
+ kBitFieldExtractLSBBits = 5,
+ kBitFieldExtractRnShift = 0,
+ kBitFieldExtractRnBits = 4,
// MRC instruction offset field encoding.
kCRmShift = 0,
@@ -853,6 +867,7 @@
return static_cast<Condition>(Bits(kConditionShift, kConditionBits));
}
inline int TypeField() const { return Bits(kTypeShift, kTypeBits); }
+ inline int SubtypeField() const { return Bit(4); }
inline Register RnField() const {
return static_cast<Register>(Bits(kRnShift, kRnBits));
@@ -936,6 +951,16 @@
return bit_cast<double, uint64_t>(imm64);
}
+ // Shared fields used in media instructions.
+ inline int MediaOp1Field() const {
+ return static_cast<Register>(Bits(kMediaOp1Shift, kMediaOp1Bits));
+ }
+ inline int MediaOp2Field() const {
+ return static_cast<Register>(Bits(kMediaOp2Shift, kMediaOp2Bits));
+ }
+
+ // Fields used in division instructions.
+ inline bool IsDivUnsigned() const { return Bit(21) == 0b1; }
inline Register DivRdField() const {
return static_cast<Register>(Bits(kDivRdShift, kDivRdBits));
}
@@ -946,6 +971,19 @@
return static_cast<Register>(Bits(kDivRnShift, kDivRnBits));
}
+ // Fields used in bit field extract instructions.
+ inline bool IsBitFieldExtractSignExtended() const { return Bit(22) == 0; }
+ inline uint8_t BitFieldExtractWidthField() const {
+ return Bits(kBitFieldExtractWidthShift, kBitFieldExtractWidthBits);
+ }
+ inline uint8_t BitFieldExtractLSBField() const {
+ return Bits(kBitFieldExtractLSBShift, kBitFieldExtractLSBBits);
+ }
+ inline Register BitFieldExtractRnField() const {
+ return static_cast<Register>(
+ Bits(kBitFieldExtractRnShift, kBitFieldExtractRnBits));
+ }
+
// Test for data processing instructions of type 0 or 1.
// See "ARM Architecture Reference Manual ARMv7-A and ARMv7-R edition",
// section A5.1 "ARM instruction set encoding".
@@ -1012,20 +1050,6 @@
return static_cast<QRegister>(bits >> 1);
}
- inline bool IsDivision() const {
- ASSERT(ConditionField() != kSpecialCondition);
- ASSERT(TypeField() == 3);
- return ((Bit(4) == 1) && (Bits(5, 3) == 0) && (Bit(20) == 1) &&
- (Bits(22, 3) == 4));
- }
-
- inline bool IsRbit() const {
- ASSERT(ConditionField() != kSpecialCondition);
- ASSERT(TypeField() == 3);
- return ((Bits(4, 4) == 3) && (Bits(8, 4) == 15) && (Bits(16, 4) == 15) &&
- (Bits(20, 8) == 111));
- }
-
// Test for VFP data processing or single transfer instructions of type 7.
inline bool IsVFPDataProcessingOrSingleTransfer() const {
ASSERT(ConditionField() != kSpecialCondition);
@@ -1068,6 +1092,37 @@
return (Bits(24, 4) == 4) && (Bit(20) == 0);
}
+ // Tests for media instructions of type 3.
+ inline bool IsMedia() const {
+ ASSERT_EQUAL(TypeField(), 3);
+ return SubtypeField() == 1;
+ }
+
+ inline bool IsDivision() const {
+ ASSERT(ConditionField() != kSpecialCondition);
+ ASSERT(IsMedia());
+ // B21 determines whether the division is signed or unsigned.
+ return (((MediaOp1Field() & 0b11101) == 0b10001) &&
+ (MediaOp2Field() == 0b000));
+ }
+
+ inline bool IsRbit() const {
+ ASSERT(ConditionField() != kSpecialCondition);
+ ASSERT(IsMedia());
+ // B19-B16 and B11-B8 are always set for rbit.
+ return ((MediaOp1Field() == 0b01111) && (MediaOp2Field() == 0b001) &&
+ (Bits(8, 4) == 0b1111) && (Bits(16, 4) == 0b1111));
+ }
+
+ inline bool IsBitFieldExtract() const {
+ ASSERT(ConditionField() != kSpecialCondition);
+ ASSERT(IsMedia());
+ // B22 determines whether extracted value is sign extended or not, and
+ // op bits B20 and B7 are part of the width and LSB fields, respectively.
+ return ((MediaOp1Field() & 0b11010) == 0b11010) &&
+ ((MediaOp2Field() & 0b011) == 0b10);
+ }
+
// Special accessors that test for existence of a value.
inline bool HasS() const { return SField() == 1; }
inline bool HasB() const { return BField() == 1; }
diff --git a/runtime/vm/constants_x64.h b/runtime/vm/constants_x64.h
index 546a0f2..7a3c9b7 100644
--- a/runtime/vm/constants_x64.h
+++ b/runtime/vm/constants_x64.h
@@ -347,6 +347,7 @@
#else
TIMES_COMPRESSED_WORD_SIZE = TIMES_HALF_WORD_SIZE,
#endif
+ TIMES_COMPRESSED_HALF_WORD_SIZE = TIMES_COMPRESSED_WORD_SIZE - 1,
};
#define R(reg) (1 << (reg))
diff --git a/runtime/vm/dart_entry.h b/runtime/vm/dart_entry.h
index 5f868bd..57025df 100644
--- a/runtime/vm/dart_entry.h
+++ b/runtime/vm/dart_entry.h
@@ -66,9 +66,13 @@
return Array::element_offset(kFirstNamedEntryIndex);
}
- static intptr_t name_offset() { return kNameOffset * kWordSize; }
- static intptr_t position_offset() { return kPositionOffset * kWordSize; }
- static intptr_t named_entry_size() { return kNamedEntrySize * kWordSize; }
+ static intptr_t name_offset() { return kNameOffset * kCompressedWordSize; }
+ static intptr_t position_offset() {
+ return kPositionOffset * kCompressedWordSize;
+ }
+ static intptr_t named_entry_size() {
+ return kNamedEntrySize * kCompressedWordSize;
+ }
// Constructs an argument descriptor where all arguments are boxed and
// therefore number of parameters equals parameter size.
diff --git a/runtime/vm/deferred_objects.cc b/runtime/vm/deferred_objects.cc
index 8fa9865..78b8631 100644
--- a/runtime/vm/deferred_objects.cc
+++ b/runtime/vm/deferred_objects.cc
@@ -438,7 +438,7 @@
for (intptr_t i = 0; i < field_count_; i++) {
offset ^= GetFieldOffset(i);
- field ^= offset_map.At(offset.Value() / kWordSize);
+ field ^= offset_map.At(offset.Value() / kCompressedWordSize);
value = GetValue(i);
ASSERT((value.ptr() != Object::sentinel().ptr()) ||
(!field.IsNull() && field.is_late()));
diff --git a/runtime/vm/globals.h b/runtime/vm/globals.h
index 3efe11c..d7426c4 100644
--- a/runtime/vm/globals.h
+++ b/runtime/vm/globals.h
@@ -33,10 +33,14 @@
const intptr_t kSmiMax32 = (static_cast<intptr_t>(1) << kSmiBits32) - 1;
const intptr_t kSmiMin32 = -(static_cast<intptr_t>(1) << kSmiBits32);
-#if !defined(DART_COMPRESSED_POINTERS)
-const intptr_t kCompressedWordSize = kWordSize;
+#if defined(DART_COMPRESSED_POINTERS)
+static constexpr int kCompressedWordSize = kInt32Size;
+static constexpr int kCompressedWordSizeLog2 = kInt32SizeLog2;
+typedef uint32_t compressed_uword;
#else
-const intptr_t kCompressedWordSize = sizeof(uint32_t);
+static constexpr int kCompressedWordSize = kWordSize;
+static constexpr int kCompressedWordSizeLog2 = kWordSizeLog2;
+typedef uintptr_t compressed_uword;
#endif
// Number of bytes per BigInt digit.
diff --git a/runtime/vm/heap/marker.cc b/runtime/vm/heap/marker.cc
index f6a984c..8b4e869 100644
--- a/runtime/vm/heap/marker.cc
+++ b/runtime/vm/heap/marker.cc
@@ -50,12 +50,13 @@
WeakPropertyPtr cur_weak = delayed_weak_properties_;
delayed_weak_properties_ = WeakProperty::null();
while (cur_weak != WeakProperty::null()) {
- WeakPropertyPtr next_weak = cur_weak->untag()->next_;
- ObjectPtr raw_key = cur_weak->untag()->key_;
+ WeakPropertyPtr next_weak =
+ cur_weak->untag()->next_.Decompress(cur_weak->heap_base());
+ ObjectPtr raw_key = cur_weak->untag()->key();
// Reset the next pointer in the weak property.
cur_weak->untag()->next_ = WeakProperty::null();
if (raw_key->untag()->IsMarked()) {
- ObjectPtr raw_val = cur_weak->untag()->value_;
+ ObjectPtr raw_val = cur_weak->untag()->value();
marked = marked ||
(raw_val->IsHeapObject() && !raw_val->untag()->IsMarked());
@@ -147,14 +148,17 @@
ASSERT(raw_weak->IsOldObject());
ASSERT(raw_weak->IsWeakProperty());
ASSERT(raw_weak->untag()->IsMarked());
- ASSERT(raw_weak->untag()->next_ == WeakProperty::null());
+ ASSERT(raw_weak->untag()->next_ ==
+ CompressedWeakPropertyPtr(WeakProperty::null()));
raw_weak->untag()->next_ = delayed_weak_properties_;
delayed_weak_properties_ = raw_weak;
}
intptr_t ProcessWeakProperty(WeakPropertyPtr raw_weak, bool did_mark) {
// The fate of the weak property is determined by its key.
- ObjectPtr raw_key = LoadPointerIgnoreRace(&raw_weak->untag()->key_);
+ ObjectPtr raw_key =
+ LoadCompressedPointerIgnoreRace(&raw_weak->untag()->key_)
+ .Decompress(raw_weak->heap_base());
if (raw_key->IsHeapObject() && raw_key->IsOldObject() &&
!raw_key->untag()->IsMarked()) {
// Key was white. Enqueue the weak property.
@@ -202,9 +206,10 @@
delayed_weak_properties_ = WeakProperty::null();
intptr_t weak_properties_cleared = 0;
while (cur_weak != WeakProperty::null()) {
- WeakPropertyPtr next_weak = cur_weak->untag()->next_;
+ WeakPropertyPtr next_weak =
+ cur_weak->untag()->next_.Decompress(cur_weak->heap_base());
cur_weak->untag()->next_ = WeakProperty::null();
- RELEASE_ASSERT(!cur_weak->untag()->key_->untag()->IsMarked());
+ RELEASE_ASSERT(!cur_weak->untag()->key()->untag()->IsMarked());
WeakProperty::Clear(cur_weak);
weak_properties_cleared++;
// Advance to next weak property in the queue.
diff --git a/runtime/vm/heap/pages.cc b/runtime/vm/heap/pages.cc
index 0aa6d1b..dc87ca1 100644
--- a/runtime/vm/heap/pages.cc
+++ b/runtime/vm/heap/pages.cc
@@ -132,16 +132,20 @@
static_cast<ArrayPtr>(UntaggedObject::FromAddr(object_start()));
ASSERT(obj->IsArray());
ASSERT(obj->untag()->IsCardRemembered());
- ObjectPtr* obj_from = obj->untag()->from();
- ObjectPtr* obj_to = obj->untag()->to(Smi::Value(obj->untag()->length_));
+ CompressedObjectPtr* obj_from = obj->untag()->from();
+ CompressedObjectPtr* obj_to =
+ obj->untag()->to(Smi::Value(obj->untag()->length()));
+ uword heap_base = obj.heap_base();
const intptr_t size = card_table_size();
for (intptr_t i = 0; i < size; i++) {
if (card_table_[i] != 0) {
- ObjectPtr* card_from =
- reinterpret_cast<ObjectPtr*>(this) + (i << kSlotsPerCardLog2);
- ObjectPtr* card_to = reinterpret_cast<ObjectPtr*>(card_from) +
- (1 << kSlotsPerCardLog2) - 1;
+ CompressedObjectPtr* card_from =
+ reinterpret_cast<CompressedObjectPtr*>(this) +
+ (i << kSlotsPerCardLog2);
+ CompressedObjectPtr* card_to =
+ reinterpret_cast<CompressedObjectPtr*>(card_from) +
+ (1 << kSlotsPerCardLog2) - 1;
// Minus 1 because to is inclusive.
if (card_from < obj_from) {
@@ -154,10 +158,10 @@
card_to = obj_to;
}
- visitor->VisitPointers(card_from, card_to);
+ visitor->VisitCompressedPointers(heap_base, card_from, card_to);
bool has_new_target = false;
- for (ObjectPtr* slot = card_from; slot <= card_to; slot++) {
+ for (CompressedObjectPtr* slot = card_from; slot <= card_to; slot++) {
if ((*slot)->IsNewObjectMayBeSmi()) {
has_new_target = true;
break;
diff --git a/runtime/vm/heap/pages.h b/runtime/vm/heap/pages.h
index 4795240..8a91944 100644
--- a/runtime/vm/heap/pages.h
+++ b/runtime/vm/heap/pages.h
@@ -123,7 +123,8 @@
// 1 card = 128 slots.
static const intptr_t kSlotsPerCardLog2 = 7;
- static const intptr_t kBytesPerCardLog2 = kWordSizeLog2 + kSlotsPerCardLog2;
+ static const intptr_t kBytesPerCardLog2 =
+ kCompressedWordSizeLog2 + kSlotsPerCardLog2;
intptr_t card_table_size() const {
return memory_->size() >> kBytesPerCardLog2;
diff --git a/runtime/vm/heap/scavenger.cc b/runtime/vm/heap/scavenger.cc
index 3faf889..820eeff 100644
--- a/runtime/vm/heap/scavenger.cc
+++ b/runtime/vm/heap/scavenger.cc
@@ -1273,11 +1273,12 @@
WeakPropertyPtr cur_weak = delayed_weak_properties_;
delayed_weak_properties_ = WeakProperty::null();
while (cur_weak != WeakProperty::null()) {
- WeakPropertyPtr next_weak = cur_weak->untag()->next_;
+ WeakPropertyPtr next_weak =
+ cur_weak->untag()->next_.Decompress(cur_weak->heap_base());
// Promoted weak properties are not enqueued. So we can guarantee that
// we do not need to think about store barriers here.
ASSERT(cur_weak->IsNewObject());
- ObjectPtr raw_key = cur_weak->untag()->key_;
+ ObjectPtr raw_key = cur_weak->untag()->key();
ASSERT(raw_key->IsHeapObject());
// Key still points into from space even if the object has been
// promoted to old space by now. The key will be updated accordingly
@@ -1334,7 +1335,8 @@
uword header = *reinterpret_cast<uword*>(raw_addr);
ASSERT(!IsForwarding(header));
#endif // defined(DEBUG)
- ASSERT(raw_weak->untag()->next_ == WeakProperty::null());
+ ASSERT(raw_weak->untag()->next_ ==
+ CompressedWeakPropertyPtr(WeakProperty::null()));
raw_weak->untag()->next_ = delayed_weak_properties_;
delayed_weak_properties_ = raw_weak;
}
@@ -1345,7 +1347,7 @@
if (UNLIKELY(class_id == kWeakPropertyCid)) {
WeakPropertyPtr raw_weak = static_cast<WeakPropertyPtr>(raw_obj);
// The fate of the weak property is determined by its key.
- ObjectPtr raw_key = raw_weak->untag()->key_;
+ ObjectPtr raw_key = raw_weak->untag()->key();
if (raw_key->IsHeapObject() && raw_key->IsNewObject()) {
uword raw_addr = UntaggedObject::ToAddr(raw_key);
uword header = *reinterpret_cast<uword*>(raw_addr);
@@ -1422,12 +1424,13 @@
WeakPropertyPtr cur_weak = delayed_weak_properties_;
delayed_weak_properties_ = WeakProperty::null();
while (cur_weak != WeakProperty::null()) {
- WeakPropertyPtr next_weak = cur_weak->untag()->next_;
+ WeakPropertyPtr next_weak =
+ cur_weak->untag()->next_.Decompress(cur_weak->heap_base());
// Reset the next pointer in the weak property.
cur_weak->untag()->next_ = WeakProperty::null();
#if defined(DEBUG)
- ObjectPtr raw_key = cur_weak->untag()->key_;
+ ObjectPtr raw_key = cur_weak->untag()->key();
uword raw_addr = UntaggedObject::ToAddr(raw_key);
uword header = *reinterpret_cast<uword*>(raw_addr);
ASSERT(!IsForwarding(header));
diff --git a/runtime/vm/object.cc b/runtime/vm/object.cc
index 477963e..ef519e7 100644
--- a/runtime/vm/object.cc
+++ b/runtime/vm/object.cc
@@ -2994,7 +2994,7 @@
for (intptr_t i = 0; i < fields.Length(); ++i) {
f ^= fields.At(i);
if (f.is_instance()) {
- array.SetAt(f.HostOffset() >> kWordSizeLog2, f);
+ array.SetAt(f.HostOffset() >> kCompressedWordSizeLog2, f);
}
}
cls = cls.SuperClass(original_classes);
@@ -3448,8 +3448,8 @@
// The instance needs a type_arguments field.
host_type_args_field_offset = host_offset;
target_type_args_field_offset = target_offset;
- host_offset += kWordSize;
- target_offset += compiler::target::kWordSize;
+ host_offset += kCompressedWordSize;
+ target_offset += compiler::target::kCompressedWordSize;
}
} else {
ASSERT(target_type_args_field_offset != RTN::Class::kNoTypeArguments);
@@ -3491,13 +3491,14 @@
break;
}
- const intptr_t host_num_words = field_size / kWordSize;
+ const intptr_t host_num_words = field_size / kCompressedWordSize;
const intptr_t host_next_offset = host_offset + field_size;
- const intptr_t host_next_position = host_next_offset / kWordSize;
+ const intptr_t host_next_position =
+ host_next_offset / kCompressedWordSize;
const intptr_t target_next_offset = target_offset + field_size;
const intptr_t target_next_position =
- target_next_offset / compiler::target::kWordSize;
+ target_next_offset / compiler::target::kCompressedWordSize;
// The bitmap has fixed length. Checks if the offset position is smaller
// than its length. If it is not, than the field should be boxed
@@ -3506,8 +3507,8 @@
for (intptr_t j = 0; j < host_num_words; j++) {
// Activate the respective bit in the bitmap, indicating that the
// content is not a pointer
- host_bitmap.Set(host_offset / kWordSize);
- host_offset += kWordSize;
+ host_bitmap.Set(host_offset / kCompressedWordSize);
+ host_offset += kCompressedWordSize;
}
ASSERT(host_offset == host_next_offset);
@@ -3515,12 +3516,12 @@
} else {
// Make the field boxed
field.set_is_unboxing_candidate(false);
- host_offset += kWordSize;
- target_offset += compiler::target::kWordSize;
+ host_offset += kCompressedWordSize;
+ target_offset += compiler::target::kCompressedWordSize;
}
} else {
- host_offset += kWordSize;
- target_offset += compiler::target::kWordSize;
+ host_offset += kCompressedWordSize;
+ target_offset += compiler::target::kCompressedWordSize;
}
}
}
@@ -7854,6 +7855,9 @@
ASSERT(!sig.IsNull());
#endif
const Array& parameter_names = Array::Handle(sig.parameter_names());
+ if (parameter_names.IsNull()) {
+ return false;
+ }
return parameter_names.Length() > NumParameters();
}
@@ -11303,9 +11307,9 @@
const intptr_t type_arguments_offset = cls.host_type_arguments_field_offset();
ASSERT(type_arguments_offset != Class::kNoTypeArguments);
if (StaticTypeExactnessState::CanRepresentAsTriviallyExact(
- type_arguments_offset / kWordSize)) {
+ type_arguments_offset / kCompressedWordSize)) {
return StaticTypeExactnessState::TriviallyExact(type_arguments_offset /
- kWordSize);
+ kCompressedWordSize);
} else {
return StaticTypeExactnessState::NotExact();
}
@@ -16042,7 +16046,7 @@
const intptr_t data_pos = index * TestEntryLength();
NoSafepointScope no_safepoint;
ArrayPtr raw_data = entries();
- return Smi::Value(Smi::RawCast(raw_data->untag()->data()[data_pos]));
+ return Smi::Value(Smi::RawCast(raw_data->untag()->element(data_pos)));
}
FunctionPtr ICData::GetTargetAt(intptr_t index) const {
@@ -16056,7 +16060,7 @@
NoSafepointScope no_safepoint;
ArrayPtr raw_data = entries();
- return static_cast<FunctionPtr>(raw_data->untag()->data()[data_pos]);
+ return static_cast<FunctionPtr>(raw_data->untag()->element(data_pos));
#endif
}
@@ -17948,10 +17952,12 @@
intptr_t capacity = mask() + 1;
for (intptr_t i = 0; i < capacity; ++i) {
const intptr_t target_index = i * kEntryLength + kTargetFunctionIndex;
- ObjectPtr* slot = &Array::DataOf(buckets())[target_index];
- const intptr_t cid = (*slot)->GetClassIdMayBeSmi();
+ CompressedObjectPtr* slot = &Array::DataOf(buckets())[target_index];
+ ObjectPtr decompressed_slot = slot->Decompress(buckets()->heap_base());
+ const intptr_t cid = decompressed_slot->GetClassIdMayBeSmi();
if (cid == kFunctionCid) {
- CodePtr code = Function::CurrentCodeOf(Function::RawCast(*slot));
+ CodePtr code =
+ Function::CurrentCodeOf(Function::RawCast(decompressed_slot));
*slot = Smi::FromAlignedAddress(Code::EntryPointOf(code));
} else {
ASSERT(cid == kSmiCid || cid == kNullCid);
@@ -18826,9 +18832,11 @@
uword this_addr = reinterpret_cast<uword>(this->untag());
uword other_addr = reinterpret_cast<uword>(other.untag());
for (intptr_t offset = Instance::NextFieldOffset(); offset < instance_size;
- offset += kWordSize) {
- if ((*reinterpret_cast<ObjectPtr*>(this_addr + offset)) !=
- (*reinterpret_cast<ObjectPtr*>(other_addr + offset))) {
+ offset += kCompressedWordSize) {
+ if ((reinterpret_cast<CompressedObjectPtr*>(this_addr + offset)
+ ->Decompress(untag()->heap_base())) !=
+ (reinterpret_cast<CompressedObjectPtr*>(other_addr + offset)
+ ->Decompress(untag()->heap_base()))) {
return false;
}
}
@@ -18850,7 +18858,7 @@
NoSafepointScope no_safepoint(thread);
const intptr_t instance_size = SizeFromClass();
ASSERT(instance_size != 0);
- hash = instance_size / kWordSize;
+ hash = instance_size / kCompressedWordSize;
uword this_addr = reinterpret_cast<uword>(this->untag());
Object& obj = Object::Handle(zone);
Instance& instance = Instance::Handle(zone);
@@ -18860,9 +18868,9 @@
GetClassId());
for (intptr_t offset = Instance::NextFieldOffset();
- offset < cls.host_next_field_offset(); offset += kWordSize) {
- if (unboxed_fields_bitmap.Get(offset / kWordSize)) {
- if (kWordSize == 8) {
+ offset < cls.host_next_field_offset(); offset += kCompressedWordSize) {
+ if (unboxed_fields_bitmap.Get(offset / kCompressedWordSize)) {
+ if (kCompressedWordSize == 8) {
hash = CombineHashes(hash,
*reinterpret_cast<uint32_t*>(this_addr + offset));
hash = CombineHashes(
@@ -18872,7 +18880,8 @@
*reinterpret_cast<uint32_t*>(this_addr + offset));
}
} else {
- obj = *reinterpret_cast<ObjectPtr*>(this_addr + offset);
+ obj = reinterpret_cast<CompressedObjectPtr*>(this_addr + offset)
+ ->Decompress(untag()->heap_base());
if (obj.IsSentinel()) {
hash = CombineHashes(hash, 11);
} else {
@@ -18928,16 +18937,16 @@
thread->isolate_group()->shared_class_table()->GetUnboxedFieldsMapAt(
class_id);
for (intptr_t offset = Instance::NextFieldOffset(); offset < instance_size;
- offset += kWordSize) {
- if (unboxed_fields_bitmap.Get(offset / kWordSize)) {
+ offset += kCompressedWordSize) {
+ if (unboxed_fields_bitmap.Get(offset / kCompressedWordSize)) {
continue;
}
- obj ^= *this->FieldAddrAtOffset(offset);
+ obj ^= this->FieldAddrAtOffset(offset)->Decompress(untag()->heap_base());
obj = obj.CanonicalizeLocked(thread);
this->SetFieldAtOffset(offset, obj);
}
} else {
-#if defined(DEBUG)
+#if defined(DEBUG) && !defined(DART_COMPRESSED_POINTERS)
// Make sure that we are not missing any fields.
IsolateGroup* group = IsolateGroup::Current();
CheckForPointers has_pointers(group);
@@ -19014,7 +19023,7 @@
}
}
} else {
- return *FieldAddr(field);
+ return FieldAddr(field)->Decompress(untag()->heap_base());
}
}
@@ -19045,7 +19054,7 @@
} else {
field.RecordStore(value);
const Object* stored_value = field.CloneForUnboxed(value);
- StorePointer(FieldAddr(field), stored_value->ptr());
+ StoreCompressedPointer(FieldAddr(field), stored_value->ptr());
}
}
@@ -19093,7 +19102,8 @@
intptr_t field_offset = cls.host_type_arguments_field_offset();
ASSERT(field_offset != Class::kNoTypeArguments);
TypeArguments& type_arguments = TypeArguments::Handle();
- type_arguments ^= *FieldAddrAtOffset(field_offset);
+ type_arguments ^=
+ FieldAddrAtOffset(field_offset)->Decompress(untag()->heap_base());
return type_arguments.ptr();
}
@@ -19381,7 +19391,8 @@
intptr_t* Instance::NativeFieldsDataAddr() const {
ASSERT(Thread::Current()->no_safepoint_scope_depth() > 0);
- TypedDataPtr native_fields = static_cast<TypedDataPtr>(*NativeFieldsAddr());
+ TypedDataPtr native_fields = static_cast<TypedDataPtr>(
+ NativeFieldsAddr()->Decompress(untag()->heap_base()));
if (native_fields == TypedData::null()) {
return NULL;
}
@@ -19390,11 +19401,12 @@
void Instance::SetNativeField(int index, intptr_t value) const {
ASSERT(IsValidNativeIndex(index));
- Object& native_fields = Object::Handle(*NativeFieldsAddr());
+ Object& native_fields =
+ Object::Handle(NativeFieldsAddr()->Decompress(untag()->heap_base()));
if (native_fields.IsNull()) {
// Allocate backing storage for the native fields.
native_fields = TypedData::New(kIntPtrCid, NumNativeFields());
- StorePointer(NativeFieldsAddr(), native_fields.ptr());
+ StoreCompressedPointer(NativeFieldsAddr(), native_fields.ptr());
}
intptr_t byte_offset = index * sizeof(intptr_t);
TypedData::Cast(native_fields).SetIntPtr(byte_offset, value);
@@ -19404,11 +19416,12 @@
const intptr_t* field_values) const {
ASSERT(num_native_fields == NumNativeFields());
ASSERT(field_values != NULL);
- Object& native_fields = Object::Handle(*NativeFieldsAddr());
+ Object& native_fields =
+ Object::Handle(NativeFieldsAddr()->Decompress(untag()->heap_base()));
if (native_fields.IsNull()) {
// Allocate backing storage for the native fields.
native_fields = TypedData::New(kIntPtrCid, NumNativeFields());
- StorePointer(NativeFieldsAddr(), native_fields.ptr());
+ StoreCompressedPointer(NativeFieldsAddr(), native_fields.ptr());
}
for (uint16_t i = 0; i < num_native_fields; i++) {
intptr_t byte_offset = i * sizeof(intptr_t);
@@ -19465,7 +19478,8 @@
REUSABLE_CLASS_HANDLESCOPE(thread);
Class& cls = thread->ClassHandle();
cls = clazz();
- return (offset >= 0 && offset <= (cls.host_instance_size() - kWordSize));
+ return (offset >= 0 &&
+ offset <= (cls.host_instance_size() - kCompressedWordSize));
}
intptr_t Instance::ElementSizeFor(intptr_t cid) {
diff --git a/runtime/vm/object.h b/runtime/vm/object.h
index e5a26b3..786fcb6 100644
--- a/runtime/vm/object.h
+++ b/runtime/vm/object.h
@@ -981,42 +981,44 @@
bool HasCompressedPointers() const;
intptr_t host_instance_size() const {
ASSERT(is_finalized() || is_prefinalized());
- return (untag()->host_instance_size_in_words_ * kWordSize);
+ return (untag()->host_instance_size_in_words_ * kCompressedWordSize);
}
intptr_t target_instance_size() const {
ASSERT(is_finalized() || is_prefinalized());
#if defined(DART_PRECOMPILER)
return (untag()->target_instance_size_in_words_ *
- compiler::target::kWordSize);
+ compiler::target::kCompressedWordSize);
#else
return host_instance_size();
#endif // defined(DART_PRECOMPILER)
}
static intptr_t host_instance_size(ClassPtr clazz) {
- return (clazz->untag()->host_instance_size_in_words_ * kWordSize);
+ return (clazz->untag()->host_instance_size_in_words_ * kCompressedWordSize);
}
static intptr_t target_instance_size(ClassPtr clazz) {
#if defined(DART_PRECOMPILER)
return (clazz->untag()->target_instance_size_in_words_ *
- compiler::target::kWordSize);
+ compiler::target::kCompressedWordSize);
#else
return host_instance_size(clazz);
#endif // defined(DART_PRECOMPILER)
}
void set_instance_size(intptr_t host_value_in_bytes,
intptr_t target_value_in_bytes) const {
- ASSERT(kWordSize != 0);
+ ASSERT(kCompressedWordSize != 0);
set_instance_size_in_words(
- host_value_in_bytes / kWordSize,
- target_value_in_bytes / compiler::target::kWordSize);
+ host_value_in_bytes / kCompressedWordSize,
+ target_value_in_bytes / compiler::target::kCompressedWordSize);
}
void set_instance_size_in_words(intptr_t host_value,
intptr_t target_value) const {
- ASSERT(Utils::IsAligned((host_value * kWordSize), kObjectAlignment));
+ ASSERT(
+ Utils::IsAligned((host_value * kCompressedWordSize), kObjectAlignment));
StoreNonPointer(&untag()->host_instance_size_in_words_, host_value);
#if defined(DART_PRECOMPILER)
- ASSERT(Utils::IsAligned((target_value * compiler::target::kWordSize),
- compiler::target::kObjectAlignment));
+ ASSERT(
+ Utils::IsAligned((target_value * compiler::target::kCompressedWordSize),
+ compiler::target::kObjectAlignment));
StoreNonPointer(&untag()->target_instance_size_in_words_, target_value);
#else
// Could be different only during cross-compilation.
@@ -1025,12 +1027,12 @@
}
intptr_t host_next_field_offset() const {
- return untag()->host_next_field_offset_in_words_ * kWordSize;
+ return untag()->host_next_field_offset_in_words_ * kCompressedWordSize;
}
intptr_t target_next_field_offset() const {
#if defined(DART_PRECOMPILER)
return untag()->target_next_field_offset_in_words_ *
- compiler::target::kWordSize;
+ compiler::target::kCompressedWordSize;
#else
return host_next_field_offset();
#endif // defined(DART_PRECOMPILER)
@@ -1038,25 +1040,25 @@
void set_next_field_offset(intptr_t host_value_in_bytes,
intptr_t target_value_in_bytes) const {
set_next_field_offset_in_words(
- host_value_in_bytes / kWordSize,
- target_value_in_bytes / compiler::target::kWordSize);
+ host_value_in_bytes / kCompressedWordSize,
+ target_value_in_bytes / compiler::target::kCompressedWordSize);
}
void set_next_field_offset_in_words(intptr_t host_value,
intptr_t target_value) const {
- ASSERT((host_value == -1) ||
- (Utils::IsAligned((host_value * kWordSize), kObjectAlignment) &&
- (host_value == untag()->host_instance_size_in_words_)) ||
- (!Utils::IsAligned((host_value * kWordSize), kObjectAlignment) &&
- ((host_value + 1) == untag()->host_instance_size_in_words_)));
+ // Assert that the next field offset is either negative (ie, this object
+ // can't be extended by dart code), or rounds up to the kObjectAligned
+ // instance size.
+ ASSERT((host_value < 0) ||
+ ((host_value <= untag()->host_instance_size_in_words_) &&
+ (host_value + (kObjectAlignment / kCompressedWordSize) >
+ untag()->host_instance_size_in_words_)));
StoreNonPointer(&untag()->host_next_field_offset_in_words_, host_value);
#if defined(DART_PRECOMPILER)
- ASSERT((target_value == -1) ||
- (Utils::IsAligned((target_value * compiler::target::kWordSize),
- compiler::target::kObjectAlignment) &&
- (target_value == untag()->target_instance_size_in_words_)) ||
- (!Utils::IsAligned((target_value * compiler::target::kWordSize),
- compiler::target::kObjectAlignment) &&
- ((target_value + 1) == untag()->target_instance_size_in_words_)));
+ ASSERT((target_value < 0) ||
+ ((target_value <= untag()->target_instance_size_in_words_) &&
+ (target_value + (compiler::target::kObjectAlignment /
+ compiler::target::kCompressedWordSize) >
+ untag()->target_instance_size_in_words_)));
StoreNonPointer(&untag()->target_next_field_offset_in_words_, target_value);
#else
// Could be different only during cross-compilation.
@@ -1183,7 +1185,8 @@
kNoTypeArguments) {
return kNoTypeArguments;
}
- return untag()->host_type_arguments_field_offset_in_words_ * kWordSize;
+ return untag()->host_type_arguments_field_offset_in_words_ *
+ kCompressedWordSize;
}
intptr_t target_type_arguments_field_offset() const {
#if defined(DART_PRECOMPILER)
@@ -1193,7 +1196,7 @@
return compiler::target::Class::kNoTypeArguments;
}
return untag()->target_type_arguments_field_offset_in_words_ *
- compiler::target::kWordSize;
+ compiler::target::kCompressedWordSize;
#else
return host_type_arguments_field_offset();
#endif // defined(DART_PRECOMPILER)
@@ -1208,9 +1211,10 @@
host_value = kNoTypeArguments;
target_value = RTN::Class::kNoTypeArguments;
} else {
- ASSERT(kWordSize != 0 && compiler::target::kWordSize);
- host_value = host_value_in_bytes / kWordSize;
- target_value = target_value_in_bytes / compiler::target::kWordSize;
+ ASSERT(kCompressedWordSize != 0 && compiler::target::kCompressedWordSize);
+ host_value = host_value_in_bytes / kCompressedWordSize;
+ target_value =
+ target_value_in_bytes / compiler::target::kCompressedWordSize;
}
set_type_arguments_field_offset_in_words(host_value, target_value);
}
@@ -7301,7 +7305,8 @@
const Class& cls = Class::Handle(clazz());
ASSERT(cls.is_finalized() || cls.is_prefinalized());
#endif
- return (clazz()->untag()->host_instance_size_in_words_ * kWordSize);
+ return (clazz()->untag()->host_instance_size_in_words_ *
+ kCompressedWordSize);
}
InstancePtr Canonicalize(Thread* thread) const;
@@ -7448,31 +7453,33 @@
const TypeArguments& other_instantiator_type_arguments,
const TypeArguments& other_function_type_arguments);
- ObjectPtr* FieldAddrAtOffset(intptr_t offset) const {
+ CompressedObjectPtr* FieldAddrAtOffset(intptr_t offset) const {
ASSERT(IsValidFieldOffset(offset));
- return reinterpret_cast<ObjectPtr*>(raw_value() - kHeapObjectTag + offset);
+ return reinterpret_cast<CompressedObjectPtr*>(raw_value() - kHeapObjectTag +
+ offset);
}
- ObjectPtr* FieldAddr(const Field& field) const {
+ CompressedObjectPtr* FieldAddr(const Field& field) const {
return FieldAddrAtOffset(field.HostOffset());
}
- ObjectPtr* NativeFieldsAddr() const {
+ CompressedObjectPtr* NativeFieldsAddr() const {
return FieldAddrAtOffset(sizeof(UntaggedObject));
}
void SetFieldAtOffset(intptr_t offset, const Object& value) const {
- StorePointer(FieldAddrAtOffset(offset), value.ptr());
+ StoreCompressedPointer(FieldAddrAtOffset(offset), value.ptr());
}
bool IsValidFieldOffset(intptr_t offset) const;
// The following raw methods are used for morphing.
// They are needed due to the extraction of the class in IsValidFieldOffset.
- ObjectPtr* RawFieldAddrAtOffset(intptr_t offset) const {
- return reinterpret_cast<ObjectPtr*>(raw_value() - kHeapObjectTag + offset);
+ CompressedObjectPtr* RawFieldAddrAtOffset(intptr_t offset) const {
+ return reinterpret_cast<CompressedObjectPtr*>(raw_value() - kHeapObjectTag +
+ offset);
}
ObjectPtr RawGetFieldAtOffset(intptr_t offset) const {
- return *RawFieldAddrAtOffset(offset);
+ return RawFieldAddrAtOffset(offset)->Decompress(untag()->heap_base());
}
void RawSetFieldAtOffset(intptr_t offset, const Object& value) const {
- StorePointer(RawFieldAddrAtOffset(offset), value.ptr());
+ StoreCompressedPointer(RawFieldAddrAtOffset(offset), value.ptr());
}
static InstancePtr NewFromCidAndSize(SharedClassTable* shared_class_table,
@@ -9968,10 +9975,11 @@
return OFFSET_OF_RETURNED_VALUE(UntaggedArray, data);
}
static intptr_t element_offset(intptr_t index) {
- return OFFSET_OF_RETURNED_VALUE(UntaggedArray, data) + kWordSize * index;
+ return OFFSET_OF_RETURNED_VALUE(UntaggedArray, data) +
+ kBytesPerElement * index;
}
static intptr_t index_at_offset(intptr_t offset_in_bytes) {
- intptr_t index = (offset_in_bytes - data_offset()) / kWordSize;
+ intptr_t index = (offset_in_bytes - data_offset()) / kBytesPerElement;
ASSERT(index >= 0);
return index;
}
@@ -9979,21 +9987,24 @@
struct ArrayTraits {
static intptr_t elements_start_offset() { return Array::data_offset(); }
- static constexpr intptr_t kElementSize = kWordSize;
+ static constexpr intptr_t kElementSize = kCompressedWordSize;
};
static bool Equals(ArrayPtr a, ArrayPtr b) {
if (a == b) return true;
if (a->IsRawNull() || b->IsRawNull()) return false;
if (a->untag()->length() != b->untag()->length()) return false;
- if (a->untag()->type_arguments() != b->untag()->type_arguments())
+ if (a->untag()->type_arguments() != b->untag()->type_arguments()) {
return false;
+ }
const intptr_t length = LengthOf(a);
- return memcmp(a->untag()->data(), b->untag()->data(), kWordSize * length) ==
- 0;
+ return memcmp(a->untag()->data(), b->untag()->data(),
+ kBytesPerElement * length) == 0;
}
- static ObjectPtr* DataOf(ArrayPtr array) { return array->untag()->data(); }
+ static CompressedObjectPtr* DataOf(ArrayPtr array) {
+ return array->untag()->data();
+ }
template <std::memory_order order = std::memory_order_relaxed>
ObjectPtr At(intptr_t index) const {
@@ -10036,7 +10047,7 @@
virtual bool CanonicalizeEquals(const Instance& other) const;
virtual uint32_t CanonicalizeHash() const;
- static const intptr_t kBytesPerElement = kWordSize;
+ static const intptr_t kBytesPerElement = ArrayTraits::kElementSize;
static const intptr_t kMaxElements = kSmiMax / kBytesPerElement;
static const intptr_t kMaxNewSpaceElements =
(Heap::kNewAllocatableSize - sizeof(UntaggedArray)) / kBytesPerElement;
@@ -10058,7 +10069,7 @@
static intptr_t InstanceSize(intptr_t len) {
// Ensure that variable length data is not adding to the object length.
ASSERT(sizeof(UntaggedArray) ==
- (sizeof(UntaggedInstance) + (2 * kWordSize)));
+ (sizeof(UntaggedInstance) + (2 * kBytesPerElement)));
ASSERT(IsValidLength(len));
return RoundedAllocationSize(sizeof(UntaggedArray) +
(len * kBytesPerElement));
@@ -10107,7 +10118,7 @@
Heap::Space space = Heap::kNew);
private:
- ObjectPtr const* ObjectAddr(intptr_t index) const {
+ CompressedObjectPtr const* ObjectAddr(intptr_t index) const {
// TODO(iposva): Determine if we should throw an exception here.
ASSERT((index >= 0) && (index < Length()));
return &untag()->data()[index];
@@ -10118,22 +10129,26 @@
untag()->set_length<std::memory_order_release>(Smi::New(value));
}
- template <typename type, std::memory_order order = std::memory_order_relaxed>
- void StoreArrayPointer(type const* addr, type value) const {
- ptr()->untag()->StoreArrayPointer<type, order>(addr, value);
+ template <typename type,
+ std::memory_order order = std::memory_order_relaxed,
+ typename value_type>
+ void StoreArrayPointer(type const* addr, value_type value) const {
+ ptr()->untag()->StoreArrayPointer<type, order, value_type>(addr, value);
}
// Store a range of pointers [from, from + count) into [to, to + count).
// TODO(koda): Use this to fix Object::Clone's broken store buffer logic.
- void StoreArrayPointers(ObjectPtr const* to,
- ObjectPtr const* from,
+ void StoreArrayPointers(CompressedObjectPtr const* to,
+ CompressedObjectPtr const* from,
intptr_t count) {
ASSERT(Contains(reinterpret_cast<uword>(to)));
if (ptr()->IsNewObject()) {
- memmove(const_cast<ObjectPtr*>(to), from, count * kWordSize);
+ memmove(const_cast<CompressedObjectPtr*>(to), from,
+ count * kBytesPerElement);
} else {
+ const uword heap_base = ptr()->heap_base();
for (intptr_t i = 0; i < count; ++i) {
- StoreArrayPointer(&to[i], from[i]);
+ StoreArrayPointer(&to[i], from[i].Decompress(heap_base));
}
}
}
@@ -11507,7 +11522,8 @@
}
static void Clear(WeakPropertyPtr raw_weak) {
- ASSERT(raw_weak->untag()->next_ == WeakProperty::null());
+ ASSERT(raw_weak->untag()->next_ ==
+ CompressedWeakPropertyPtr(WeakProperty::null()));
// This action is performed by the GC. No barrier.
raw_weak->untag()->key_ = Object::null();
raw_weak->untag()->value_ = Object::null();
@@ -11651,13 +11667,13 @@
intptr_t Field::HostOffset() const {
ASSERT(is_instance()); // Valid only for dart instance fields.
- return (Smi::Value(untag()->host_offset_or_field_id()) * kWordSize);
+ return (Smi::Value(untag()->host_offset_or_field_id()) * kCompressedWordSize);
}
intptr_t Field::TargetOffset() const {
ASSERT(is_instance()); // Valid only for dart instance fields.
#if !defined(DART_PRECOMPILED_RUNTIME)
- return (untag()->target_offset_ * compiler::target::kWordSize);
+ return (untag()->target_offset_ * compiler::target::kCompressedWordSize);
#else
return HostOffset();
#endif // !defined(DART_PRECOMPILED_RUNTIME)
@@ -11674,13 +11690,14 @@
void Field::SetOffset(intptr_t host_offset_in_bytes,
intptr_t target_offset_in_bytes) const {
ASSERT(is_instance()); // Valid only for dart instance fields.
- ASSERT(kWordSize != 0);
+ ASSERT(kCompressedWordSize != 0);
untag()->set_host_offset_or_field_id(
- Smi::New(host_offset_in_bytes / kWordSize));
+ Smi::New(host_offset_in_bytes / kCompressedWordSize));
#if !defined(DART_PRECOMPILED_RUNTIME)
- ASSERT(compiler::target::kWordSize != 0);
- StoreNonPointer(&untag()->target_offset_,
- target_offset_in_bytes / compiler::target::kWordSize);
+ ASSERT(compiler::target::kCompressedWordSize != 0);
+ StoreNonPointer(
+ &untag()->target_offset_,
+ target_offset_in_bytes / compiler::target::kCompressedWordSize);
#else
ASSERT(host_offset_in_bytes == target_offset_in_bytes);
#endif // !defined(DART_PRECOMPILED_RUNTIME)
@@ -11713,7 +11730,8 @@
intptr_t Instance::GetNativeField(int index) const {
ASSERT(IsValidNativeIndex(index));
NoSafepointScope no_safepoint;
- TypedDataPtr native_fields = static_cast<TypedDataPtr>(*NativeFieldsAddr());
+ TypedDataPtr native_fields = static_cast<TypedDataPtr>(
+ NativeFieldsAddr()->Decompress(untag()->heap_base()));
if (native_fields == TypedData::null()) {
return 0;
}
@@ -11725,7 +11743,8 @@
NoSafepointScope no_safepoint;
ASSERT(num_fields == NumNativeFields());
ASSERT(field_values != NULL);
- TypedDataPtr native_fields = static_cast<TypedDataPtr>(*NativeFieldsAddr());
+ TypedDataPtr native_fields = static_cast<TypedDataPtr>(
+ NativeFieldsAddr()->Decompress(untag()->heap_base()));
if (native_fields == TypedData::null()) {
for (intptr_t i = 0; i < num_fields; i++) {
field_values[i] = 0;
diff --git a/runtime/vm/object_graph.cc b/runtime/vm/object_graph.cc
index 99b66cd..db7a3f1 100644
--- a/runtime/vm/object_graph.cc
+++ b/runtime/vm/object_graph.cc
@@ -938,15 +938,15 @@
} else if (cid == kArrayCid || cid == kImmutableArrayCid) {
writer_->WriteUnsigned(kLengthData);
writer_->WriteUnsigned(
- Smi::Value(static_cast<ArrayPtr>(obj)->untag()->length_));
+ Smi::Value(static_cast<ArrayPtr>(obj)->untag()->length()));
} else if (cid == kGrowableObjectArrayCid) {
writer_->WriteUnsigned(kLengthData);
writer_->WriteUnsigned(Smi::Value(
- static_cast<GrowableObjectArrayPtr>(obj)->untag()->length_));
+ static_cast<GrowableObjectArrayPtr>(obj)->untag()->length()));
} else if (cid == kLinkedHashMapCid) {
writer_->WriteUnsigned(kLengthData);
writer_->WriteUnsigned(
- Smi::Value(static_cast<LinkedHashMapPtr>(obj)->untag()->used_data_));
+ Smi::Value(static_cast<LinkedHashMapPtr>(obj)->untag()->used_data()));
} else if (cid == kObjectPoolCid) {
writer_->WriteUnsigned(kLengthData);
writer_->WriteUnsigned(static_cast<ObjectPoolPtr>(obj)->untag()->length_);
diff --git a/runtime/vm/object_test.cc b/runtime/vm/object_test.cc
index a920d42..0ee8b47 100644
--- a/runtime/vm/object_test.cc
+++ b/runtime/vm/object_test.cc
@@ -2178,10 +2178,15 @@
EXPECT_EQ(2, new_array.Length());
addr += used_size;
obj = UntaggedObject::FromAddr(addr);
+#if defined(DART_COMPRESSED_POINTERS)
+ // In compressed pointer mode, the TypedData doesn't fit.
+ EXPECT(obj.IsInstance());
+#else
EXPECT(obj.IsTypedData());
left_over_array ^= obj.ptr();
EXPECT_EQ(4 * kWordSize - TypedData::InstanceSize(0),
left_over_array.Length());
+#endif
// 2. Should produce an array of length 3 and a left over int8 array or
// instance.
@@ -2201,10 +2206,10 @@
EXPECT_EQ(3, new_array.Length());
addr += used_size;
obj = UntaggedObject::FromAddr(addr);
- if (TypedData::InstanceSize(0) <= 2 * kWordSize) {
+ if (TypedData::InstanceSize(0) <= 2 * kCompressedWordSize) {
EXPECT(obj.IsTypedData());
left_over_array ^= obj.ptr();
- EXPECT_EQ(2 * kWordSize - TypedData::InstanceSize(0),
+ EXPECT_EQ(2 * kCompressedWordSize - TypedData::InstanceSize(0),
left_over_array.Length());
} else {
EXPECT(obj.IsInstance());
@@ -2227,10 +2232,15 @@
EXPECT_EQ(1, new_array.Length());
addr += used_size;
obj = UntaggedObject::FromAddr(addr);
+#if defined(DART_COMPRESSED_POINTERS)
+ // In compressed pointer mode, the TypedData doesn't fit.
+ EXPECT(obj.IsInstance());
+#else
EXPECT(obj.IsTypedData());
left_over_array ^= obj.ptr();
EXPECT_EQ(8 * kWordSize - TypedData::InstanceSize(0),
left_over_array.Length());
+#endif
// 4. Verify that GC can handle the filler object for a large array.
array = GrowableObjectArray::New((1 * MB) >> kWordSizeLog2);
diff --git a/runtime/vm/raw_object.cc b/runtime/vm/raw_object.cc
index 5340a96..a370775 100644
--- a/runtime/vm/raw_object.cc
+++ b/runtime/vm/raw_object.cc
@@ -271,7 +271,7 @@
do {
OS::Sleep(1);
const ArrayPtr raw_array = static_cast<const ArrayPtr>(this);
- intptr_t array_length = Smi::Value(raw_array->untag()->length_);
+ intptr_t array_length = Smi::Value(raw_array->untag()->length());
instance_size = Array::InstanceSize(array_length);
} while ((instance_size > tags_size) && (--retries_remaining > 0));
}
@@ -547,7 +547,7 @@
COMPRESSED_VISITOR(TypeRef)
COMPRESSED_VISITOR(TypeParameter)
COMPRESSED_VISITOR(Function)
-REGULAR_VISITOR(Closure)
+COMPRESSED_VISITOR(Closure)
COMPRESSED_VISITOR(LibraryPrefix)
REGULAR_VISITOR(SingleTargetCache)
REGULAR_VISITOR(UnlinkedCall)
@@ -560,14 +560,14 @@
COMPRESSED_VISITOR(UnwindError)
COMPRESSED_VISITOR(ExternalOneByteString)
COMPRESSED_VISITOR(ExternalTwoByteString)
-REGULAR_VISITOR(GrowableObjectArray)
-REGULAR_VISITOR(LinkedHashMap)
+COMPRESSED_VISITOR(GrowableObjectArray)
+COMPRESSED_VISITOR(LinkedHashMap)
COMPRESSED_VISITOR(ExternalTypedData)
TYPED_DATA_VIEW_VISITOR(TypedDataView)
COMPRESSED_VISITOR(ReceivePort)
COMPRESSED_VISITOR(StackTrace)
COMPRESSED_VISITOR(RegExp)
-REGULAR_VISITOR(WeakProperty)
+COMPRESSED_VISITOR(WeakProperty)
COMPRESSED_VISITOR(MirrorReference)
COMPRESSED_VISITOR(UserTag)
REGULAR_VISITOR(SubtypeTestCache)
@@ -578,7 +578,7 @@
VARIABLE_COMPRESSED_VISITOR(LocalVarDescriptors, raw_obj->untag()->num_entries_)
VARIABLE_COMPRESSED_VISITOR(ExceptionHandlers, raw_obj->untag()->num_entries_)
VARIABLE_VISITOR(Context, raw_obj->untag()->num_variables_)
-VARIABLE_VISITOR(Array, Smi::Value(raw_obj->untag()->length()))
+VARIABLE_COMPRESSED_VISITOR(Array, Smi::Value(raw_obj->untag()->length()))
VARIABLE_COMPRESSED_VISITOR(
TypedData,
TypedData::ElementSizeInBytes(raw_obj->GetClassId()) *
@@ -713,9 +713,10 @@
// Calculate the first and last raw object pointer fields.
uword obj_addr = UntaggedObject::ToAddr(raw_obj);
uword from = obj_addr + sizeof(UntaggedObject);
- uword to = obj_addr + instance_size - kWordSize;
- visitor->VisitPointers(reinterpret_cast<ObjectPtr*>(from),
- reinterpret_cast<ObjectPtr*>(to));
+ uword to = obj_addr + instance_size - kCompressedWordSize;
+ visitor->VisitCompressedPointers(raw_obj->heap_base(),
+ reinterpret_cast<CompressedObjectPtr*>(from),
+ reinterpret_cast<CompressedObjectPtr*>(to));
return instance_size;
}
diff --git a/runtime/vm/raw_object.h b/runtime/vm/raw_object.h
index f928b2b..0738f31 100644
--- a/runtime/vm/raw_object.h
+++ b/runtime/vm/raw_object.h
@@ -509,27 +509,27 @@
intptr_t instance_size = HeapSize();
uword obj_addr = ToAddr(this);
uword from = obj_addr + sizeof(UntaggedObject);
- uword to = obj_addr + instance_size - kWordSize;
- const auto first = reinterpret_cast<ObjectPtr*>(from);
- const auto last = reinterpret_cast<ObjectPtr*>(to);
+ uword to = obj_addr + instance_size - kCompressedWordSize;
+ const auto first = reinterpret_cast<CompressedObjectPtr*>(from);
+ const auto last = reinterpret_cast<CompressedObjectPtr*>(to);
#if defined(SUPPORT_UNBOXED_INSTANCE_FIELDS)
const auto unboxed_fields_bitmap =
visitor->shared_class_table()->GetUnboxedFieldsMapAt(class_id);
if (!unboxed_fields_bitmap.IsEmpty()) {
- intptr_t bit = sizeof(UntaggedObject) / kWordSize;
- for (ObjectPtr* current = first; current <= last; current++) {
+ intptr_t bit = sizeof(UntaggedObject) / kCompressedWordSize;
+ for (CompressedObjectPtr* current = first; current <= last; current++) {
if (!unboxed_fields_bitmap.Get(bit++)) {
- visitor->VisitPointer(current);
+ visitor->VisitCompressedPointers(heap_base(), current, current);
}
}
} else {
- visitor->VisitPointers(first, last);
+ visitor->VisitCompressedPointers(heap_base(), first, last);
}
#else
// Call visitor function virtually
- visitor->VisitPointers(first, last);
+ visitor->VisitCompressedPointers(heap_base(), first, last);
#endif // defined(SUPPORT_UNBOXED_INSTANCE_FIELDS)
return instance_size;
@@ -547,27 +547,27 @@
intptr_t instance_size = HeapSize();
uword obj_addr = ToAddr(this);
uword from = obj_addr + sizeof(UntaggedObject);
- uword to = obj_addr + instance_size - kWordSize;
- const auto first = reinterpret_cast<ObjectPtr*>(from);
- const auto last = reinterpret_cast<ObjectPtr*>(to);
+ uword to = obj_addr + instance_size - kCompressedWordSize;
+ const auto first = reinterpret_cast<CompressedObjectPtr*>(from);
+ const auto last = reinterpret_cast<CompressedObjectPtr*>(to);
#if defined(SUPPORT_UNBOXED_INSTANCE_FIELDS)
const auto unboxed_fields_bitmap =
visitor->shared_class_table()->GetUnboxedFieldsMapAt(class_id);
if (!unboxed_fields_bitmap.IsEmpty()) {
- intptr_t bit = sizeof(UntaggedObject) / kWordSize;
- for (ObjectPtr* current = first; current <= last; current++) {
+ intptr_t bit = sizeof(UntaggedObject) / kCompressedWordSize;
+ for (CompressedObjectPtr* current = first; current <= last; current++) {
if (!unboxed_fields_bitmap.Get(bit++)) {
- visitor->V::VisitPointers(current, current);
+ visitor->V::VisitCompressedPointers(heap_base(), current, current);
}
}
} else {
- visitor->V::VisitPointers(first, last);
+ visitor->V::VisitCompressedPointers(heap_base(), first, last);
}
#else
// Call visitor function non-virtually
- visitor->V::VisitPointers(first, last);
+ visitor->V::VisitCompressedPointers(heap_base(), first, last);
#endif // defined(SUPPORT_UNBOXED_INSTANCE_FIELDS)
return instance_size;
@@ -681,17 +681,20 @@
}
}
- template <typename type, std::memory_order order = std::memory_order_relaxed>
- void StoreArrayPointer(type const* addr, type value) {
+ // Note: StoreArrayPointer won't work if value_type is a compressed pointer.
+ template <typename type,
+ std::memory_order order = std::memory_order_relaxed,
+ typename value_type = type>
+ void StoreArrayPointer(type const* addr, value_type value) {
reinterpret_cast<std::atomic<type>*>(const_cast<type*>(addr))
- ->store(value, order);
+ ->store(type(value), order);
if (value->IsHeapObject()) {
CheckArrayPointerStore(addr, value, Thread::Current());
}
}
- template <typename type>
- void StoreArrayPointer(type const* addr, type value, Thread* thread) {
+ template <typename type, typename value_type = type>
+ void StoreArrayPointer(type const* addr, value_type value, Thread* thread) {
*const_cast<type*>(addr) = value;
if (value->IsHeapObject()) {
CheckArrayPointerStore(addr, value, thread);
@@ -735,11 +738,11 @@
// Use for storing into an explicitly Smi-typed field of an object
// (i.e., both the previous and new value are Smis).
- template <std::memory_order order = std::memory_order_relaxed>
- void StoreSmi(SmiPtr const* addr, SmiPtr value) {
+ template <typename type, std::memory_order order = std::memory_order_relaxed>
+ void StoreSmi(type const* addr, type value) {
// Can't use Contains, as array length is initialized through this method.
ASSERT(reinterpret_cast<uword>(addr) >= UntaggedObject::ToAddr(this));
- reinterpret_cast<std::atomic<SmiPtr>*>(const_cast<SmiPtr*>(addr))
+ reinterpret_cast<std::atomic<type>*>(const_cast<type*>(addr))
->store(value, order);
}
template <std::memory_order order = std::memory_order_relaxed>
@@ -780,9 +783,9 @@
}
}
- template <typename type>
+ template <typename type, typename value_type>
DART_FORCE_INLINE void CheckArrayPointerStore(type const* addr,
- ObjectPtr value,
+ value_type value,
Thread* thread) {
uword source_tags = this->tags_;
uword target_tags = value->untag()->tags_;
@@ -921,6 +924,22 @@
protected: \
type name##_;
+#define COMPRESSED_ARRAY_POINTER_FIELD(type, name) \
+ public: \
+ template <std::memory_order order = std::memory_order_relaxed> \
+ type name() const { \
+ return LoadPointer<Compressed##type, order>(&name##_).Decompress( \
+ heap_base()); \
+ } \
+ template <std::memory_order order = std::memory_order_relaxed> \
+ void set_##name(type value) { \
+ StoreArrayPointer<Compressed##type, order>(&name##_, \
+ Compressed##type(value)); \
+ } \
+ \
+ protected: \
+ Compressed##type name##_;
+
#define VARIABLE_POINTER_FIELDS(type, accessor_name, array_name) \
public: \
template <std::memory_order order = std::memory_order_relaxed> \
@@ -970,7 +989,7 @@
template <std::memory_order order = std::memory_order_relaxed> \
void set_##name(type value) { \
ASSERT(!value.IsHeapObject()); \
- StoreSmi<order>(&name##_, value); \
+ StoreSmi<type, order>(&name##_, value); \
} \
\
protected: \
@@ -2529,6 +2548,13 @@
RAW_HEAP_OBJECT_IMPLEMENTATION(Instance);
friend class Object;
friend class SnapshotReader;
+
+ public:
+#if defined(DART_COMPRESSED_POINTERS)
+ static constexpr bool kContainsCompressedPointers = true;
+#else
+ static constexpr bool kContainsCompressedPointers = false;
+#endif
};
class UntaggedLibraryPrefix : public UntaggedInstance {
@@ -2746,13 +2772,13 @@
// The following fields are also declared in the Dart source of class
// _Closure.
- POINTER_FIELD(TypeArgumentsPtr, instantiator_type_arguments)
+ COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, instantiator_type_arguments)
VISIT_FROM(instantiator_type_arguments)
- POINTER_FIELD(TypeArgumentsPtr, function_type_arguments)
- POINTER_FIELD(TypeArgumentsPtr, delayed_type_arguments)
- POINTER_FIELD(FunctionPtr, function)
- POINTER_FIELD(ContextPtr, context)
- POINTER_FIELD(SmiPtr, hash)
+ COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, function_type_arguments)
+ COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, delayed_type_arguments)
+ COMPRESSED_POINTER_FIELD(FunctionPtr, function)
+ COMPRESSED_POINTER_FIELD(ContextPtr, context)
+ COMPRESSED_POINTER_FIELD(SmiPtr, hash)
VISIT_TO(hash)
// We have an extra word in the object due to alignment rounding, so use it in
@@ -2761,7 +2787,7 @@
// one entry point, as dynamic calls use dynamic closure call dispatchers.
ONLY_IN_PRECOMPILED(uword entry_point_);
- ObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
+ CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
// Note that instantiator_type_arguments_, function_type_arguments_ and
// delayed_type_arguments_ are used to instantiate the signature of function_
@@ -3043,11 +3069,11 @@
class UntaggedArray : public UntaggedInstance {
RAW_HEAP_OBJECT_IMPLEMENTATION(Array);
- ARRAY_POINTER_FIELD(TypeArgumentsPtr, type_arguments)
+ COMPRESSED_ARRAY_POINTER_FIELD(TypeArgumentsPtr, type_arguments)
VISIT_FROM(type_arguments)
- SMI_FIELD(SmiPtr, length)
+ COMPRESSED_SMI_FIELD(SmiPtr, length)
// Variable length data follows here.
- VARIABLE_POINTER_FIELDS(ObjectPtr, element, data)
+ COMPRESSED_VARIABLE_POINTER_FIELDS(ObjectPtr, element, data)
friend class LinkedHashMapSerializationCluster;
friend class LinkedHashMapDeserializationCluster;
@@ -3078,12 +3104,12 @@
class UntaggedGrowableObjectArray : public UntaggedInstance {
RAW_HEAP_OBJECT_IMPLEMENTATION(GrowableObjectArray);
- POINTER_FIELD(TypeArgumentsPtr, type_arguments)
+ COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, type_arguments)
VISIT_FROM(type_arguments)
- SMI_FIELD(SmiPtr, length)
- POINTER_FIELD(ArrayPtr, data)
+ COMPRESSED_SMI_FIELD(SmiPtr, length)
+ COMPRESSED_POINTER_FIELD(ArrayPtr, data)
VISIT_TO(data)
- ObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
+ CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
friend class SnapshotReader;
friend class ReversePc;
@@ -3092,15 +3118,15 @@
class UntaggedLinkedHashMap : public UntaggedInstance {
RAW_HEAP_OBJECT_IMPLEMENTATION(LinkedHashMap);
- POINTER_FIELD(TypeArgumentsPtr, type_arguments)
+ COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, type_arguments)
VISIT_FROM(type_arguments)
- POINTER_FIELD(TypedDataPtr, index)
- POINTER_FIELD(SmiPtr, hash_mask)
- POINTER_FIELD(ArrayPtr, data)
- POINTER_FIELD(SmiPtr, used_data)
- POINTER_FIELD(SmiPtr, deleted_keys)
+ COMPRESSED_POINTER_FIELD(TypedDataPtr, index)
+ COMPRESSED_POINTER_FIELD(SmiPtr, hash_mask)
+ COMPRESSED_POINTER_FIELD(ArrayPtr, data)
+ COMPRESSED_POINTER_FIELD(SmiPtr, used_data)
+ COMPRESSED_POINTER_FIELD(SmiPtr, deleted_keys)
VISIT_TO(deleted_keys)
- ObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
+ CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
friend class SnapshotReader;
};
@@ -3294,15 +3320,15 @@
class UntaggedWeakProperty : public UntaggedInstance {
RAW_HEAP_OBJECT_IMPLEMENTATION(WeakProperty);
- POINTER_FIELD(ObjectPtr, key)
+ COMPRESSED_POINTER_FIELD(ObjectPtr, key)
VISIT_FROM(key)
- POINTER_FIELD(ObjectPtr, value)
+ COMPRESSED_POINTER_FIELD(ObjectPtr, value)
VISIT_TO(value)
- ObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
+ CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
// Linked list is chaining all pending weak properties. Not visited by
// pointer visitors.
- WeakPropertyPtr next_;
+ CompressedWeakPropertyPtr next_;
friend class GCMarker;
template <bool>
@@ -3343,7 +3369,7 @@
class UntaggedFutureOr : public UntaggedInstance {
RAW_HEAP_OBJECT_IMPLEMENTATION(FutureOr);
- POINTER_FIELD(TypeArgumentsPtr, type_arguments)
+ COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, type_arguments)
VISIT_FROM(type_arguments)
VISIT_TO(type_arguments)
diff --git a/runtime/vm/raw_object_snapshot.cc b/runtime/vm/raw_object_snapshot.cc
index 3ca81e8..bbabf60 100644
--- a/runtime/vm/raw_object_snapshot.cc
+++ b/runtime/vm/raw_object_snapshot.cc
@@ -1089,7 +1089,7 @@
Snapshot::Kind kind,
bool as_reference) {
writer->ArrayWriteTo(object_id, kImmutableArrayCid,
- writer->GetObjectTags(this), length_, type_arguments_,
+ writer->GetObjectTags(this), length(), type_arguments(),
data(), as_reference);
}
@@ -1108,8 +1108,8 @@
// Read type arguments of growable array object.
*reader->TypeArgumentsHandle() ^= reader->ReadObjectImpl(kAsInlinedObject);
- array.StorePointer(&array.untag()->type_arguments_,
- reader->TypeArgumentsHandle()->ptr());
+ array.StoreCompressedPointer(&array.untag()->type_arguments_,
+ reader->TypeArgumentsHandle()->ptr());
// Read length of growable array object.
array.SetLength(reader->ReadSmiValue());
@@ -1135,13 +1135,13 @@
writer->WriteTags(writer->GetObjectTags(this));
// Write out the type arguments field.
- writer->WriteObjectImpl(type_arguments_, kAsInlinedObject);
+ writer->WriteObjectImpl(type_arguments(), kAsInlinedObject);
// Write out the used length field.
- writer->Write<ObjectPtr>(length_);
+ writer->Write<ObjectPtr>(length());
// Write out the Array object.
- writer->WriteObjectImpl(data_, kAsReference);
+ writer->WriteObjectImpl(data(), kAsReference);
}
LinkedHashMapPtr LinkedHashMap::ReadFrom(SnapshotReader* reader,
@@ -1206,36 +1206,35 @@
writer->WriteTags(writer->GetObjectTags(this));
// Write out the type arguments.
- writer->WriteObjectImpl(type_arguments_, kAsInlinedObject);
+ writer->WriteObjectImpl(type_arguments(), kAsInlinedObject);
- const intptr_t used_data = Smi::Value(used_data_);
- ASSERT((used_data & 1) == 0); // Keys + values, so must be even.
- const intptr_t deleted_keys = Smi::Value(deleted_keys_);
+ const intptr_t num_used_data = Smi::Value(used_data());
+ ASSERT((num_used_data & 1) == 0); // Keys + values, so must be even.
+ const intptr_t num_deleted_keys = Smi::Value(deleted_keys());
// Write out the number of (not deleted) key/value pairs that will follow.
- writer->Write<ObjectPtr>(Smi::New((used_data >> 1) - deleted_keys));
+ writer->Write<ObjectPtr>(Smi::New((num_used_data >> 1) - num_deleted_keys));
// Write out the keys and values.
const bool write_as_reference = this->IsCanonical() ? false : true;
- ArrayPtr data_array = data_;
- ObjectPtr* data_elements = data_array->untag()->data();
- ASSERT(used_data <= Smi::Value(data_array->untag()->length_));
+ ArrayPtr data_array = data();
+ ASSERT(num_used_data <= Smi::Value(data_array->untag()->length()));
#if defined(DEBUG)
intptr_t deleted_keys_found = 0;
#endif // DEBUG
- for (intptr_t i = 0; i < used_data; i += 2) {
- ObjectPtr key = data_elements[i];
+ for (intptr_t i = 0; i < num_used_data; i += 2) {
+ ObjectPtr key = data_array->untag()->element(i);
if (key == data_array) {
#if defined(DEBUG)
++deleted_keys_found;
#endif // DEBUG
continue;
}
- ObjectPtr value = data_elements[i + 1];
+ ObjectPtr value = data_array->untag()->element(i + 1);
writer->WriteObjectImpl(key, write_as_reference);
writer->WriteObjectImpl(value, write_as_reference);
}
- DEBUG_ASSERT(deleted_keys_found == deleted_keys);
+ DEBUG_ASSERT(deleted_keys_found == num_deleted_keys);
}
Float32x4Ptr Float32x4::ReadFrom(SnapshotReader* reader,
@@ -1806,8 +1805,9 @@
reader->AddBackRef(object_id, &weak_property, kIsDeserialized);
// Set all the object fields.
- READ_OBJECT_FIELDS(weak_property, weak_property.ptr()->untag()->from(),
- weak_property.ptr()->untag()->to(), kAsReference);
+ READ_COMPRESSED_OBJECT_FIELDS(
+ weak_property, weak_property.ptr()->untag()->from(),
+ weak_property.ptr()->untag()->to(), kAsReference);
return weak_property.ptr();
}
@@ -1827,7 +1827,7 @@
// Write out all the object pointer fields.
SnapshotWriterVisitor visitor(writer, kAsReference);
- visitor.VisitPointers(from(), to());
+ visitor.VisitCompressedPointers(heap_base(), from(), to());
}
} // namespace dart
diff --git a/runtime/vm/reverse_pc_lookup_cache.cc b/runtime/vm/reverse_pc_lookup_cache.cc
index 517d8a1..dc7390a 100644
--- a/runtime/vm/reverse_pc_lookup_cache.cc
+++ b/runtime/vm/reverse_pc_lookup_cache.cc
@@ -27,10 +27,10 @@
// through them linearly. If this changes, would could sort the table list
// during deserialization and binary search for the table.
GrowableObjectArrayPtr tables = group->object_store()->instructions_tables();
- intptr_t tables_length = Smi::Value(tables->untag()->length_);
+ intptr_t tables_length = Smi::Value(tables->untag()->length());
for (intptr_t i = 0; i < tables_length; i++) {
InstructionsTablePtr table = static_cast<InstructionsTablePtr>(
- tables->untag()->data_->untag()->data()[i]);
+ tables->untag()->data()->untag()->element(i));
intptr_t index = InstructionsTable::FindEntry(table, pc);
if (index >= 0) {
*code_start = InstructionsTable::PayloadStartAt(table, index);
diff --git a/runtime/vm/simulator_arm.cc b/runtime/vm/simulator_arm.cc
index 78612fb..a775233 100644
--- a/runtime/vm/simulator_arm.cc
+++ b/runtime/vm/simulator_arm.cc
@@ -2223,7 +2223,7 @@
return;
}
- if (instr->Bit(21) == 1) {
+ if (instr->IsDivUnsigned()) {
// unsigned division.
uint32_t rn_val = static_cast<uint32_t>(get_register(rn));
uint32_t rm_val = static_cast<uint32_t>(get_register(rm));
@@ -2245,14 +2245,34 @@
}
void Simulator::DecodeType3(Instr* instr) {
- if (instr->IsDivision()) {
- DoDivision(instr);
- return;
- } else if (instr->IsRbit()) {
- // Format(instr, "rbit'cond 'rd, 'rm");
- Register rm = instr->RmField();
- Register rd = instr->RdField();
- set_register(rd, Utils::ReverseBits32(get_register(rm)));
+ if (instr->IsMedia()) {
+ if (instr->IsDivision()) {
+ DoDivision(instr);
+ return;
+ } else if (instr->IsRbit()) {
+ // Format(instr, "rbit'cond 'rd, 'rm");
+ Register rm = instr->RmField();
+ Register rd = instr->RdField();
+ set_register(rd, Utils::ReverseBits32(get_register(rm)));
+ return;
+ } else if (instr->IsBitFieldExtract()) {
+ // Format(instr, "sbfx'cond 'rd, 'rn, 'lsb, 'width")
+ const Register rd = instr->RdField();
+ const Register rn = instr->BitFieldExtractRnField();
+ const uint8_t width = instr->BitFieldExtractWidthField() + 1;
+ const uint8_t lsb = instr->BitFieldExtractLSBField();
+ const int32_t rn_val = get_register(rn);
+ const uint32_t extracted_bitfield =
+ ((rn_val >> lsb) & Utils::NBitMask(width));
+ const uint32_t sign_extension =
+ (instr->IsBitFieldExtractSignExtended() &&
+ Utils::TestBit(extracted_bitfield, width - 1))
+ ? ~Utils::NBitMask(width)
+ : 0;
+ set_register(rd, sign_extension | extracted_bitfield);
+ } else {
+ UNREACHABLE();
+ }
return;
}
Register rd = instr->RdField();
diff --git a/runtime/vm/snapshot.cc b/runtime/vm/snapshot.cc
index f18f6ab..278a04f 100644
--- a/runtime/vm/snapshot.cc
+++ b/runtime/vm/snapshot.cc
@@ -644,7 +644,7 @@
result_cid);
while (offset < next_field_offset) {
- if (unboxed_fields.Get(offset / kWordSize)) {
+ if (unboxed_fields.Get(offset / kCompressedWordSize)) {
uword* p = reinterpret_cast<uword*>(result->raw_value() -
kHeapObjectTag + offset);
// Reads 32 bits of the unboxed value at a time
@@ -661,7 +661,7 @@
// across the call to ReadObjectImpl.
cls_ = isolate_group()->class_table()->At(result_cid);
array_ = cls_.OffsetToFieldMap();
- field_ ^= array_.At(offset >> kWordSizeLog2);
+ field_ ^= array_.At(offset >> kCompressedWordSizeLog2);
ASSERT(!field_.IsNull());
ASSERT(field_.HostOffset() == offset);
obj_ = pobj_.ptr();
@@ -670,7 +670,7 @@
// TODO(fschneider): Verify the guarded cid and length for other kinds
// of snapshot (kFull, kScript) with asserts.
}
- offset += kWordSize;
+ offset += kCompressedWordSize;
}
if (UntaggedObject::IsCanonical(tags)) {
*result = result->Canonicalize(thread());
@@ -1365,7 +1365,7 @@
intptr_t tags,
SmiPtr length,
TypeArgumentsPtr type_arguments,
- ObjectPtr data[],
+ CompressedObjectPtr data[],
bool as_reference) {
if (as_reference) {
// Write out the serialization header value for this object.
@@ -1395,8 +1395,9 @@
// Write out the individual object ids.
bool write_as_reference = UntaggedObject::IsCanonical(tags) ? false : true;
+ uword heap_base = type_arguments.heap_base();
for (intptr_t i = 0; i < len; i++) {
- WriteObjectImpl(data[i], write_as_reference);
+ WriteObjectImpl(data[i].Decompress(heap_base), write_as_reference);
}
}
}
@@ -1483,7 +1484,7 @@
WriteObjectImpl(cls, kAsInlinedObject);
} else {
intptr_t next_field_offset = Class::host_next_field_offset_in_words(cls)
- << kWordSizeLog2;
+ << kCompressedWordSizeLog2;
ASSERT(next_field_offset > 0);
// Write out the serialization header value for this object.
@@ -1508,18 +1509,20 @@
bool write_as_reference = UntaggedObject::IsCanonical(tags) ? false : true;
intptr_t offset = Instance::NextFieldOffset();
+ uword heap_base = raw->heap_base();
while (offset < next_field_offset) {
- if (unboxed_fields.Get(offset / kWordSize)) {
+ if (unboxed_fields.Get(offset / kCompressedWordSize)) {
// Writes 32 bits of the unboxed value at a time
- const uword value = *reinterpret_cast<uword*>(
+ const uword value = *reinterpret_cast<compressed_uword*>(
reinterpret_cast<uword>(raw->untag()) + offset);
WriteWordWith32BitWrites(value);
} else {
- ObjectPtr raw_obj = *reinterpret_cast<ObjectPtr*>(
- reinterpret_cast<uword>(raw->untag()) + offset);
+ ObjectPtr raw_obj = reinterpret_cast<CompressedObjectPtr*>(
+ reinterpret_cast<uword>(raw->untag()) + offset)
+ ->Decompress(heap_base);
WriteObjectImpl(raw_obj, write_as_reference);
}
- offset += kWordSize;
+ offset += kCompressedWordSize;
}
}
return;
diff --git a/runtime/vm/snapshot.h b/runtime/vm/snapshot.h
index 2eb5417..60da2b3 100644
--- a/runtime/vm/snapshot.h
+++ b/runtime/vm/snapshot.h
@@ -647,7 +647,7 @@
intptr_t tags,
SmiPtr length,
TypeArgumentsPtr type_arguments,
- ObjectPtr data[],
+ CompressedObjectPtr data[],
bool as_reference);
ClassPtr GetFunctionOwner(FunctionPtr func);
void CheckForNativeFields(ClassPtr cls);
diff --git a/runtime/vm/tagged_pointer.h b/runtime/vm/tagged_pointer.h
index 765af22..cbf9a35 100644
--- a/runtime/vm/tagged_pointer.h
+++ b/runtime/vm/tagged_pointer.h
@@ -16,61 +16,73 @@
class IsolateGroup;
class UntaggedObject;
+#define OBJECT_POINTER_CORE_FUNCTIONS(type, ptr) \
+ type* operator->() { return this; } \
+ const type* operator->() const { return this; } \
+ bool IsWellFormed() const { \
+ const uword value = ptr; \
+ return (value & kSmiTagMask) == 0 || \
+ Utils::IsAligned(value - kHeapObjectTag, kWordSize); \
+ } \
+ bool IsHeapObject() const { \
+ ASSERT(IsWellFormed()); \
+ const uword value = ptr; \
+ return (value & kSmiTagMask) == kHeapObjectTag; \
+ } \
+ /* Assumes this is a heap object. */ \
+ bool IsNewObject() const { \
+ ASSERT(IsHeapObject()); \
+ const uword addr = ptr; \
+ return (addr & kNewObjectAlignmentOffset) == kNewObjectAlignmentOffset; \
+ } \
+ bool IsNewObjectMayBeSmi() const { \
+ static const uword kNewObjectBits = \
+ (kNewObjectAlignmentOffset | kHeapObjectTag); \
+ const uword addr = ptr; \
+ return (addr & kObjectAlignmentMask) == kNewObjectBits; \
+ } \
+ /* Assumes this is a heap object. */ \
+ bool IsOldObject() const { \
+ ASSERT(IsHeapObject()); \
+ const uword addr = ptr; \
+ return (addr & kNewObjectAlignmentOffset) == kOldObjectAlignmentOffset; \
+ } \
+ \
+ /* Like !IsHeapObject() || IsOldObject() but compiles to a single branch. */ \
+ bool IsSmiOrOldObject() const { \
+ ASSERT(IsWellFormed()); \
+ static const uword kNewObjectBits = \
+ (kNewObjectAlignmentOffset | kHeapObjectTag); \
+ const uword addr = ptr; \
+ return (addr & kObjectAlignmentMask) != kNewObjectBits; \
+ } \
+ \
+ /* Like !IsHeapObject() || IsNewObject() but compiles to a single branch. */ \
+ bool IsSmiOrNewObject() const { \
+ ASSERT(IsWellFormed()); \
+ static const uword kOldObjectBits = \
+ (kOldObjectAlignmentOffset | kHeapObjectTag); \
+ const uword addr = ptr; \
+ return (addr & kObjectAlignmentMask) != kOldObjectBits; \
+ } \
+ \
+ bool operator==(const type& other) { return ptr == other.ptr; } \
+ bool operator!=(const type& other) { return ptr != other.ptr; } \
+ constexpr bool operator==(const type& other) const { \
+ return ptr == other.ptr; \
+ } \
+ constexpr bool operator!=(const type& other) const { \
+ return ptr != other.ptr; \
+ }
+
class ObjectPtr {
public:
- ObjectPtr* operator->() { return this; }
- const ObjectPtr* operator->() const { return this; }
+ OBJECT_POINTER_CORE_FUNCTIONS(ObjectPtr, tagged_pointer_)
+
UntaggedObject* untag() const {
return reinterpret_cast<UntaggedObject*>(untagged_pointer());
}
- bool IsWellFormed() const {
- uword value = tagged_pointer_;
- return (value & kSmiTagMask) == 0 ||
- Utils::IsAligned(value - kHeapObjectTag, kWordSize);
- }
- bool IsHeapObject() const {
- ASSERT(IsWellFormed());
- uword value = tagged_pointer_;
- return (value & kSmiTagMask) == kHeapObjectTag;
- }
- // Assumes this is a heap object.
- bool IsNewObject() const {
- ASSERT(IsHeapObject());
- uword addr = tagged_pointer_;
- return (addr & kNewObjectAlignmentOffset) == kNewObjectAlignmentOffset;
- }
- bool IsNewObjectMayBeSmi() const {
- static const uword kNewObjectBits =
- (kNewObjectAlignmentOffset | kHeapObjectTag);
- const uword addr = tagged_pointer_;
- return (addr & kObjectAlignmentMask) == kNewObjectBits;
- }
- // Assumes this is a heap object.
- bool IsOldObject() const {
- ASSERT(IsHeapObject());
- uword addr = tagged_pointer_;
- return (addr & kNewObjectAlignmentOffset) == kOldObjectAlignmentOffset;
- }
-
- // Like !IsHeapObject() || IsOldObject(), but compiles to a single branch.
- bool IsSmiOrOldObject() const {
- ASSERT(IsWellFormed());
- static const uword kNewObjectBits =
- (kNewObjectAlignmentOffset | kHeapObjectTag);
- const uword addr = tagged_pointer_;
- return (addr & kObjectAlignmentMask) != kNewObjectBits;
- }
-
- // Like !IsHeapObject() || IsNewObject(), but compiles to a single branch.
- bool IsSmiOrNewObject() const {
- ASSERT(IsWellFormed());
- static const uword kOldObjectBits =
- (kOldObjectAlignmentOffset | kHeapObjectTag);
- const uword addr = tagged_pointer_;
- return (addr & kObjectAlignmentMask) != kOldObjectBits;
- }
-
#define DEFINE_IS_CID(clazz) \
bool Is##clazz() const { return ((GetClassId() == k##clazz##Cid)); }
CLASS_LIST(DEFINE_IS_CID)
@@ -116,18 +128,6 @@
void Validate(IsolateGroup* isolate_group) const;
- bool operator==(const ObjectPtr& other) {
- return tagged_pointer_ == other.tagged_pointer_;
- }
- bool operator!=(const ObjectPtr& other) {
- return tagged_pointer_ != other.tagged_pointer_;
- }
- constexpr bool operator==(const ObjectPtr& other) const {
- return tagged_pointer_ == other.tagged_pointer_;
- }
- constexpr bool operator!=(const ObjectPtr& other) const {
- return tagged_pointer_ != other.tagged_pointer_;
- }
bool operator==(const std::nullptr_t& other) { return tagged_pointer_ == 0; }
bool operator!=(const std::nullptr_t& other) { return tagged_pointer_ != 0; }
constexpr bool operator==(const std::nullptr_t& other) const {
@@ -242,11 +242,20 @@
#else
class CompressedObjectPtr {
public:
+ OBJECT_POINTER_CORE_FUNCTIONS(CompressedObjectPtr, compressed_pointer_)
+
explicit CompressedObjectPtr(ObjectPtr uncompressed)
: compressed_pointer_(
static_cast<uint32_t>(static_cast<uword>(uncompressed))) {}
+ explicit constexpr CompressedObjectPtr(uword tagged)
+ : compressed_pointer_(static_cast<uint32_t>(tagged)) {}
ObjectPtr Decompress(uword heap_base) const {
+ if ((compressed_pointer_ & kSmiTagMask) != kHeapObjectTag) {
+ // TODO(liama): Make all native code robust to junk in the upper 32-bits
+ // of SMIs, then remove this special casing.
+ return DecompressSmi();
+ }
return static_cast<ObjectPtr>(static_cast<uword>(compressed_pointer_) +
heap_base);
}
@@ -287,6 +296,9 @@
compressed_pointer_ = static_cast<uint32_t>(static_cast<uword>(other)); \
return other; \
} \
+ klass##Ptr Decompress(uword heap_base) const { \
+ return klass##Ptr(CompressedObjectPtr::Decompress(heap_base)); \
+ } \
};
#endif
@@ -313,6 +325,7 @@
constexpr klass##Ptr(std::nullptr_t) : base##Ptr(nullptr) {} /* NOLINT */ \
explicit klass##Ptr(const UntaggedObject* untagged) \
: base##Ptr(reinterpret_cast<uword>(untagged) + kHeapObjectTag) {} \
+ klass##Ptr Decompress(uword heap_base) const { return *this; } \
}; \
DEFINE_COMPRESSED_POINTER(klass, base)
diff --git a/runtime/vm/thread.cc b/runtime/vm/thread.cc
index da39430..16a1f8f 100644
--- a/runtime/vm/thread.cc
+++ b/runtime/vm/thread.cc
@@ -1163,10 +1163,11 @@
}
if (entry != 0) {
- ObjectPtr* const code_array =
+ CompressedObjectPtr* const code_array =
Array::DataOf(GrowableObjectArray::NoSafepointData(array));
// RawCast allocates handles in ASSERTs.
- const CodePtr code = static_cast<CodePtr>(code_array[callback_id]);
+ const CodePtr code = static_cast<CodePtr>(
+ code_array[callback_id].Decompress(array.heap_base()));
if (!Code::ContainsInstructionAt(code, entry)) {
FATAL("Cannot invoke callback on incorrect isolate.");
}
diff --git a/runtime/vm/type_testing_stubs.cc b/runtime/vm/type_testing_stubs.cc
index 879677c..45d98c8 100644
--- a/runtime/vm/type_testing_stubs.cc
+++ b/runtime/vm/type_testing_stubs.cc
@@ -415,7 +415,7 @@
// fall through to continue
// b) Then we'll load the values for the type parameters.
- __ LoadFieldFromOffset(
+ __ LoadCompressedFieldFromOffset(
TTSInternalRegs::kInstanceTypeArgumentsReg, TypeTestABI::kInstanceReg,
compiler::target::Class::TypeArgumentsFieldOffset(type_class));
diff --git a/tools/VERSION b/tools/VERSION
index 64e07b4..f2353b5 100644
--- a/tools/VERSION
+++ b/tools/VERSION
@@ -27,5 +27,5 @@
MAJOR 2
MINOR 14
PATCH 0
-PRERELEASE 243
+PRERELEASE 244
PRERELEASE_PATCH 0
\ No newline at end of file