[vm] Reduce cost of allocation tracing.

Remove the separate allocation stats table for predefined classes. The original allocation stats CL kept this table separate so that its address could be embedded directly into generated code, but we no longer embed addresses, in order to support AppAOT and AppJIT.
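
For context, the effect on the stats lookup can be sketched with standalone toy types (ToyHeapStats, the placeholder kNumPredefinedCids value, and StatsAt below are illustrative only, not the VM's real ClassHeapStats/ClassTable API): before, a cid below kNumPredefinedCids indexed a separate fixed-size table whose address generated code wanted to embed; after, every cid indexes the one growable table, so generated code only needs the field offset returned by TableOffsetFor.

    // Toy sketch of the lookup before and after this change; not the actual VM code.
    #include <cassert>
    #include <cstdint>
    #include <vector>

    struct ToyHeapStats {
      bool trace_allocation = false;
    };

    constexpr std::intptr_t kNumPredefinedCids = 64;  // placeholder value

    struct TwoTableLookup {  // before this CL
      // Fixed-size table for predefined cids (an embeddable address) plus a
      // growable table, both indexed by raw cid.
      ToyHeapStats predefined[kNumPredefinedCids];
      std::vector<ToyHeapStats> by_cid;

      ToyHeapStats* StatsAt(std::intptr_t cid) {
        return (cid < kNumPredefinedCids) ? &predefined[cid] : &by_cid[cid];
      }
    };

    struct OneTableLookup {  // after this CL
      // A single growable table indexed directly by cid; callers only need
      // the offset of this field, not an embedded table address.
      std::vector<ToyHeapStats> by_cid;

      ToyHeapStats* StatsAt(std::intptr_t cid) {
        assert(cid > 0 && cid < static_cast<std::intptr_t>(by_cid.size()));
        return &by_cid[cid];
      }
    };

    int main() {
      OneTableLookup table;
      table.by_cid.resize(128);
      table.StatsAt(5)->trace_allocation = true;  // analogous to SetTraceAllocationFor(5, true)
      return table.StatsAt(5)->trace_allocation ? 0 : 1;
    }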

Change-Id: Ida3d0764ac8ff179c0541ee73a3283c9c50affab
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/97286
Reviewed-by: Alexander Aprelev <aam@google.com>
Commit-Queue: Ryan Macnak <rmacnak@google.com>
diff --git a/runtime/vm/class_table.cc b/runtime/vm/class_table.cc
index 3aa1544..f3f5fd5 100644
--- a/runtime/vm/class_table.cc
+++ b/runtime/vm/class_table.cc
@@ -22,7 +22,6 @@
       table_(NULL),
       old_tables_(new MallocGrowableArray<ClassAndSize*>()) {
   NOT_IN_PRODUCT(class_heap_stats_table_ = NULL);
-  NOT_IN_PRODUCT(predefined_class_heap_stats_table_ = NULL);
   if (Dart::vm_isolate() == NULL) {
     capacity_ = initial_capacity_;
     table_ = reinterpret_cast<ClassAndSize*>(
@@ -41,20 +40,12 @@
     table_[kForwardingCorpse] = vm_class_table->PairAt(kForwardingCorpse);
     table_[kDynamicCid] = vm_class_table->PairAt(kDynamicCid);
     table_[kVoidCid] = vm_class_table->PairAt(kVoidCid);
-
-#ifndef PRODUCT
-    class_heap_stats_table_ = reinterpret_cast<ClassHeapStats*>(
-        calloc(capacity_, sizeof(ClassHeapStats)));  // NOLINT
-    for (intptr_t i = 0; i < capacity_; i++) {
-      class_heap_stats_table_[i].Initialize();
-    }
-#endif  // !PRODUCT
   }
 #ifndef PRODUCT
-  predefined_class_heap_stats_table_ = reinterpret_cast<ClassHeapStats*>(
-      calloc(kNumPredefinedCids, sizeof(ClassHeapStats)));  // NOLINT
-  for (intptr_t i = 0; i < kNumPredefinedCids; i++) {
-    predefined_class_heap_stats_table_[i].Initialize();
+  class_heap_stats_table_ = reinterpret_cast<ClassHeapStats*>(
+      calloc(capacity_, sizeof(ClassHeapStats)));  // NOLINT
+  for (intptr_t i = 0; i < capacity_; i++) {
+    class_heap_stats_table_[i].Initialize();
   }
 #endif  // !PRODUCT
 }
@@ -65,7 +56,6 @@
       table_(original->table_),
       old_tables_(NULL) {
   NOT_IN_PRODUCT(class_heap_stats_table_ = NULL);
-  NOT_IN_PRODUCT(predefined_class_heap_stats_table_ = NULL);
 }
 
 ClassTable::~ClassTable() {
@@ -73,11 +63,9 @@
     FreeOldTables();
     delete old_tables_;
     free(table_);
-    NOT_IN_PRODUCT(free(predefined_class_heap_stats_table_));
     NOT_IN_PRODUCT(free(class_heap_stats_table_));
   } else {
     // This instance was a shallow copy. It doesn't own any memory.
-    NOT_IN_PRODUCT(ASSERT(predefined_class_heap_stats_table_ == NULL));
     NOT_IN_PRODUCT(ASSERT(class_heap_stats_table_ == NULL));
   }
 }
@@ -93,18 +81,6 @@
   }
 }
 
-#ifndef PRODUCT
-void ClassTable::SetTraceAllocationFor(intptr_t cid, bool trace) {
-  ClassHeapStats* stats = PreliminaryStatsAt(cid);
-  stats->set_trace_allocation(trace);
-}
-
-bool ClassTable::TraceAllocationFor(intptr_t cid) {
-  ClassHeapStats* stats = PreliminaryStatsAt(cid);
-  return stats->trace_allocation();
-}
-#endif  // !PRODUCT
-
 void ClassTable::Register(const Class& cls) {
   ASSERT(Thread::Current()->IsMutatorThread());
   intptr_t index = cls.id();
@@ -466,15 +442,6 @@
   return !RawObject::IsVariableSizeClassId(cid);
 }
 
-ClassHeapStats* ClassTable::PreliminaryStatsAt(intptr_t cid) {
-  ASSERT(cid > 0);
-  if (cid < kNumPredefinedCids) {
-    return &predefined_class_heap_stats_table_[cid];
-  }
-  ASSERT(cid < top_);
-  return &class_heap_stats_table_[cid];
-}
-
 ClassHeapStats* ClassTable::StatsWithUpdatedSize(intptr_t cid) {
   if (!HasValidClassAt(cid) || (cid == kFreeListElement) ||
       (cid == kForwardingCorpse) || (cid == kSmiCid)) {
@@ -494,41 +461,25 @@
 }
 
 void ClassTable::ResetCountersOld() {
-  for (intptr_t i = 0; i < kNumPredefinedCids; i++) {
-    predefined_class_heap_stats_table_[i].ResetAtOldGC();
-  }
-  for (intptr_t i = kNumPredefinedCids; i < top_; i++) {
+  for (intptr_t i = 0; i < top_; i++) {
     class_heap_stats_table_[i].ResetAtOldGC();
   }
 }
 
 void ClassTable::ResetCountersNew() {
-  for (intptr_t i = 0; i < kNumPredefinedCids; i++) {
-    predefined_class_heap_stats_table_[i].ResetAtNewGC();
-  }
-  for (intptr_t i = kNumPredefinedCids; i < top_; i++) {
+  for (intptr_t i = 0; i < top_; i++) {
     class_heap_stats_table_[i].ResetAtNewGC();
   }
 }
 
 void ClassTable::UpdatePromoted() {
-  for (intptr_t i = 0; i < kNumPredefinedCids; i++) {
-    predefined_class_heap_stats_table_[i].UpdatePromotedAfterNewGC();
-  }
-  for (intptr_t i = kNumPredefinedCids; i < top_; i++) {
+  for (intptr_t i = 0; i < top_; i++) {
     class_heap_stats_table_[i].UpdatePromotedAfterNewGC();
   }
 }
 
-ClassHeapStats** ClassTable::TableAddressFor(intptr_t cid) {
-  return (cid < kNumPredefinedCids) ? &predefined_class_heap_stats_table_
-                                    : &class_heap_stats_table_;
-}
-
 intptr_t ClassTable::TableOffsetFor(intptr_t cid) {
-  return (cid < kNumPredefinedCids)
-             ? OFFSET_OF(ClassTable, predefined_class_heap_stats_table_)
-             : OFFSET_OF(ClassTable, class_heap_stats_table_);
+  return OFFSET_OF(ClassTable, class_heap_stats_table_);
 }
 
 intptr_t ClassTable::ClassOffsetFor(intptr_t cid) {
diff --git a/runtime/vm/class_table.h b/runtime/vm/class_table.h
index df71e43..6a49f54 100644
--- a/runtime/vm/class_table.h
+++ b/runtime/vm/class_table.h
@@ -275,7 +275,6 @@
   void UpdatePromoted();
 
   // Used by the generated code.
-  ClassHeapStats** TableAddressFor(intptr_t cid);
   static intptr_t TableOffsetFor(intptr_t cid);
 
   // Used by the generated code.
@@ -293,14 +292,21 @@
   void ResetAllocationAccumulators();
 
   void PrintToJSONObject(JSONObject* object);
+
+  void SetTraceAllocationFor(intptr_t cid, bool trace) {
+    ClassHeapStats* stats = PreliminaryStatsAt(cid);
+    stats->set_trace_allocation(trace);
+  }
+  bool TraceAllocationFor(intptr_t cid) {
+    ClassHeapStats* stats = PreliminaryStatsAt(cid);
+    return stats->trace_allocation();
+  }
 #endif  // !PRODUCT
 
   void AddOldTable(ClassAndSize* old_table);
   // Deallocates table copies. Do not call during concurrent access to table.
   void FreeOldTables();
 
-  void SetTraceAllocationFor(intptr_t cid, bool trace);
-  bool TraceAllocationFor(intptr_t cid);
 
  private:
   friend class GCMarker;
@@ -323,10 +329,13 @@
 
 #ifndef PRODUCT
   ClassHeapStats* class_heap_stats_table_;
-  ClassHeapStats* predefined_class_heap_stats_table_;
 
   // May not have updated size for variable size classes.
-  ClassHeapStats* PreliminaryStatsAt(intptr_t cid);
+  ClassHeapStats* PreliminaryStatsAt(intptr_t cid) {
+    ASSERT(cid > 0);
+    ASSERT(cid < top_);
+    return &class_heap_stats_table_[cid];
+  }
   void UpdateLiveOld(intptr_t cid, intptr_t size, intptr_t count = 1);
   void UpdateLiveNew(intptr_t cid, intptr_t size);
   void UpdateLiveNewGC(intptr_t cid, intptr_t size);
diff --git a/runtime/vm/object.cc b/runtime/vm/object.cc
index 8d2c158..312fd15 100644
--- a/runtime/vm/object.cc
+++ b/runtime/vm/object.cc
@@ -2115,15 +2115,13 @@
     UNREACHABLE();
   }
 #ifndef PRODUCT
-  Isolate* isolate = thread->isolate();
-  ClassTable* class_table = isolate->class_table();
+  ClassTable* class_table = thread->isolate()->class_table();
   if (space == Heap::kNew) {
     class_table->UpdateAllocatedNew(cls_id, size);
   } else {
     class_table->UpdateAllocatedOld(cls_id, size);
   }
-  const Class& cls = Class::Handle(class_table->At(cls_id));
-  if (FLAG_profiler && cls.TraceAllocation(isolate)) {
+  if (class_table->TraceAllocationFor(cls_id)) {
     Profiler::SampleAllocation(thread, cls_id);
   }
 #endif  // !PRODUCT