Version 2.13.0-88.0.dev

Merge commit '26e710693763f091e9c8368cba933a004c80d441' into 'dev'
diff --git a/runtime/vm/clustered_snapshot.cc b/runtime/vm/clustered_snapshot.cc
index fa44bea..fbc6871 100644
--- a/runtime/vm/clustered_snapshot.cc
+++ b/runtime/vm/clustered_snapshot.cc
@@ -1635,35 +1635,74 @@
                            active_unchecked_offset, code, deferred);
     }
 
-    // No need to write object pool out if we are producing full AOT
-    // snapshot with bare instructions.
-    if (!(kind == Snapshot::kFullAOT && FLAG_use_bare_instructions)) {
-      WriteField(code, object_pool_);
-#if defined(DART_PRECOMPILER)
-    } else if (FLAG_write_v8_snapshot_profile_to != nullptr &&
-               code->untag()->object_pool_ != ObjectPool::null()) {
-      // If we are writing V8 snapshot profile then attribute references
-      // going through the object pool to the code object itself.
-      ObjectPoolPtr pool = code->untag()->object_pool_;
-
-      for (intptr_t i = 0; i < pool->untag()->length_; i++) {
-        uint8_t bits = pool->untag()->entry_bits()[i];
-        if (ObjectPool::TypeBits::decode(bits) ==
-            ObjectPool::EntryType::kTaggedObject) {
-          s->AttributeElementRef(pool->untag()->data()[i].raw_obj_, i);
-        }
-      }
-#endif  // defined(DART_PRECOMPILER)
-    }
-    WriteField(code, owner_);
-    WriteField(code, exception_handlers_);
-    WriteField(code, pc_descriptors_);
-    WriteField(code, catch_entry_);
     if (s->InCurrentLoadingUnit(code->untag()->compressed_stackmaps_)) {
       WriteField(code, compressed_stackmaps_);
     } else {
       WriteFieldValue(compressed_stackmaps_, CompressedStackMaps::null());
     }
+
+    s->Write<int32_t>(code->untag()->state_bits_);
+
+#if defined(DART_PRECOMPILER)
+    if (FLAG_write_v8_snapshot_profile_to != nullptr) {
+      // If we are writing a V8 snapshot profile, attribute references going
+      // through the object pool and static calls to the code object itself.
+      if (kind == Snapshot::kFullAOT && FLAG_use_bare_instructions &&
+          code->untag()->object_pool_ != ObjectPool::null()) {
+        ObjectPoolPtr pool = code->untag()->object_pool_;
+
+        for (intptr_t i = 0; i < pool->untag()->length_; i++) {
+          uint8_t bits = pool->untag()->entry_bits()[i];
+          if (ObjectPool::TypeBits::decode(bits) ==
+              ObjectPool::EntryType::kTaggedObject) {
+            s->AttributeElementRef(pool->untag()->data()[i].raw_obj_, i);
+          }
+        }
+      }
+      if (code->untag()->static_calls_target_table_ != Array::null()) {
+        array_ = code->untag()->static_calls_target_table_;
+        intptr_t index = code->untag()->object_pool_ != ObjectPool::null()
+                             ? code->untag()->object_pool_->untag()->length_
+                             : 0;
+        for (auto entry : StaticCallsTable(array_)) {
+          auto kind = Code::KindField::decode(
+              Smi::Value(entry.Get<Code::kSCallTableKindAndOffset>()));
+          switch (kind) {
+            case Code::kCallViaCode:
+              // Code object in the pool.
+              continue;
+            case Code::kPcRelativeTTSCall:
+              // TTS will be reachable through the type object, which itself
+              // is in the pool.
+              continue;
+            case Code::kPcRelativeCall:
+            case Code::kPcRelativeTailCall:
+              auto destination = entry.Get<Code::kSCallTableCodeOrTypeTarget>();
+              ASSERT(destination->IsHeapObject() && destination->IsCode());
+              s->AttributeElementRef(destination, index++);
+          }
+        }
+      }
+    }
+#endif  // defined(DART_PRECOMPILER)
+
+    if (Code::IsDiscarded(code)) {
+      // Only write instructions, compressed stackmaps, and state bits
+      // for discarded Code objects.
+      ASSERT(kind == Snapshot::kFullAOT && FLAG_use_bare_instructions &&
+             FLAG_dwarf_stack_traces_mode && !FLAG_retain_code_objects);
+      return;
+    }
+
+    // No need to write the object pool out if we are producing a full AOT
+    // snapshot with bare instructions.
+    if (!(kind == Snapshot::kFullAOT && FLAG_use_bare_instructions)) {
+      WriteField(code, object_pool_);
+    }
+    WriteField(code, owner_);
+    WriteField(code, exception_handlers_);
+    WriteField(code, pc_descriptors_);
+    WriteField(code, catch_entry_);
     if (FLAG_precompiled_mode && FLAG_dwarf_stack_traces_mode) {
       WriteFieldValue(inlined_id_to_function_, Array::null());
       WriteFieldValue(code_source_map_, CodeSourceMap::null());
@@ -1680,43 +1719,12 @@
       WriteField(code, static_calls_target_table_);
     }
 
-#if defined(DART_PRECOMPILER)
-    if (FLAG_write_v8_snapshot_profile_to != nullptr &&
-        code->untag()->static_calls_target_table_ != Array::null()) {
-      // If we are writing V8 snapshot profile then attribute references
-      // going through static calls.
-      array_ = code->untag()->static_calls_target_table_;
-      intptr_t index = code->untag()->object_pool_ != ObjectPool::null()
-                           ? code->untag()->object_pool_->untag()->length_
-                           : 0;
-      for (auto entry : StaticCallsTable(array_)) {
-        auto kind = Code::KindField::decode(
-            Smi::Value(entry.Get<Code::kSCallTableKindAndOffset>()));
-        switch (kind) {
-          case Code::kCallViaCode:
-            // Code object in the pool.
-            continue;
-          case Code::kPcRelativeTTSCall:
-            // TTS will be reachable through type object which itself is
-            // in the pool.
-            continue;
-          case Code::kPcRelativeCall:
-          case Code::kPcRelativeTailCall:
-            auto destination = entry.Get<Code::kSCallTableCodeOrTypeTarget>();
-            ASSERT(destination->IsHeapObject() && destination->IsCode());
-            s->AttributeElementRef(destination, index++);
-        }
-      }
-    }
-#endif  // defined(DART_PRECOMPILER)
-
 #if !defined(PRODUCT)
     WriteField(code, return_address_metadata_);
     if (FLAG_code_comments) {
       WriteField(code, comments_);
     }
 #endif
-    s->Write<int32_t>(code->untag()->state_bits_);
   }
 
   GrowableArray<CodePtr>* discovered_objects() { return &objects_; }
@@ -1732,7 +1740,7 @@
     for (auto code : objects_) {
       ObjectPtr owner =
           WeakSerializationReference::Unwrap(code->untag()->owner_);
-      if (s->CreateArtificalNodeIfNeeded(owner)) {
+      if (s->CreateArtificalNodeIfNeeded(owner) || Code::IsDiscarded(code)) {
         AutoTraceObject(code);
         s->AttributePropertyRef(owner, ":owner_",
                                 /*permit_artificial_ref=*/true);
@@ -1798,6 +1806,19 @@
 
     d->ReadInstructions(code, deferred);
 
+    code->untag()->compressed_stackmaps_ =
+        static_cast<CompressedStackMapsPtr>(d->ReadRef());
+    code->untag()->state_bits_ = d->Read<int32_t>();
+
+#if defined(DART_PRECOMPILED_RUNTIME)
+    if (Code::IsDiscarded(code)) {
+      code->untag()->owner_ = Smi::New(kFunctionCid);
+      return;
+    }
+#else
+    ASSERT(!Code::IsDiscarded(code));
+#endif  // defined(DART_PRECOMPILED_RUNTIME)
+
     // There would be a single global pool if this is a full AOT snapshot
     // with bare instructions.
     if (!(d->kind() == Snapshot::kFullAOT && FLAG_use_bare_instructions)) {
@@ -1811,8 +1832,6 @@
     code->untag()->pc_descriptors_ =
         static_cast<PcDescriptorsPtr>(d->ReadRef());
     code->untag()->catch_entry_ = d->ReadRef();
-    code->untag()->compressed_stackmaps_ =
-        static_cast<CompressedStackMapsPtr>(d->ReadRef());
     code->untag()->inlined_id_to_function_ =
         static_cast<ArrayPtr>(d->ReadRef());
     code->untag()->code_source_map_ =
@@ -1834,8 +1853,6 @@
                                    : Array::null();
     code->untag()->compile_timestamp_ = 0;
 #endif
-
-    code->untag()->state_bits_ = d->Read<int32_t>();
   }
 
   void PostLoad(Deserializer* d, const Array& refs, bool is_canonical) {
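Note on the reordering above: the writer now emits the compressed stackmaps reference and the state bits before everything a discarded Code object can live without, and returns early once those are out; the reader mirrors that order, checks the discarded bit, and substitutes a Smi class id for the owner before bailing out. A minimal sketch of that ordering invariant, using hypothetical stand-in types rather than the VM's Serializer/Deserializer API:

    // Hypothetical stand-in types; not the VM's Serializer/Deserializer API.
    #include <cassert>
    #include <cstdint>
    #include <vector>

    constexpr int32_t kDiscardedBit = 1 << 3;  // mirrors Code::kDiscardedBit (bit 3)

    struct CodeFields {
      int32_t stackmaps_ref = 0;    // stand-in for compressed_stackmaps_
      int32_t state_bits = 0;
      int32_t object_pool_ref = 0;  // only serialized for retained Code
      int32_t owner_ref = 0;
    };

    void WriteCode(const CodeFields& code, std::vector<int32_t>* out) {
      out->push_back(code.stackmaps_ref);
      out->push_back(code.state_bits);
      if ((code.state_bits & kDiscardedBit) != 0) return;  // short form
      out->push_back(code.object_pool_ref);
      out->push_back(code.owner_ref);
    }

    CodeFields ReadCode(const std::vector<int32_t>& in, size_t* cursor) {
      CodeFields code;
      code.stackmaps_ref = in[(*cursor)++];
      code.state_bits = in[(*cursor)++];
      if ((code.state_bits & kDiscardedBit) != 0) return code;  // short form
      code.object_pool_ref = in[(*cursor)++];
      code.owner_ref = in[(*cursor)++];
      return code;
    }

    int main() {
      std::vector<int32_t> stream;
      WriteCode(CodeFields{7, kDiscardedBit}, &stream);  // discarded: 2 fields
      WriteCode(CodeFields{9, 0, 3, 5}, &stream);        // retained: 4 fields
      size_t cursor = 0;
      CodeFields a = ReadCode(stream, &cursor);
      CodeFields b = ReadCode(stream, &cursor);
      assert(a.object_pool_ref == 0 && b.owner_ref == 5);
      assert(cursor == stream.size());  // reader consumed exactly what was written
      return 0;
    }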
diff --git a/runtime/vm/compiler/aot/precompiler.cc b/runtime/vm/compiler/aot/precompiler.cc
index 22ded31..08be7fb 100644
--- a/runtime/vm/compiler/aot/precompiler.cc
+++ b/runtime/vm/compiler/aot/precompiler.cc
@@ -170,6 +170,10 @@
       pending_functions_(
           GrowableObjectArray::Handle(GrowableObjectArray::New())),
       sent_selectors_(),
+      entry_point_functions_(
+          HashTables::New<FunctionSet>(/*initial_capacity=*/128)),
+      functions_called_dynamically_(
+          HashTables::New<FunctionSet>(/*initial_capacity=*/1024)),
       seen_functions_(HashTables::New<FunctionSet>(/*initial_capacity=*/1024)),
       possibly_retained_functions_(
           HashTables::New<FunctionSet>(/*initial_capacity=*/1024)),
@@ -192,6 +196,8 @@
 
 Precompiler::~Precompiler() {
   // We have to call Release() in DEBUG mode.
+  entry_point_functions_.Release();
+  functions_called_dynamically_.Release();
   seen_functions_.Release();
   possibly_retained_functions_.Release();
   functions_to_retain_.Release();
@@ -445,6 +451,7 @@
              non_visited.ToFullyQualifiedCString());
     }
 #endif
+    DiscardCodeObjects();
     ProgramVisitor::Dedup(T);
 
     zone_ = NULL;
@@ -796,6 +803,11 @@
   if (functions_to_retain_.ContainsKey(function)) return;
   functions_to_retain_.Insert(function);
 
+  if (function.NeedsMonomorphicCheckedEntry(Z) ||
+      Function::IsDynamicInvocationForwarderName(function.name())) {
+    functions_called_dynamically_.Insert(function);
+  }
+
   const FunctionType& signature = FunctionType::Handle(Z, function.signature());
   AddType(signature);
 
@@ -1079,6 +1091,7 @@
   }
 
   if (possibly_retained_functions_.ContainsKey(function)) return;
+
   if (retain || MustRetainFunction(function)) {
     possibly_retained_functions_.Insert(function);
   }
@@ -1239,6 +1252,7 @@
           if (type == EntryPointPragma::kAlways ||
               type == EntryPointPragma::kCallOnly) {
             AddFunction(function);
+            entry_point_functions_.Insert(function);
           }
 
           if ((type == EntryPointPragma::kAlways ||
@@ -1247,10 +1261,12 @@
               !function.IsSetterFunction()) {
             function2 = function.ImplicitClosureFunction();
             AddFunction(function2);
+            entry_point_functions_.Insert(function2);
           }
 
           if (function.IsGenerativeConstructor()) {
             AddInstantiatedClass(cls);
+            entry_point_functions_.Insert(function);
           }
         }
         if (function.kind() == UntaggedFunction::kImplicitGetter &&
@@ -1259,6 +1275,7 @@
             field ^= implicit_getters.At(i);
             if (function.accessor_field() == field.ptr()) {
               AddFunction(function);
+              entry_point_functions_.Insert(function);
             }
           }
         }
@@ -1268,6 +1285,7 @@
             field ^= implicit_setters.At(i);
             if (function.accessor_field() == field.ptr()) {
               AddFunction(function);
+              entry_point_functions_.Insert(function);
             }
           }
         }
@@ -1277,6 +1295,7 @@
             field ^= implicit_static_getters.At(i);
             if (function.accessor_field() == field.ptr()) {
               AddFunction(function);
+              entry_point_functions_.Insert(function);
             }
           }
         }
@@ -2397,6 +2416,139 @@
   libraries_ = retained_libraries.ptr();
 }
 
+// Traverse the program structure and mark Code objects which do not
+// carry useful information as discarded.
+void Precompiler::DiscardCodeObjects() {
+  class DiscardCodeVisitor : public CodeVisitor {
+   public:
+    DiscardCodeVisitor(Zone* zone,
+                       const FunctionSet& functions_to_retain,
+                       const FunctionSet& entry_point_functions,
+                       const FunctionSet& functions_called_dynamically)
+        : zone_(zone),
+          function_(Function::Handle(zone)),
+          functions_to_retain_(functions_to_retain),
+          entry_point_functions_(entry_point_functions),
+          functions_called_dynamically_(functions_called_dynamically) {}
+
+    void VisitCode(const Code& code) override {
+      ++total_code_objects_;
+
+      // Only discard Code objects corresponding to Dart functions.
+      if (!code.IsFunctionCode()) {
+        ++non_function_codes_;
+        return;
+      }
+
+      // Retain Code object if it has exception handlers or PC descriptors.
+      if (code.exception_handlers() !=
+          Object::empty_exception_handlers().ptr()) {
+        ++codes_with_exception_handlers_;
+        return;
+      }
+      if (code.pc_descriptors() != Object::empty_descriptors().ptr()) {
+        ++codes_with_pc_descriptors_;
+        return;
+      }
+
+      function_ = code.function();
+      if (functions_to_retain_.ContainsKey(function_)) {
+        // Retain Code objects corresponding to:
+        // * invisible functions (to filter them from stack traces);
+        // * async/async* closures (to construct async stacks);
+        // * native functions (to find the native implementation).
+        if (!function_.is_visible()) {
+          ++codes_with_invisible_function_;
+          return;
+        }
+        if (function_.is_native()) {
+          ++codes_with_native_function_;
+          return;
+        }
+        if (function_.IsAsyncClosure() || function_.IsAsyncGenClosure()) {
+          ++codes_with_async_closure_function_;
+          return;
+        }
+
+        // Retain Code objects for entry points.
+        if (entry_point_functions_.ContainsKey(function_)) {
+          ++codes_with_entry_point_function_;
+          return;
+        }
+
+        // Retain Code objects corresponding to dynamically
+        // called functions.
+        if (functions_called_dynamically_.ContainsKey(function_)) {
+          ++codes_with_dynamically_called_function_;
+          return;
+        }
+      } else {
+        ASSERT(!entry_point_functions_.ContainsKey(function_));
+        ASSERT(!functions_called_dynamically_.ContainsKey(function_));
+      }
+
+      code.set_is_discarded(true);
+      ++discarded_codes_;
+    }
+
+    void PrintStatistics() const {
+      THR_Print("Discarding Code objects:\n");
+      THR_Print("    %8" Pd " non-function Codes\n", non_function_codes_);
+      THR_Print("    %8" Pd " Codes with exception handlers\n",
+                codes_with_exception_handlers_);
+      THR_Print("    %8" Pd " Codes with pc descriptors\n",
+                codes_with_pc_descriptors_);
+      THR_Print("    %8" Pd " Codes with invisible functions\n",
+                codes_with_invisible_function_);
+      THR_Print("    %8" Pd " Codes with native functions\n",
+                codes_with_native_function_);
+      THR_Print("    %8" Pd " Codes with async closure functions\n",
+                codes_with_async_closure_function_);
+      THR_Print("    %8" Pd " Codes with dynamically called functions\n",
+                codes_with_dynamically_called_function_);
+      THR_Print("    %8" Pd " Codes with entry point functions\n",
+                codes_with_entry_point_function_);
+      THR_Print("    %8" Pd " Codes discarded\n", discarded_codes_);
+      THR_Print("    %8" Pd " Codes total\n", total_code_objects_);
+    }
+
+   private:
+    Zone* zone_;
+    Function& function_;
+    const FunctionSet& functions_to_retain_;
+    const FunctionSet& entry_point_functions_;
+    const FunctionSet& functions_called_dynamically_;
+
+    // Statistics
+    intptr_t total_code_objects_ = 0;
+    intptr_t non_function_codes_ = 0;
+    intptr_t codes_with_exception_handlers_ = 0;
+    intptr_t codes_with_pc_descriptors_ = 0;
+    intptr_t codes_with_invisible_function_ = 0;
+    intptr_t codes_with_native_function_ = 0;
+    intptr_t codes_with_async_closure_function_ = 0;
+    intptr_t codes_with_dynamically_called_function_ = 0;
+    intptr_t codes_with_entry_point_function_ = 0;
+    intptr_t discarded_codes_ = 0;
+  };
+
+  // Code objects are stored in stack frames if not use_bare_instructions.
+  // Code objects are used by stack traces if not dwarf_stack_traces.
+  // Code objects are used by the profiler in non-PRODUCT mode.
+  if (!FLAG_use_bare_instructions || !FLAG_dwarf_stack_traces_mode ||
+      FLAG_retain_code_objects) {
+    return;
+  }
+
+  DiscardCodeVisitor visitor(Z, functions_to_retain_, entry_point_functions_,
+                             functions_called_dynamically_);
+  ProgramVisitor::WalkProgram(Z, IG, &visitor);
+
+  if (FLAG_trace_precompiler) {
+    visitor.PrintStatistics();
+  }
+}
+
 // Traits for the HashTable template.
 struct CodeKeyTraits {
   static uint32_t Hash(const Object& key) { return Code::Cast(key).Size(); }
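The visitor above boils down to a single predicate: discard a Code object only when it carries no information beyond its instructions and compressed stack map, and its function is neither an entry point, dynamically callable, invisible, native, nor an async/async* closure. A condensed restatement of those rules as a standalone predicate over hypothetical booleans (not the VM's handle types):

    #include <cassert>

    // Inputs a DiscardCodeVisitor-style pass would compute per Code object.
    struct CodeInfo {
      bool is_function_code;             // corresponds to a Dart function?
      bool has_exception_handlers;
      bool has_pc_descriptors;
      bool function_retained;            // in functions_to_retain_
      bool function_visible;
      bool function_native;
      bool function_async_closure;       // async or async* closure
      bool function_entry_point;         // in entry_point_functions_
      bool function_called_dynamically;  // in functions_called_dynamically_
    };

    bool CanDiscard(const CodeInfo& c) {
      if (!c.is_function_code) return false;  // stubs, allocation code, etc.
      if (c.has_exception_handlers || c.has_pc_descriptors) return false;
      if (c.function_retained) {
        if (!c.function_visible) return false;       // stack-trace filtering
        if (c.function_native) return false;         // native implementation lookup
        if (c.function_async_closure) return false;  // async stack unwinding
        if (c.function_entry_point || c.function_called_dynamically) return false;
      }
      return true;
    }

    int main() {
      // A plain, visible, retained function with no handlers or pc descriptors.
      CodeInfo leaf{true, false, false, true, true, false, false, false, false};
      assert(CanDiscard(leaf));
      leaf.has_pc_descriptors = true;
      assert(!CanDiscard(leaf));
      return 0;
    }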
diff --git a/runtime/vm/compiler/aot/precompiler.h b/runtime/vm/compiler/aot/precompiler.h
index 42406c4..4544053 100644
--- a/runtime/vm/compiler/aot/precompiler.h
+++ b/runtime/vm/compiler/aot/precompiler.h
@@ -333,6 +333,7 @@
   void DropLibraryEntries();
   void DropClasses();
   void DropLibraries();
+  void DiscardCodeObjects();
 
   DEBUG_ONLY(FunctionPtr FindUnvisitedRetainedFunction());
 
@@ -376,6 +377,8 @@
   GrowableObjectArray& libraries_;
   const GrowableObjectArray& pending_functions_;
   SymbolSet sent_selectors_;
+  FunctionSet entry_point_functions_;
+  FunctionSet functions_called_dynamically_;
   FunctionSet seen_functions_;
   FunctionSet possibly_retained_functions_;
   FieldSet fields_to_retain_;
diff --git a/runtime/vm/flag_list.h b/runtime/vm/flag_list.h
index f6c90bf..c118e0f 100644
--- a/runtime/vm/flag_list.h
+++ b/runtime/vm/flag_list.h
@@ -194,6 +194,9 @@
   P(retain_function_objects, bool, true,                                       \
     "Serialize function objects for all code objects even if not otherwise "   \
     "needed in the precompiled runtime.")                                      \
+  P(retain_code_objects, bool, true,                                           \
+    "Serialize all code objects even if not otherwise "                        \
+    "needed in the precompiled runtime.")                                      \
   P(enable_isolate_groups, bool, false,                                        \
     "Enable isolate group support in AOT.")                                    \
   P(experimental_enable_isolate_groups_jit, bool, false,                       \
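The new flag defaults to true, so existing AOT builds keep their Code objects; it is cleared by the dwarf_stack_traces handler (see the object.cc change below), which is what arms DiscardCodeObjects. A hypothetical invocation (the exact option spelling may differ between SDK versions, and VM flag names generally accept dashes and underscores interchangeably):

    gen_snapshot --snapshot_kind=app-aot-elf --elf=app.so \
        --dwarf-stack-traces app.dill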
diff --git a/runtime/vm/object.cc b/runtime/vm/object.cc
index 1d8c7ac..007ceb4 100644
--- a/runtime/vm/object.cc
+++ b/runtime/vm/object.cc
@@ -16250,6 +16250,10 @@
   set_state_bits(AliveBit::update(value, untag()->state_bits_));
 }
 
+void Code::set_is_discarded(bool value) const {
+  set_state_bits(DiscardedBit::update(value, untag()->state_bits_));
+}
+
 void Code::set_compressed_stackmaps(const CompressedStackMaps& maps) const {
   ASSERT(maps.IsOld());
   untag()->set_compressed_stackmaps(maps.ptr());
@@ -17079,41 +17083,6 @@
   reader.DumpSourcePositions(relative_addresses ? 0 : PayloadStart());
 }
 
-bool Code::CanBeOmittedFromAOTSnapshot() const {
-  NoSafepointScope no_safepoint;
-
-  // Code objects are stored in stack frames if not use_bare_instructions.
-  // Code objects are used by stack traces if not dwarf_stack_traces.
-  if (!FLAG_precompiled_mode || !FLAG_use_bare_instructions ||
-      !FLAG_dwarf_stack_traces_mode) {
-    return false;
-  }
-  // Only omit Code objects corresponding to Dart functions.
-  if (!IsFunctionCode()) {
-    return false;
-  }
-  // Retain Code object if it has exception handlers or PC descriptors.
-  if ((exception_handlers() != Object::empty_exception_handlers().ptr()) ||
-      (pc_descriptors() != Object::empty_descriptors().ptr())) {
-    return false;
-  }
-  if (!owner()->IsHeapObject()) {
-    // Can drop Code if precompiler dropped the Function and only left Smi
-    // classId.
-    return true;
-  }
-  // Retain Code objects corresponding to:
-  // * invisible functions (to filter them from stack traces);
-  // * async/async* closures (to construct async stacks).
-  // * native functions (to find native implementation).
-  const auto& func = Function::Handle(function());
-  if (!func.is_visible() || func.is_native() || func.IsAsyncClosure() ||
-      func.IsAsyncGenClosure()) {
-    return false;
-  }
-  return true;
-}
-
 intptr_t Context::GetLevel() const {
   intptr_t level = 0;
   Context& parent_ctx = Context::Handle(parent());
@@ -25090,6 +25059,7 @@
   // debugging options like the observatory available.
   if (value) {
     FLAG_retain_function_objects = false;
+    FLAG_retain_code_objects = false;
   }
 #endif
 }
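set_is_discarded() above follows the same read-modify-write idiom as the other state-bit setters: BitField::update() rewrites exactly one bit of state_bits_ and BitField::decode() reads it back, leaving neighbouring bits intact. A minimal self-contained sketch of that idiom (a simplified stand-in, not the VM's platform/bit_field.h):

    #include <cassert>
    #include <cstdint>

    template <typename S, typename T, int position, int size>
    struct BitFieldSketch {
      static constexpr S mask_in_place() {
        return ((S{1} << size) - 1) << position;
      }
      static constexpr T decode(S value) {
        return static_cast<T>((value >> position) & ((S{1} << size) - 1));
      }
      // Returns 'original' with only this field replaced by 'value'.
      static constexpr S update(T value, S original) {
        return (original & ~mask_in_place()) |
               (static_cast<S>(value) << position);
      }
    };

    using DiscardedBit = BitFieldSketch<int32_t, bool, /*position=*/3, /*size=*/1>;

    int main() {
      int32_t state_bits = 0x7;  // optimized, force_optimized, alive set
      state_bits = DiscardedBit::update(true, state_bits);
      assert(DiscardedBit::decode(state_bits));
      assert((state_bits & 0x7) == 0x7);  // neighbouring bits untouched
      return 0;
    }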
diff --git a/runtime/vm/object.h b/runtime/vm/object.h
index 8b81dc7..17187ee 100644
--- a/runtime/vm/object.h
+++ b/runtime/vm/object.h
@@ -5956,6 +5956,12 @@
   bool is_alive() const { return AliveBit::decode(untag()->state_bits_); }
   void set_is_alive(bool value) const;
 
+  bool is_discarded() const { return IsDiscarded(ptr()); }
+  static bool IsDiscarded(const CodePtr code) {
+    return DiscardedBit::decode(code->untag()->state_bits_);
+  }
+  void set_is_discarded(bool value) const;
+
   bool HasMonomorphicEntry() const { return HasMonomorphicEntry(ptr()); }
   static bool HasMonomorphicEntry(const CodePtr code) {
 #if defined(DART_PRECOMPILED_RUNTIME)
@@ -6378,11 +6384,6 @@
     untag()->set_object_pool(object_pool);
   }
 
-  // Returns true if given Code object can be omitted from
-  // the AOT snapshot (when corresponding instructions are
-  // included).
-  bool CanBeOmittedFromAOTSnapshot() const;
-
  private:
   void set_state_bits(intptr_t bits) const;
 
@@ -6392,8 +6393,9 @@
     kOptimizedBit = 0,
     kForceOptimizedBit = 1,
     kAliveBit = 2,
-    kPtrOffBit = 3,
-    kPtrOffSize = 29,
+    kDiscardedBit = 3,
+    kPtrOffBit = 4,
+    kPtrOffSize = kBitsPerInt32 - kPtrOffBit,
   };
 
   class OptimizedBit : public BitField<int32_t, bool, kOptimizedBit, 1> {};
@@ -6404,6 +6406,13 @@
       : public BitField<int32_t, bool, kForceOptimizedBit, 1> {};
 
   class AliveBit : public BitField<int32_t, bool, kAliveBit, 1> {};
+
+  // Set by the precompiler if this Code object doesn't contain
+  // useful information besides instructions and a compressed stack map.
+  // Such objects are serialized in a shorter form. (In the future such
+  // Code objects will not be re-created during snapshot deserialization.)
+  class DiscardedBit : public BitField<int32_t, bool, kDiscardedBit, 1> {};
+
   class PtrOffBits
       : public BitField<int32_t, intptr_t, kPtrOffBit, kPtrOffSize> {};
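The enum change above is the crux: a new kDiscardedBit slots in at bit 3, pushing the pointer-offsets field up to bit 4 and shrinking it from 29 to 28 bits so everything still fits in the 32-bit state_bits_. A small sketch (illustrative constants mirroring the enum, not the header itself) that pins the arithmetic down:

    // Illustrative constants mirroring the Code state-bit layout above.
    constexpr int kOptimizedBit = 0;
    constexpr int kForceOptimizedBit = 1;
    constexpr int kAliveBit = 2;
    constexpr int kDiscardedBit = 3;
    constexpr int kPtrOffBit = 4;
    constexpr int kBitsPerInt32 = 32;
    constexpr int kPtrOffSize = kBitsPerInt32 - kPtrOffBit;  // 28, was 29

    static_assert(kOptimizedBit == 0 && kForceOptimizedBit == 1 && kAliveBit == 2,
                  "low bits are unchanged");
    static_assert(kDiscardedBit == kAliveBit + 1 && kPtrOffBit == kDiscardedBit + 1,
                  "DiscardedBit shifts PtrOffBits up by one");
    static_assert(kPtrOffBit + kPtrOffSize == 32,
                  "state_bits_ must still fit in an int32_t");

    int main() { return 0; }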
 
diff --git a/runtime/vm/stack_frame.cc b/runtime/vm/stack_frame.cc
index 037e70d..f5df240 100644
--- a/runtime/vm/stack_frame.cc
+++ b/runtime/vm/stack_frame.cc
@@ -359,7 +359,7 @@
       // behavior of ReversePc::Lookup which will return
       // StubCode::UnknownDartCode() if code object is omitted from
       // the snapshot.
-      if (FLAG_dwarf_stack_traces_mode && code.CanBeOmittedFromAOTSnapshot()) {
+      if (code.is_discarded()) {
         ASSERT(StubCode::UnknownDartCode().PayloadStart() == 0);
         ASSERT(StubCode::UnknownDartCode().Size() == kUwordMax);
         ASSERT(StubCode::UnknownDartCode().IsFunctionCode());
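The asserts above encode why the fallback is safe: with a payload start of 0 and a size of kUwordMax, UnknownDartCode spans effectively the whole address space, so a return address from a discarded Code object still falls inside some Code during reverse-PC lookup. A sketch of that containment argument with hypothetical stand-in types:

    #include <cassert>
    #include <cstdint>
    #include <limits>

    using uword = std::uintptr_t;
    constexpr uword kUwordMax = std::numeric_limits<uword>::max();

    struct CodeRange {
      uword payload_start;
      uword size;
      bool Contains(uword pc) const {
        return pc - payload_start < size;  // unsigned wrap-around safe
      }
    };

    int main() {
      CodeRange unknown_dart_code{/*payload_start=*/0, /*size=*/kUwordMax};
      assert(unknown_dart_code.Contains(0x12345678));  // any plausible PC matches
      assert(unknown_dart_code.Contains(kUwordMax - 1));
      return 0;
    }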
diff --git a/tools/VERSION b/tools/VERSION
index 06f4903..fa72651 100644
--- a/tools/VERSION
+++ b/tools/VERSION
@@ -27,5 +27,5 @@
 MAJOR 2
 MINOR 13
 PATCH 0
-PRERELEASE 87
+PRERELEASE 88
 PRERELEASE_PATCH 0
\ No newline at end of file