Commit ef1640b8 authored by Dominik Inführ, committed by V8 LUCI CQ

[heap] Remove flag always_promote_young_mc

This flag has been enabled by default in production for quite some time.
In addition, the flag was already defined as read-only and could not be
disabled, so let's remove it for good.

Bug: v8:10064
Change-Id: I0e71eee9d25960a96324d56c8f0191fe678dc6e6
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3268907
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#77806}
parent 6366f334
@@ -578,8 +578,6 @@ DEFINE_NEG_NEG_IMPLICATION(allocation_site_tracking,
 DEFINE_BOOL(allocation_site_pretenuring, true,
             "pretenure with allocation sites")
 DEFINE_BOOL(page_promotion, true, "promote pages based on utilization")
-DEFINE_BOOL_READONLY(always_promote_young_mc, true,
-                     "always promote young objects during mark-compact")
 DEFINE_INT(page_promotion_threshold, 70,
            "min percentage of live bytes on a page to enable fast evacuation")
 DEFINE_BOOL(trace_pretenuring, false,
...
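For context: a flag declared with DEFINE_BOOL_READONLY is pinned to its default
value and cannot be flipped on the command line, so every check of
FLAG_always_promote_young_mc in the tree was statically true. The standalone
C++ sketch below illustrates that read-only-flag pattern; the names
(kAlwaysPromoteYoungMC, HandleFlagOverride) are hypothetical and this is a
simplified model, not V8's actual flag machinery.

#include <cstdio>
#include <cstring>

// Hypothetical stand-in for what a read-only flag amounts to: a
// compile-time constant rather than a mutable global.
constexpr bool kAlwaysPromoteYoungMC = true;

// Command-line parsing may still recognize the "--no-..." spelling,
// but there is no variable to assign, so the override is dropped.
void HandleFlagOverride(const char* arg) {
  if (std::strcmp(arg, "--no-always-promote-young-mc") == 0) {
    std::fprintf(stderr, "warning: read-only flag, override ignored\n");
  }
}

int main(int argc, char** argv) {
  for (int i = 1; i < argc; ++i) HandleFlagOverride(argv[i]);
  // Every branch on the constant is decided at compile time; deleting
  // the flag just makes the source match what the binary already did.
  if (kAlwaysPromoteYoungMC) {
    std::printf("mark-compact: promoting all live young objects\n");
  }
  return 0;
}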
(Diff for one file is collapsed and not shown.)
@@ -184,6 +184,7 @@ class LiveObjectVisitor : AllStatic {
   static void RecomputeLiveBytes(MemoryChunk* chunk, MarkingState* state);
 };
 
+enum class AlwaysPromoteYoung { kYes, kNo };
 enum PageEvacuationMode { NEW_TO_NEW, NEW_TO_OLD };
 enum MarkingTreatmentMode { KEEP, CLEAR };
 enum class RememberedSetUpdatingMode { ALL, OLD_TO_NEW_ONLY };
@@ -215,8 +216,6 @@ class MarkCompactCollectorBase {
   virtual void Evacuate() = 0;
   virtual void EvacuatePagesInParallel() = 0;
   virtual void UpdatePointersAfterEvacuation() = 0;
-  virtual std::unique_ptr<UpdatingItem> CreateToSpaceUpdatingItem(
-      MemoryChunk* chunk, Address start, Address end) = 0;
   virtual std::unique_ptr<UpdatingItem> CreateRememberedSetUpdatingItem(
       MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) = 0;
 
@@ -228,10 +227,9 @@ class MarkCompactCollectorBase {
       MigrationObserver* migration_observer);
 
   // Returns whether this page should be moved according to heuristics.
-  bool ShouldMovePage(Page* p, intptr_t live_bytes, bool promote_young);
+  bool ShouldMovePage(Page* p, intptr_t live_bytes,
+                      AlwaysPromoteYoung promote_young);
 
-  int CollectToSpaceUpdatingItems(
-      std::vector<std::unique_ptr<UpdatingItem>>* items);
   template <typename IterateableSpace>
   int CollectRememberedSetUpdatingItems(
       std::vector<std::unique_ptr<UpdatingItem>>* items,
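This hunk also swaps the bare bool parameter of ShouldMovePage for the new
AlwaysPromoteYoung enum class, so call sites spell out the policy instead of
passing an opaque true/false. A minimal sketch of that refactoring pattern
follows; the function body and threshold are invented for illustration and do
not reflect the real page-promotion heuristic.

#include <cstdint>

enum class AlwaysPromoteYoung { kYes, kNo };

// Illustrative body only; the real ShouldMovePage also weighs page
// utilization (see page_promotion_threshold above).
bool ShouldMovePage(intptr_t live_bytes, AlwaysPromoteYoung promote_young) {
  constexpr intptr_t kFakeThreshold = 70 * 1024;  // invented number
  if (promote_young == AlwaysPromoteYoung::kYes) return true;
  return live_bytes >= kFakeThreshold;
}

int main() {
  // Enum-class call sites name the policy; the old signature read as
  // ShouldMovePage(p, 4096, true), telling the reader nothing.
  bool full_gc = ShouldMovePage(4096, AlwaysPromoteYoung::kYes);
  bool minor_gc = ShouldMovePage(4096, AlwaysPromoteYoung::kNo);
  return (full_gc && !minor_gc) ? 0 : 1;
}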
@@ -712,9 +710,6 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
   void EvacuatePagesInParallel() override;
   void UpdatePointersAfterEvacuation() override;
 
-  std::unique_ptr<UpdatingItem> CreateToSpaceUpdatingItem(MemoryChunk* chunk,
-                                                          Address start,
-                                                          Address end) override;
   std::unique_ptr<UpdatingItem> CreateRememberedSetUpdatingItem(
       MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) override;
 
@@ -870,10 +865,13 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
   std::unique_ptr<UpdatingItem> CreateToSpaceUpdatingItem(MemoryChunk* chunk,
                                                           Address start,
-                                                          Address end) override;
+                                                          Address end);
   std::unique_ptr<UpdatingItem> CreateRememberedSetUpdatingItem(
       MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) override;
 
+  int CollectToSpaceUpdatingItems(
+      std::vector<std::unique_ptr<UpdatingItem>>* items);
+
   void SweepArrayBufferExtensions();
 
   MarkingWorklist* worklist_;
...
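The to-space updating hooks move from the shared base class into
MinorMarkCompactCollector alone. The comment sketch below records the likely
rationale; it is an inference from this diff, not stated in the CL description.

// Inferred rationale (an assumption, not stated in the commit message):
//
// Full mark-compact now unconditionally promotes live young objects,
// so after evacuation nothing survives in to-space and there are no
// to-space pointers left to update -- the virtual hooks can be dropped
// from MarkCompactCollectorBase and MarkCompactCollector.
//
// The minor mark-compactor can still keep survivors in new space
// (NEW_TO_NEW page promotion), so it retains private, non-virtual
// CreateToSpaceUpdatingItem() and CollectToSpaceUpdatingItems().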
@@ -390,7 +390,7 @@ void MemoryChunk::InvalidateRecordedSlots(HeapObject object) {
     RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(object);
   }
 
-  if (!FLAG_always_promote_young_mc || slot_set_[OLD_TO_NEW] != nullptr)
+  if (slot_set_[OLD_TO_NEW] != nullptr)
     RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(object);
 }
...
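The guard in InvalidateRecordedSlots simplifies by plain boolean algebra once
the flag is pinned to true:

// !FLAG_always_promote_young_mc || slot_set_[OLD_TO_NEW] != nullptr
//   == !true || slot_set_[OLD_TO_NEW] != nullptr   // flag was read-only true
//   == false || slot_set_[OLD_TO_NEW] != nullptr
//   ==          slot_set_[OLD_TO_NEW] != nullptr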
@@ -362,7 +362,7 @@ HEAP_TEST(CompactionPartiallyAbortedPageIntraAbortedPointers) {
 }
 
 HEAP_TEST(CompactionPartiallyAbortedPageWithRememberedSetEntries) {
-  if (FLAG_never_compact || FLAG_always_promote_young_mc) return;
+  if (FLAG_never_compact) return;
   // Test the scenario where we reach OOM during compaction and parts of the
   // page have already been migrated to a new one. Objects on the aborted page
   // are linked together and the very first object on the aborted page points
...
@@ -6685,8 +6685,7 @@ HEAP_TEST(Regress779503) {
     // currently scavenging.
     heap->delay_sweeper_tasks_for_testing_ = true;
    CcTest::CollectGarbage(OLD_SPACE);
-    CHECK(FLAG_always_promote_young_mc ? !Heap::InYoungGeneration(*byte_array)
-                                       : Heap::InYoungGeneration(*byte_array));
+    CHECK(!Heap::InYoungGeneration(*byte_array));
   }
   // Scavenging and sweeping the same page will crash as slots will be
   // overridden.
...
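The test CHECKs collapse the same way: with the flag constant-true,
"flag ? !X : X" is just "!X". For instance:

// FLAG_always_promote_young_mc ? !Heap::InYoungGeneration(*byte_array)
//                              :  Heap::InYoungGeneration(*byte_array)
//   == true ? !Heap::InYoungGeneration(*byte_array)
//           :  Heap::InYoungGeneration(*byte_array)
//   == !Heap::InYoungGeneration(*byte_array)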
@@ -73,33 +73,6 @@ TEST(Promotion) {
   }
 }
 
-HEAP_TEST(NoPromotion) {
-  if (FLAG_always_promote_young_mc) return;
-
-  FLAG_stress_concurrent_allocation = false;  // For SealCurrentObjects.
-  // Page promotion allows pages to be moved to old space even in the case of
-  // OOM scenarios.
-  FLAG_page_promotion = false;
-  CcTest::InitializeVM();
-  Isolate* isolate = CcTest::i_isolate();
-  {
-    v8::HandleScope sc(CcTest::isolate());
-    Heap* heap = isolate->heap();
-    heap::SealCurrentObjects(heap);
-
-    int array_length = heap::FixedArrayLenFromSize(kMaxRegularHeapObjectSize);
-    Handle<FixedArray> array = isolate->factory()->NewFixedArray(array_length);
-
-    heap->set_force_oom(true);
-    // Array should be in the new space.
-    CHECK(heap->InSpace(*array, NEW_SPACE));
-    CcTest::CollectAllGarbage();
-    CcTest::CollectAllGarbage();
-    CHECK(heap->InSpace(*array, NEW_SPACE));
-  }
-}
-
 // This is the same as Factory::NewMap, except it doesn't retry on
 // allocation failure.
 AllocationResult HeapTester::AllocateMapForTest(Isolate* isolate) {
...
@@ -100,72 +100,6 @@ UNINITIALIZED_TEST(PagePromotion_NewToOld) {
   isolate->Dispose();
 }
 
-UNINITIALIZED_TEST(PagePromotion_NewToNew) {
-  if (!i::FLAG_page_promotion || FLAG_always_promote_young_mc) return;
-
-  v8::Isolate* isolate = NewIsolateForPagePromotion();
-  Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
-  {
-    v8::Isolate::Scope isolate_scope(isolate);
-    v8::HandleScope handle_scope(isolate);
-    v8::Context::New(isolate)->Enter();
-    Heap* heap = i_isolate->heap();
-
-    std::vector<Handle<FixedArray>> handles;
-    heap::SimulateFullSpace(heap->new_space(), &handles);
-    CHECK_GT(handles.size(), 0u);
-    // Last object in handles should definitely be on a page that does not
-    // contain the age mark, thus qualifying for moving.
-    Handle<FixedArray> last_object = handles.back();
-    Page* to_be_promoted_page = Page::FromHeapObject(*last_object);
-    CHECK(!to_be_promoted_page->Contains(heap->new_space()->age_mark()));
-    CHECK(to_be_promoted_page->Contains(last_object->address()));
-    CHECK(heap->new_space()->ToSpaceContainsSlow(last_object->address()));
-    heap::GcAndSweep(heap, OLD_SPACE);
-    CHECK(heap->new_space()->ToSpaceContainsSlow(last_object->address()));
-    CHECK(to_be_promoted_page->Contains(last_object->address()));
-  }
-  isolate->Dispose();
-}
-
-UNINITIALIZED_HEAP_TEST(Regress658718) {
-  if (!i::FLAG_page_promotion || FLAG_always_promote_young_mc) return;
-
-  v8::Isolate* isolate = NewIsolateForPagePromotion(4, 8);
-  Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
-  {
-    v8::Isolate::Scope isolate_scope(isolate);
-    v8::HandleScope handle_scope(isolate);
-    v8::Context::New(isolate)->Enter();
-    Heap* heap = i_isolate->heap();
-    heap->delay_sweeper_tasks_for_testing_ = true;
-    GrowNewSpace(heap);
-    {
-      v8::HandleScope inner_handle_scope(isolate);
-      std::vector<Handle<FixedArray>> handles;
-      heap::SimulateFullSpace(heap->new_space(), &handles);
-      CHECK_GT(handles.size(), 0u);
-      // Last object in handles should definitely be on a page that does not
-      // contain the age mark, thus qualifying for moving.
-      Handle<FixedArray> last_object = handles.back();
-      Page* to_be_promoted_page = Page::FromHeapObject(*last_object);
-      CHECK(!to_be_promoted_page->Contains(heap->new_space()->age_mark()));
-      CHECK(to_be_promoted_page->Contains(last_object->address()));
-      CHECK(heap->new_space()->ToSpaceContainsSlow(last_object->address()));
-      heap->CollectGarbage(OLD_SPACE, i::GarbageCollectionReason::kTesting);
-      CHECK(heap->new_space()->ToSpaceContainsSlow(last_object->address()));
-      CHECK(to_be_promoted_page->Contains(last_object->address()));
-    }
-
-    heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
-    heap->new_space()->Shrink();
-    heap->memory_allocator()->unmapper()->EnsureUnmappingCompleted();
-    heap->delay_sweeper_tasks_for_testing_ = false;
-    heap->mark_compact_collector()->sweeper()->StartSweeperTasks();
-    heap->mark_compact_collector()->EnsureSweepingCompleted();
-  }
-  isolate->Dispose();
-}
-
 #endif  // V8_LITE_MODE
 
 }  // namespace heap
...
@@ -40,8 +40,7 @@ TEST(WeakReferencesBasic) {
   MaybeObject code_object = lh->data1();
   CHECK(code_object->IsSmi());
   CcTest::CollectAllGarbage();
-  CHECK(FLAG_always_promote_young_mc ? !Heap::InYoungGeneration(*lh)
-                                     : Heap::InYoungGeneration(*lh));
+  CHECK(!Heap::InYoungGeneration(*lh));
   CHECK_EQ(code_object, lh->data1());
 
   {
...
@@ -166,9 +166,7 @@ TEST(WeakMapPromotionMarkCompact) {
   CcTest::CollectAllGarbage();
 
-  CHECK(FLAG_always_promote_young_mc
-            ? !ObjectInYoungGeneration(weakmap->table())
-            : ObjectInYoungGeneration(weakmap->table()));
+  CHECK(!ObjectInYoungGeneration(weakmap->table()));
 
   Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
   Handle<JSObject> object = factory->NewJSObjectFromMap(map);
 
@@ -180,8 +178,7 @@ TEST(WeakMapPromotionMarkCompact) {
       EphemeronHashTable::cast(weakmap->table()), *object));
 
   CcTest::CollectAllGarbage();
-  CHECK(FLAG_always_promote_young_mc ? !ObjectInYoungGeneration(*object)
-                                     : ObjectInYoungGeneration(*object));
+  CHECK(!ObjectInYoungGeneration(*object));
   CHECK(!ObjectInYoungGeneration(weakmap->table()));
   CHECK(EphemeronHashTableContainsKey(
       EphemeronHashTable::cast(weakmap->table()), *object));
...