Commit bdd9eba0 authored by Omer Katz, committed by V8 LUCI CQ

[heap] Update unittests for MinorMC

This CL includes:
1) Using one full GC instead of two young GCs to force object promotion (a
   sketch of the pattern follows below).
2) A couple of needed bailouts.
3) Using manual evacuation candidates in old space to ensure an object is
   evacuated (moved to a different address) instead of relying on the
   Scavenger.
And some other minor tweaks.
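
As a rough sketch of item 1, using the cctest helper heap::GcAndSweep that
appears in the hunks below, the promotion pattern changes from two young GCs
to a single full GC:

  // Before: age the object through two young (Scavenger) GCs.
  heap::GcAndSweep(heap, NEW_SPACE);  // in survivor space now
  heap::GcAndSweep(heap, NEW_SPACE);  // in old gen now
  // After: one full GC promotes the object directly, which also holds when
  // MinorMC replaces the Scavenger.
  heap::GcAndSweep(heap, OLD_SPACE);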

Bug: v8:12612
Change-Id: Idfd925ccdf30215998ab6e7cc632ce750fa2077a
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3816661
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82327}
parent 6de5fb85
......@@ -93,9 +93,7 @@ TEST(ArrayBuffer_OnlyScavenge) {
CHECK(IsTrackedYoung(heap, extension));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTrackedYoung(heap, extension));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTrackedOld(heap, extension));
heap::GcAndSweep(heap, NEW_SPACE);
heap::GcAndSweep(heap, OLD_SPACE);
CHECK(IsTrackedOld(heap, extension));
}
heap::GcAndSweep(heap, OLD_SPACE);
......@@ -121,13 +119,13 @@ TEST(ArrayBuffer_ScavengeAndMC) {
CHECK(IsTrackedYoung(heap, extension));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTrackedYoung(heap, extension));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTrackedOld(heap, extension));
heap::GcAndSweep(heap, OLD_SPACE);
CHECK(IsTrackedOld(heap, extension));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTrackedOld(heap, extension));
}
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTrackedOld(heap, extension));
heap::GcAndSweep(heap, OLD_SPACE);
CHECK(!IsTracked(heap, extension));
}
......@@ -147,8 +145,7 @@ TEST(ArrayBuffer_Compaction) {
Local<v8::ArrayBuffer> ab1 = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf1 = v8::Utils::OpenHandle(*ab1);
CHECK(IsTracked(heap, *buf1));
heap::GcAndSweep(heap, NEW_SPACE);
heap::GcAndSweep(heap, NEW_SPACE);
heap::GcAndSweep(heap, OLD_SPACE);
Page* page_before_gc = Page::FromHeapObject(*buf1);
heap::ForceEvacuationCandidate(page_before_gc);
......
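The manual-evacuation step above follows a pattern that recurs in later hunks.
Roughly, assuming FLAG_manual_evacuation_candidates_selection is enabled as in
these tests:

  // Make sure the object is already in old space, then force its page to be
  // an evacuation candidate so the next full GC moves it to a new address.
  heap::GcAndSweep(heap, OLD_SPACE);
  heap::ForceEvacuationCandidate(Page::FromHeapObject(*buf1));
  heap::GcAndSweep(heap, OLD_SPACE);
  // buf1 now lives at a different address than before the second GC.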
......@@ -154,9 +154,8 @@ TEST(ExternalString_ExternalBackingStoreSizeIncreasesAfterExternalization) {
CHECK_EQ(0, heap->new_space()->ExternalBackingStoreBytes(type) -
new_backing_store_before);
// Trigger GCs so that the newly allocated string moves to old gen.
heap::GcAndSweep(heap, NEW_SPACE); // in survivor space now
heap::GcAndSweep(heap, NEW_SPACE); // in old gen now
// Trigger full GC so that the newly allocated string moves to old gen.
heap::GcAndSweep(heap, OLD_SPACE);
bool success =
str->MakeExternal(new TestOneByteResource(i::StrDup(TEST_STR)));
......
......@@ -2034,6 +2034,11 @@ TEST(TestAlignedAllocation) {
HeapObject obj;
HeapObject filler;
if (double_misalignment) {
if (FLAG_minor_mc) {
// Make one allocation to force allocating an allocation area. Using
// kDoubleSize so as not to change the space alignment.
USE(CcTest::heap()->new_space()->AllocateRawUnaligned(kDoubleSize));
}
// Allocate a pointer sized object that must be double aligned at an
// aligned address.
start = AlignNewSpace(kDoubleAligned, 0);
......@@ -4458,8 +4463,7 @@ TEST(NewSpaceObjectsInOptimizedCode) {
.ToLocalChecked())));
CHECK(Heap::InYoungGeneration(*foo));
CcTest::CollectGarbage(NEW_SPACE);
CcTest::CollectGarbage(NEW_SPACE);
CcTest::CollectGarbage(OLD_SPACE);
CHECK(!Heap::InYoungGeneration(*foo));
#ifdef VERIFY_HEAP
CcTest::heap()->Verify();
......@@ -5463,8 +5467,7 @@ TEST(NewSpaceAllocationCounter) {
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();
size_t counter1 = heap->NewSpaceAllocationCounter();
CcTest::CollectGarbage(NEW_SPACE);
CcTest::CollectGarbage(NEW_SPACE); // Ensure new space is empty.
CcTest::CollectGarbage(OLD_SPACE); // Ensure new space is empty.
const size_t kSize = 1024;
AllocateInSpace(isolate, kSize, NEW_SPACE);
size_t counter2 = heap->NewSpaceAllocationCounter();
......@@ -6464,7 +6467,18 @@ TEST(RememberedSet_InsertOnPromotingObjectToOld) {
// Create a young object and age it one generation inside the new space.
Handle<FixedArray> arr = factory->NewFixedArray(1);
CcTest::CollectGarbage(i::NEW_SPACE);
std::vector<Handle<FixedArray>> handles;
if (FLAG_minor_mc) {
NewSpace* new_space = heap->new_space();
CHECK(!new_space->IsAtMaximumCapacity());
// Fill current pages to force MinorMC to promote them.
SimulateFullSpace(new_space, &handles);
SafepointScope scope(heap);
// New empty pages should remain in new space.
new_space->Grow();
} else {
CcTest::CollectGarbage(i::NEW_SPACE);
}
CHECK(Heap::InYoungGeneration(*arr));
// Add into 'arr' a reference to an object one generation younger.
......@@ -6516,7 +6530,7 @@ TEST(RememberedSet_RemoveStaleOnScavenge) {
// Run GC to promote the remaining young object and fixup the stale entries in
// the remembered set.
CcTest::CollectGarbage(i::NEW_SPACE);
CcTest::CollectGarbage(i::OLD_SPACE);
CHECK_EQ(0, GetRememberedSetSize<OLD_TO_NEW>(*tail));
}
......@@ -7257,9 +7271,9 @@ TEST(Regress978156) {
// 1. Ensure that the new space is empty.
CcTest::CollectGarbage(NEW_SPACE);
CcTest::CollectGarbage(NEW_SPACE);
// 2. Fill the first page of the new space with FixedArrays.
// 2. Fill the new space with FixedArrays.
std::vector<Handle<FixedArray>> arrays;
i::heap::FillCurrentPage(heap->new_space(), &arrays);
i::heap::SimulateFullSpace(heap->new_space(), &arrays);
// 3. Trim the last array by one word thus creating a one-word filler.
Handle<FixedArray> last = arrays.back();
CHECK_GT(last->length(), 0);
......
......@@ -69,12 +69,10 @@ UNINITIALIZED_TEST(PagePromotion_NewToOld) {
// Ensure that the new space is empty so that the page to be promoted
// does not contain the age mark.
heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
heap->CollectGarbage(OLD_SPACE, i::GarbageCollectionReason::kTesting);
std::vector<Handle<FixedArray>> handles;
heap::SimulateFullSpace(heap->new_space(), &handles);
heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
CHECK_GT(handles.size(), 0u);
Page* const to_be_promoted_page = FindLastPageInNewSpace(handles);
CHECK_NOT_NULL(to_be_promoted_page);
......
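The setup above (and the FLAG_minor_mc branch in the RememberedSet hunk
earlier) relies on the same idea: a full new-space page gets promoted wholesale
by the next young GC. Roughly, using the heap-utils helpers:

  // Fill the currently allocated new-space pages with FixedArrays.
  std::vector<Handle<FixedArray>> handles;
  heap::SimulateFullSpace(heap->new_space(), &handles);
  // A young GC can now promote the full page(s) instead of copying objects
  // one by one; the handles keep the arrays (and thus the pages) alive.
  heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);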
......@@ -267,8 +267,7 @@ TEST(ObjectWithWeakReferencePromoted) {
CHECK(Heap::InYoungGeneration(*fixed_array));
lh->set_data1(HeapObjectReference::Weak(*fixed_array));
CcTest::CollectGarbage(NEW_SPACE);
CcTest::CollectGarbage(NEW_SPACE);
CcTest::CollectGarbage(OLD_SPACE);
CHECK(heap->InOldSpace(*lh));
CHECK(heap->InOldSpace(*fixed_array));
......@@ -290,11 +289,7 @@ TEST(ObjectWithClearedWeakReferencePromoted) {
lh->set_data1(HeapObjectReference::ClearedValue(isolate));
CcTest::CollectGarbage(NEW_SPACE);
CHECK(Heap::InYoungGeneration(*lh));
CHECK(lh->data1()->IsCleared());
CcTest::CollectGarbage(NEW_SPACE);
CcTest::CollectGarbage(OLD_SPACE);
CHECK(heap->InOldSpace(*lh));
CHECK(lh->data1()->IsCleared());
......
......@@ -680,9 +680,8 @@ TEST(MakingExternalStringConditions) {
if (!v8::internal::FLAG_single_generation) {
// We should refuse to externalize new space strings.
CHECK(!local_string->CanMakeExternal());
// Trigger GCs so that the newly allocated string moves to old gen.
CcTest::CollectGarbage(i::NEW_SPACE); // in survivor space now
CcTest::CollectGarbage(i::NEW_SPACE); // in old gen now
// Trigger full GC so that the newly allocated string moves to old gen.
CcTest::CollectGarbage(i::OLD_SPACE);
}
// Old space strings should be accepted.
CHECK(local_string->CanMakeExternal());
......@@ -714,9 +713,8 @@ TEST(MakingExternalOneByteStringConditions) {
if (!v8::internal::FLAG_single_generation) {
// We should refuse to externalize new space strings.
CHECK(!local_string->CanMakeExternal());
// Trigger GCs so that the newly allocated string moves to old gen.
CcTest::CollectGarbage(i::NEW_SPACE); // in survivor space now
CcTest::CollectGarbage(i::NEW_SPACE); // in old gen now
// Trigger full GC so that the newly allocated string moves to old gen.
CcTest::CollectGarbage(i::OLD_SPACE);
CHECK(!tiny_local_string->CanMakeExternal());
}
// Old space strings should be accepted.
......@@ -73,6 +73,13 @@ TEST(Regress340063) {
TEST(Regress470390) {
#ifdef VERIFY_HEAP
// With MinorMC, we may have objects allocated after `new_space->top()`. If
// the next object after `new_space->top()` is an invalid memento, heap
// verification would fail.
if (FLAG_minor_mc) return;
#endif // VERIFY_HEAP
CcTest::InitializeVM();
if (!i::FLAG_allocation_site_pretenuring || FLAG_single_generation) return;
v8::HandleScope scope(CcTest::isolate());
......
......@@ -9,6 +9,7 @@
#include "src/heap/parked-scope.h"
#include "src/objects/objects-inl.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-utils.h"
namespace v8 {
namespace internal {
......@@ -674,6 +675,7 @@ UNINITIALIZED_TEST(PromotionMarkCompact) {
FLAG_stress_concurrent_allocation = false; // For SealCurrentObjects.
FLAG_shared_string_table = true;
FLAG_manual_evacuation_candidates_selection = true;
MultiClientIsolateTest test;
IsolateWrapper isolate_wrapper(test.NewClientIsolate());
......@@ -697,10 +699,13 @@ UNINITIALIZED_TEST(PromotionMarkCompact) {
CHECK(String::IsInPlaceInternalizable(*one_byte_seq));
CHECK(heap->InSpace(*one_byte_seq, NEW_SPACE));
for (int i = 0; i < 2; i++) {
heap->CollectAllGarbage(Heap::kNoGCFlags,
GarbageCollectionReason::kTesting);
}
// 1st GC moves `one_byte_seq` to old space and 2nd GC evacuates it within
// old space.
heap->CollectAllGarbage(Heap::kNoGCFlags,
GarbageCollectionReason::kTesting);
heap::ForceEvacuationCandidate(i::Page::FromHeapObject(*one_byte_seq));
heap->CollectAllGarbage(Heap::kNoGCFlags,
GarbageCollectionReason::kTesting);
// In-place-internalizable strings are promoted into the shared heap when
// sharing.
......@@ -710,6 +715,7 @@ UNINITIALIZED_TEST(PromotionMarkCompact) {
}
UNINITIALIZED_TEST(PromotionScavenge) {
if (FLAG_minor_mc) return;
if (FLAG_single_generation) return;
if (!ReadOnlyHeap::IsReadOnlySpaceShared()) return;
if (!COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL) return;
......
......@@ -210,13 +210,15 @@ TEST(WeakMapScavenge) {
CHECK(EphemeronHashTableContainsKey(
EphemeronHashTable::cast(weakmap->table()), *object));
heap::GcAndSweep(isolate->heap(), NEW_SPACE);
CHECK(ObjectInYoungGeneration(*object));
CHECK(!ObjectInYoungGeneration(weakmap->table()));
CHECK(EphemeronHashTableContainsKey(
EphemeronHashTable::cast(weakmap->table()), *object));
if (!FLAG_minor_mc) {
heap::GcAndSweep(isolate->heap(), NEW_SPACE);
CHECK(ObjectInYoungGeneration(*object));
CHECK(!ObjectInYoungGeneration(weakmap->table()));
CHECK(EphemeronHashTableContainsKey(
EphemeronHashTable::cast(weakmap->table()), *object));
}
heap::GcAndSweep(isolate->heap(), NEW_SPACE);
heap::GcAndSweep(isolate->heap(), OLD_SPACE);
CHECK(!ObjectInYoungGeneration(*object));
CHECK(!ObjectInYoungGeneration(weakmap->table()));
CHECK(EphemeronHashTableContainsKey(
......
......@@ -961,8 +961,7 @@ V8_NOINLINE void StackToHeapTest(v8::Isolate* v8_isolate,
EXPECT_TRUE(InCorrectGeneration(*v8::Utils::OpenHandle(*to_object)));
if (!FLAG_single_generation &&
target_handling == TargetHandling::kInitializedOldGen) {
YoungGC(v8_isolate);
YoungGC(v8_isolate);
FullGC(v8_isolate);
EXPECT_FALSE(
i::Heap::InYoungGeneration(*v8::Utils::OpenHandle(*to_object)));
}
......@@ -1003,8 +1002,7 @@ V8_NOINLINE void HeapToStackTest(v8::Isolate* v8_isolate,
EXPECT_TRUE(InCorrectGeneration(*v8::Utils::OpenHandle(*to_object)));
if (!FLAG_single_generation &&
target_handling == TargetHandling::kInitializedOldGen) {
YoungGC(v8_isolate);
YoungGC(v8_isolate);
FullGC(v8_isolate);
EXPECT_FALSE(
i::Heap::InYoungGeneration(*v8::Utils::OpenHandle(*to_object)));
}
......@@ -1044,8 +1042,7 @@ V8_NOINLINE void StackToStackTest(v8::Isolate* v8_isolate,
EXPECT_TRUE(InCorrectGeneration(*v8::Utils::OpenHandle(*to_object)));
if (!FLAG_single_generation &&
target_handling == TargetHandling::kInitializedOldGen) {
YoungGC(v8_isolate);
YoungGC(v8_isolate);
FullGC(v8_isolate);
EXPECT_FALSE(
i::Heap::InYoungGeneration(*v8::Utils::OpenHandle(*to_object)));
}
......
......@@ -761,6 +761,7 @@ TEST_F(ScannerStreamsTest, TestOverlongAndInvalidSequences) {
TEST_F(ScannerStreamsTest, RelocatingCharacterStream) {
// This test relies on the invariant that the scavenger will move objects
if (i::FLAG_single_generation) return;
i::FLAG_manual_evacuation_candidates_selection = true;
v8::internal::ManualGCScope manual_gc_scope(i_isolate());
v8::HandleScope scope(isolate());
......@@ -783,7 +784,13 @@ TEST_F(ScannerStreamsTest, RelocatingCharacterStream) {
CHECK_EQ('b', two_byte_string_stream->Advance());
CHECK_EQ(size_t{2}, two_byte_string_stream->pos());
i::String raw = *two_byte_string;
i_isolate()->heap()->CollectGarbage(i::NEW_SPACE,
// 1st GC moves `two_byte_string` to old space and 2nd GC evacuates it within
// old space.
i_isolate()->heap()->CollectGarbage(i::OLD_SPACE,
i::GarbageCollectionReason::kUnknown);
i::Page::FromHeapObject(*two_byte_string)
->SetFlag(i::MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
i_isolate()->heap()->CollectGarbage(i::OLD_SPACE,
i::GarbageCollectionReason::kUnknown);
// GC moved the string.
CHECK_NE(raw, *two_byte_string);
......@@ -794,6 +801,7 @@ TEST_F(ScannerStreamsTest, RelocatingCharacterStream) {
TEST_F(ScannerStreamsTest, RelocatingUnbufferedCharacterStream) {
// This test relies on the invariant that the scavenger will move objects
if (i::FLAG_single_generation) return;
i::FLAG_manual_evacuation_candidates_selection = true;
v8::internal::ManualGCScope manual_gc_scope(i_isolate());
v8::HandleScope scope(isolate());
......@@ -819,7 +827,13 @@ TEST_F(ScannerStreamsTest, RelocatingUnbufferedCharacterStream) {
CHECK_EQ(size_t{3}, two_byte_string_stream->pos());
i::String raw = *two_byte_string;
i_isolate()->heap()->CollectGarbage(i::NEW_SPACE,
// 1st GC moves `two_byte_string` to old space and 2nd GC evacuates it within
// old space.
i_isolate()->heap()->CollectGarbage(i::OLD_SPACE,
i::GarbageCollectionReason::kUnknown);
i::Page::FromHeapObject(*two_byte_string)
->SetFlag(i::MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
i_isolate()->heap()->CollectGarbage(i::OLD_SPACE,
i::GarbageCollectionReason::kUnknown);
// GC moved the string and buffer was updated to the correct location.
CHECK_NE(raw, *two_byte_string);
......
......@@ -815,10 +815,10 @@ TEST_F(IdentityMapTest, GCShortCutting) {
t.map.Insert(internalized_string, &internalized_string);
// Do an explicit, real GC, this should short-cut the thin string to point
// to the internalized string.
// to the internalized string (this is not implemented for MinorMC).
isolate()->heap()->CollectGarbage(i::NEW_SPACE,
i::GarbageCollectionReason::kTesting);
DCHECK_IMPLIES(!FLAG_optimize_for_size,
DCHECK_IMPLIES(!FLAG_minor_mc && !FLAG_optimize_for_size,
*thin_string == *internalized_string);
// Check that getting the object points to one of the handles.
......