Commit 9d3d5f22 authored by Nikolaos Papaspyrou, committed by V8 LUCI CQ

heap: Refactor sweeping finalization for young GC

Before a young GC, the scavenger finalizes a pending full GC sweeping,
in case there are no more running sweeping jobs, to avoid unnecessarily
pausing sweeping and then resuming it.

This CL moves this sweeping finalization from
ScavengerCollector::CollectGarbage to Heap::CompleteSweepingYoung, so
that it is also performed for the minor mark-compactor and that sweeping
is correctly attributed to the previous full GC cycle (instead of the
beginning young cycle). Furthermore, it also finalizes CppGC sweeping
if there are no more running sweeping jobs.

Bug: chromium:1154636
Change-Id: Ic9ba4803f49db32c0a539f080329f012859bc8bc
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3508011
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Nico Hartmann <nicohartmann@chromium.org>
Commit-Queue: Nikolaos Papaspyrou <nikolaos@chromium.org>
Cr-Commit-Position: refs/heads/main@{#79407}
parent bc0c89b4
......@@ -516,7 +516,8 @@ Handle<Object> TranslatedValue::GetValue() {
// headers.
// TODO(hpayer): Find a cleaner way to support a group of
// non-fully-initialized objects.
isolate()->heap()->mark_compact_collector()->EnsureSweepingCompleted();
isolate()->heap()->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
// 2. Initialize the objects. If we have allocated only byte arrays
// for some objects, we now overwrite the byte arrays with the
......
......@@ -824,6 +824,8 @@ CppHeap::MetricRecorderAdapter* CppHeap::GetMetricRecorder() const {
// Forces finalization of any in-progress C++ heap sweeping by delegating to
// the underlying sweeper.
void CppHeap::FinishSweepingIfRunning() { sweeper_.FinishIfRunning(); }
// Finalizes sweeping only if the sweeper reports it has run out of concurrent
// work; otherwise background sweeping is left running undisturbed.
void CppHeap::FinishSweepingIfOutOfWork() { sweeper_.FinishIfOutOfWork(); }
std::unique_ptr<CppMarkingState> CppHeap::CreateCppMarkingState() {
DCHECK(IsMarking());
return std::make_unique<CppMarkingState>(
......
......@@ -118,6 +118,7 @@ class V8_EXPORT_PRIVATE CppHeap final
std::unique_ptr<CustomSpaceStatisticsReceiver>);
void FinishSweepingIfRunning();
void FinishSweepingIfOutOfWork();
void InitializeTracing(
cppgc::internal::GarbageCollector::Config::CollectionType,
......
......@@ -809,10 +809,25 @@ class Sweeper::SweeperImpl final {
NotifyDone();
}
// Finalizes sweeping on the mutator thread, but only when sweeping is in
// progress, the mutator thread is not already sweeping, and the concurrent
// sweeper task exists, is valid, and is no longer active. In every other
// case this is a no-op, so a still-running concurrent sweep is never paused.
void FinishIfOutOfWork() {
if (is_in_progress_ && !is_sweeping_on_mutator_thread_ &&
concurrent_sweeper_handle_ && concurrent_sweeper_handle_->IsValid() &&
!concurrent_sweeper_handle_->IsActive()) {
// At this point we know that the concurrent sweeping task has run
// out-of-work: all pages are swept. The main thread still needs to finish
// sweeping though.
DCHECK(std::all_of(space_states_.begin(), space_states_.end(),
[](const SpaceState& state) {
return state.unswept_pages.IsEmpty();
}));
FinishIfRunning();
}
}
void Finish() {
DCHECK(is_in_progress_);
MutatorThreadSweepingScope sweeping_in_progresss(*this);
MutatorThreadSweepingScope sweeping_in_progress(*this);
// First, call finalizers on the mutator thread.
SweepFinalizer finalizer(platform_, config_.free_memory_handling);
......@@ -1001,6 +1016,7 @@ void Sweeper::Start(SweepingConfig config) {
impl_->Start(config, heap_.platform());
}
// Thin forwarding wrappers: the public Sweeper API delegates to SweeperImpl.
void Sweeper::FinishIfRunning() { impl_->FinishIfRunning(); }
void Sweeper::FinishIfOutOfWork() { impl_->FinishIfOutOfWork(); }
// Test-only hook to block until concurrent sweeping has drained.
void Sweeper::WaitForConcurrentSweepingForTesting() {
impl_->WaitForConcurrentSweepingForTesting();
}
......
......@@ -48,6 +48,7 @@ class V8_EXPORT_PRIVATE Sweeper final {
// Sweeper::Start assumes the heap holds no linear allocation buffers.
void Start(SweepingConfig);
void FinishIfRunning();
void FinishIfOutOfWork();
void NotifyDoneIfNeeded();
// SweepForAllocationIfRunning sweeps the given |space| until a slot that can
// fit an allocation of size |size| is found. Returns true if a slot was
......
......@@ -2017,11 +2017,9 @@ void Heap::StartIncrementalMarking(int gc_flags,
void Heap::CompleteSweepingFull() {
array_buffer_sweeper()->EnsureFinished();
mark_compact_collector()->EnsureSweepingCompleted();
mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kUnifiedHeap);
DCHECK(!mark_compact_collector()->sweeping_in_progress());
if (cpp_heap()) {
CppHeap::From(cpp_heap())->FinishSweepingIfRunning();
}
tracer()->StopCycleIfSweeping();
}
......@@ -2412,8 +2410,15 @@ void Heap::CompleteSweepingYoung(GarbageCollector collector) {
UNREACHABLE();
}
TRACE_GC_EPOCH(tracer(), scope_id, ThreadKind::kMain);
array_buffer_sweeper()->EnsureFinished();
{
TRACE_GC_EPOCH(tracer(), scope_id, ThreadKind::kMain);
array_buffer_sweeper()->EnsureFinished();
}
// If sweeping is in progress and there are no sweeper tasks running, finish
// the sweeping here, to avoid having to pause and resume during the young
// generation GC.
mark_compact_collector()->FinishSweepingIfOutOfWork();
}
void Heap::EnsureSweepingCompleted(HeapObject object) {
......@@ -3610,7 +3615,8 @@ void Heap::CreateFillerForArray(T object, int elements_to_trim,
}
void Heap::MakeHeapIterable() {
mark_compact_collector()->EnsureSweepingCompleted();
mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
safepoint()->IterateLocalHeaps([](LocalHeap* local_heap) {
local_heap->MakeLinearAllocationAreaIterable();
......
......@@ -677,28 +677,51 @@ void MarkCompactCollector::VerifyMarkbitsAreClean() {
#endif // VERIFY_HEAP
void MarkCompactCollector::EnsureSweepingCompleted() {
if (!sweeper()->sweeping_in_progress()) return;
// If full-GC sweeping is still marked in progress but (with concurrent
// sweeping enabled) no sweeper tasks are running anymore, finalizes sweeping
// synchronously on the main thread, restricted to the V8 heap (kV8Only).
// This avoids a later young-generation GC having to pause and then resume
// the sweeper. The C++ managed heap, if present, is likewise given the
// chance to finalize an out-of-work sweep.
void MarkCompactCollector::FinishSweepingIfOutOfWork() {
if (sweeper()->sweeping_in_progress() && FLAG_concurrent_sweeping &&
!sweeper()->AreSweeperTasksRunning()) {
// At this point we know that all concurrent sweeping tasks have run
// out of work and quit: all pages are swept. The main thread still needs
// to complete sweeping though.
EnsureSweepingCompleted(SweepingForcedFinalizationMode::kV8Only);
}
if (heap()->cpp_heap()) {
// Ensure that sweeping is also completed for the C++ managed heap, if one
// exists and it's out of work.
CppHeap::From(heap()->cpp_heap())->FinishSweepingIfOutOfWork();
}
}
void MarkCompactCollector::EnsureSweepingCompleted(
SweepingForcedFinalizationMode mode) {
if (sweeper()->sweeping_in_progress()) {
TRACE_GC_EPOCH(heap()->tracer(), GCTracer::Scope::MC_COMPLETE_SWEEPING,
ThreadKind::kMain);
sweeper()->EnsureCompleted();
heap()->old_space()->RefillFreeList();
heap()->code_space()->RefillFreeList();
if (heap()->map_space()) {
heap()->map_space()->RefillFreeList();
heap()->map_space()->SortFreeList();
}
TRACE_GC_EPOCH(heap()->tracer(), GCTracer::Scope::MC_COMPLETE_SWEEPING,
ThreadKind::kMain);
heap()->tracer()->NotifySweepingCompleted();
sweeper()->EnsureCompleted();
heap()->old_space()->RefillFreeList();
heap()->code_space()->RefillFreeList();
if (heap()->map_space()) {
heap()->map_space()->RefillFreeList();
heap()->map_space()->SortFreeList();
#ifdef VERIFY_HEAP
if (FLAG_verify_heap && !evacuation()) {
FullEvacuationVerifier verifier(heap());
verifier.Run();
}
#endif
}
heap()->tracer()->NotifySweepingCompleted();
#ifdef VERIFY_HEAP
if (FLAG_verify_heap && !evacuation()) {
FullEvacuationVerifier verifier(heap());
verifier.Run();
if (mode == SweepingForcedFinalizationMode::kUnifiedHeap &&
heap()->cpp_heap()) {
// Ensure that sweeping is also completed for the C++ managed heap, if one
// exists.
CppHeap::From(heap()->cpp_heap())->FinishSweepingIfRunning();
}
#endif
}
void MarkCompactCollector::EnsurePageIsSwept(Page* page) {
......
......@@ -530,10 +530,15 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
bool is_compacting() const { return compacting_; }
bool is_shared_heap() const { return is_shared_heap_; }
void FinishSweepingIfOutOfWork();
enum class SweepingForcedFinalizationMode { kUnifiedHeap, kV8Only };
// Ensures that sweeping is finished.
//
// Note: Can only be called safely from main thread.
V8_EXPORT_PRIVATE void EnsureSweepingCompleted();
V8_EXPORT_PRIVATE void EnsureSweepingCompleted(
SweepingForcedFinalizationMode mode);
void EnsurePageIsSwept(Page* page);
......
......@@ -295,16 +295,6 @@ void ScavengerCollector::CollectGarbage() {
{
Sweeper* sweeper = heap_->mark_compact_collector()->sweeper();
// Try to finish sweeping here, such that the following code doesn't need to
// pause & resume sweeping.
if (sweeper->sweeping_in_progress() && FLAG_concurrent_sweeping &&
!sweeper->AreSweeperTasksRunning()) {
// At this point we know that all concurrent sweeping tasks have run
// out-of-work and quit: all pages are swept. The main thread still needs
// to complete sweeping though.
heap_->mark_compact_collector()->EnsureSweepingCompleted();
}
// Pause the concurrent sweeper.
Sweeper::PauseOrCompleteScope pause_scope(sweeper);
// Filter out pages from the sweeper that need to be processed for old to
......
......@@ -36,7 +36,8 @@ Sweeper::PauseOrCompleteScope::PauseOrCompleteScope(Sweeper* sweeper)
// Complete sweeping if there's nothing more to do.
if (sweeper_->IsDoneSweeping()) {
sweeper_->heap_->mark_compact_collector()->EnsureSweepingCompleted();
sweeper_->heap_->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
DCHECK(!sweeper_->sweeping_in_progress());
} else {
// Unless sweeping is complete the flag still indicates that the sweeper
......
......@@ -31,7 +31,8 @@ void SealCurrentObjects(Heap* heap) {
CHECK(!FLAG_stress_concurrent_allocation);
CcTest::CollectAllGarbage();
CcTest::CollectAllGarbage();
heap->mark_compact_collector()->EnsureSweepingCompleted();
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
heap->old_space()->FreeLinearAllocationArea();
for (Page* page : *heap->old_space()) {
page->MarkNeverAllocateForTesting();
......@@ -172,7 +173,8 @@ void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) {
i::MarkCompactCollector* collector = heap->mark_compact_collector();
if (collector->sweeping_in_progress()) {
SafepointScope scope(heap);
collector->EnsureSweepingCompleted();
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
}
CHECK(marking->IsMarking() || marking->IsStopped() || marking->IsComplete());
if (marking->IsStopped()) {
......@@ -201,7 +203,8 @@ void SimulateFullSpace(v8::internal::PagedSpace* space) {
CodePageCollectionMemoryModificationScope modification_scope(space->heap());
i::MarkCompactCollector* collector = space->heap()->mark_compact_collector();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
}
space->FreeLinearAllocationArea();
space->ResetFreeList();
......@@ -218,7 +221,8 @@ void GcAndSweep(Heap* heap, AllocationSpace space) {
heap->CollectGarbage(space, GarbageCollectionReason::kTesting);
if (heap->mark_compact_collector()->sweeping_in_progress()) {
SafepointScope scope(heap);
heap->mark_compact_collector()->EnsureSweepingCompleted();
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
}
}
......
......@@ -79,7 +79,8 @@ HEAP_TEST(CompactionFullAbortedPage) {
heap->set_force_oom(true);
CcTest::CollectAllGarbage();
heap->mark_compact_collector()->EnsureSweepingCompleted();
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
// Check that all handles still point to the same page, i.e., compaction
// has been aborted on the page.
......@@ -160,7 +161,8 @@ HEAP_TEST(CompactionPartiallyAbortedPage) {
heap->set_force_oom(true);
CcTest::CollectAllGarbage();
heap->mark_compact_collector()->EnsureSweepingCompleted();
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
bool migration_aborted = false;
for (Handle<FixedArray> object : compaction_page_handles) {
......@@ -256,7 +258,8 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithInvalidatedSlots) {
heap->set_force_oom(true);
CcTest::CollectAllGarbage();
heap->mark_compact_collector()->EnsureSweepingCompleted();
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
CHECK_EQ(Page::FromHeapObject(*compaction_page_handles.front()),
page_to_fill);
......@@ -334,7 +337,8 @@ HEAP_TEST(CompactionPartiallyAbortedPageIntraAbortedPointers) {
heap->set_force_oom(true);
CcTest::CollectAllGarbage();
heap->mark_compact_collector()->EnsureSweepingCompleted();
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
// The following check makes sure that we compacted "some" objects, while
// leaving others in place.
......@@ -435,7 +439,8 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithRememberedSetEntries) {
heap->set_force_oom(true);
CcTest::CollectAllGarbage();
heap->mark_compact_collector()->EnsureSweepingCompleted();
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
// The following check makes sure that we compacted "some" objects, while
// leaving others in place.
......
......@@ -34,7 +34,8 @@ TEST(ConcurrentMarking) {
if (!heap->incremental_marking()->IsStopped()) return;
MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
}
MarkingWorklists marking_worklists;
......@@ -56,7 +57,8 @@ TEST(ConcurrentMarkingReschedule) {
if (!heap->incremental_marking()->IsStopped()) return;
MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
}
MarkingWorklists marking_worklists;
......@@ -82,7 +84,8 @@ TEST(ConcurrentMarkingPreemptAndReschedule) {
if (!heap->incremental_marking()->IsStopped()) return;
MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
}
MarkingWorklists marking_worklists;
......
......@@ -275,7 +275,8 @@ TEST(FinalizeTracingWhenMarking) {
// Finalize a potentially running garbage collection.
heap->CollectGarbage(OLD_SPACE, GarbageCollectionReason::kTesting);
if (heap->mark_compact_collector()->sweeping_in_progress()) {
heap->mark_compact_collector()->EnsureSweepingCompleted();
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
}
heap->tracer()->StopCycleIfSweeping();
CHECK(heap->incremental_marking()->IsStopped());
......
......@@ -1622,7 +1622,8 @@ TEST(TestSizeOfRegExpCode) {
CcTest::CollectAllAvailableGarbage();
MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
}
int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
......@@ -1663,7 +1664,8 @@ HEAP_TEST(TestSizeOfObjects) {
// the heap size and return with sweeping finished completely.
CcTest::CollectAllAvailableGarbage();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
}
int initial_size = static_cast<int>(heap->SizeOfObjects());
......@@ -1688,7 +1690,8 @@ HEAP_TEST(TestSizeOfObjects) {
CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects()));
// Waiting for sweeper threads should not change heap size.
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
}
CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects()));
}
......@@ -2321,7 +2324,8 @@ HEAP_TEST(GCFlags) {
MarkCompactCollector* collector = heap->mark_compact_collector();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
}
IncrementalMarking* marking = heap->incremental_marking();
......@@ -5419,7 +5423,8 @@ HEAP_TEST(Regress587004) {
CcTest::CollectGarbage(OLD_SPACE);
heap::SimulateFullSpace(heap->old_space());
heap->RightTrimFixedArray(*array, N - 1);
heap->mark_compact_collector()->EnsureSweepingCompleted();
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
ByteArray byte_array;
const int M = 256;
// Don't allow old space expansion. The test works without this flag too,
......@@ -5591,7 +5596,8 @@ TEST(Regress598319) {
CcTest::CollectGarbage(OLD_SPACE);
MarkCompactCollector* collector = heap->mark_compact_collector();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
}
CHECK(heap->lo_space()->Contains(arr.get()));
......@@ -5665,7 +5671,8 @@ Handle<FixedArray> ShrinkArrayAndCheckSize(Heap* heap, int length) {
for (int i = 0; i < 5; i++) {
CcTest::CollectAllGarbage();
}
heap->mark_compact_collector()->EnsureSweepingCompleted();
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
// Disable LAB, such that calculations with SizeOfObjects() and object size
// are correct.
heap->DisableInlineAllocation();
......@@ -5680,7 +5687,8 @@ Handle<FixedArray> ShrinkArrayAndCheckSize(Heap* heap, int length) {
CHECK_EQ(size_after_allocation, size_after_shrinking);
// GC and sweeping update the size to account for shrinking.
CcTest::CollectAllGarbage();
heap->mark_compact_collector()->EnsureSweepingCompleted();
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
intptr_t size_after_gc = heap->SizeOfObjects();
CHECK_EQ(size_after_gc, size_before_allocation + array->Size());
return array;
......@@ -5717,7 +5725,8 @@ TEST(Regress615489) {
i::MarkCompactCollector* collector = heap->mark_compact_collector();
i::IncrementalMarking* marking = heap->incremental_marking();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
}
CHECK(marking->IsMarking() || marking->IsStopped());
if (marking->IsStopped()) {
......@@ -5826,7 +5835,8 @@ TEST(LeftTrimFixedArrayInBlackArea) {
i::MarkCompactCollector* collector = heap->mark_compact_collector();
i::IncrementalMarking* marking = heap->incremental_marking();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
}
CHECK(marking->IsMarking() || marking->IsStopped());
if (marking->IsStopped()) {
......@@ -5867,7 +5877,8 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
i::MarkCompactCollector* collector = heap->mark_compact_collector();
i::IncrementalMarking* marking = heap->incremental_marking();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
}
CHECK(marking->IsMarking() || marking->IsStopped());
if (marking->IsStopped()) {
......@@ -5936,7 +5947,8 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
i::MarkCompactCollector* collector = heap->mark_compact_collector();
i::IncrementalMarking* marking = heap->incremental_marking();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
}
CHECK(marking->IsMarking() || marking->IsStopped());
if (marking->IsStopped()) {
......@@ -6386,7 +6398,8 @@ HEAP_TEST(Regress670675) {
CcTest::CollectAllGarbage();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
}
heap->tracer()->StopCycleIfSweeping();
i::IncrementalMarking* marking = CcTest::heap()->incremental_marking();
......@@ -6440,7 +6453,8 @@ HEAP_TEST(RegressMissingWriteBarrierInAllocate) {
CcTest::CollectAllGarbage();
MarkCompactCollector* collector = heap->mark_compact_collector();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
}
CHECK(object->map().IsMap());
}
......
......@@ -205,7 +205,8 @@ HEAP_TEST(DoNotEvacuatePinnedPages) {
page->SetFlag(MemoryChunk::PINNED);
CcTest::CollectAllGarbage();
heap->mark_compact_collector()->EnsureSweepingCompleted();
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
// The pinned flag should prevent the page from moving.
for (Handle<FixedArray> object : handles) {
......@@ -215,7 +216,8 @@ HEAP_TEST(DoNotEvacuatePinnedPages) {
page->ClearFlag(MemoryChunk::PINNED);
CcTest::CollectAllGarbage();
heap->mark_compact_collector()->EnsureSweepingCompleted();
heap->mark_compact_collector()->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
// `compact_on_every_full_gc` ensures that this page is an evacuation
// candidate, so with the pin flag cleared compaction should now move it.
......@@ -410,7 +412,8 @@ TEST(Regress5829) {
i::MarkCompactCollector* collector = heap->mark_compact_collector();
i::IncrementalMarking* marking = heap->incremental_marking();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
}
CHECK(marking->IsMarking() || marking->IsStopped());
if (marking->IsStopped()) {
......
......@@ -16715,7 +16715,9 @@ TEST(TestIdleNotification) {
static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
IdlePauseInSeconds);
if (CcTest::heap()->mark_compact_collector()->sweeping_in_progress()) {
CcTest::heap()->mark_compact_collector()->EnsureSweepingCompleted();
CcTest::heap()->mark_compact_collector()->EnsureSweepingCompleted(
v8::internal::MarkCompactCollector::SweepingForcedFinalizationMode::
kV8Only);
}
}
intptr_t final_size = CcTest::heap()->SizeOfObjects();
......@@ -19,7 +19,8 @@ void HeapInternalsBase::SimulateIncrementalMarking(Heap* heap,
i::MarkCompactCollector* collector = heap->mark_compact_collector();
if (collector->sweeping_in_progress()) {
SafepointScope scope(heap);
collector->EnsureSweepingCompleted();
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
}
CHECK(marking->IsMarking() || marking->IsStopped() || marking->IsComplete());
if (marking->IsStopped()) {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment