Commit d5245542 authored by Dominik Inführ, committed by V8 LUCI CQ

[heap] Remove unused sweeping for iterability mechanism

Since we now promote all young objects into old space on full GCs, we
don't need to sweep pages for iterability anymore in new space.

Minor MC doesn't need to make a page iterable when promoting the full
page into the new space. This is because maps are not reclaimed during
a minor GC.

Bug: v8:12760
Change-Id: I16d666e417d00ebf450453864cbd87afd6606afc
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3635723
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#80447}
parent 7e233997
...@@ -818,7 +818,6 @@ void GCTracer::PrintNVP() const { ...@@ -818,7 +818,6 @@ void GCTracer::PrintNVP() const {
"gc=%s " "gc=%s "
"reduce_memory=%d " "reduce_memory=%d "
"minor_mc=%.2f " "minor_mc=%.2f "
"finish_sweeping=%.2f "
"time_to_safepoint=%.2f " "time_to_safepoint=%.2f "
"mark=%.2f " "mark=%.2f "
"mark.seed=%.2f " "mark.seed=%.2f "
...@@ -842,7 +841,6 @@ void GCTracer::PrintNVP() const { ...@@ -842,7 +841,6 @@ void GCTracer::PrintNVP() const {
"reset_liveness=%.2f\n", "reset_liveness=%.2f\n",
duration, spent_in_mutator, "mmc", current_.reduce_memory, duration, spent_in_mutator, "mmc", current_.reduce_memory,
current_scope(Scope::MINOR_MC), current_scope(Scope::MINOR_MC),
current_scope(Scope::MINOR_MC_SWEEPING),
current_scope(Scope::TIME_TO_SAFEPOINT), current_scope(Scope::TIME_TO_SAFEPOINT),
current_scope(Scope::MINOR_MC_MARK), current_scope(Scope::MINOR_MC_MARK),
current_scope(Scope::MINOR_MC_MARK_SEED), current_scope(Scope::MINOR_MC_MARK_SEED),
......
...@@ -2694,8 +2694,6 @@ void Heap::EvacuateYoungGeneration() { ...@@ -2694,8 +2694,6 @@ void Heap::EvacuateYoungGeneration() {
DCHECK(CanPromoteYoungAndExpandOldGeneration(0)); DCHECK(CanPromoteYoungAndExpandOldGeneration(0));
} }
mark_compact_collector()->sweeper()->EnsureIterabilityCompleted();
// Move pages from new->old generation. // Move pages from new->old generation.
PageRange range(new_space()->first_allocatable_address(), new_space()->top()); PageRange range(new_space()->first_allocatable_address(), new_space()->top());
for (auto it = range.begin(); it != range.end();) { for (auto it = range.begin(); it != range.end();) {
...@@ -2776,8 +2774,6 @@ void Heap::Scavenge() { ...@@ -2776,8 +2774,6 @@ void Heap::Scavenge() {
IncrementalMarking::PauseBlackAllocationScope pause_black_allocation( IncrementalMarking::PauseBlackAllocationScope pause_black_allocation(
incremental_marking()); incremental_marking());
mark_compact_collector()->sweeper()->EnsureIterabilityCompleted();
SetGCState(SCAVENGE); SetGCState(SCAVENGE);
// Flip the semispaces. After flipping, to space is empty, from space has // Flip the semispaces. After flipping, to space is empty, from space has
...@@ -3350,11 +3346,11 @@ void Heap::CreateFillerObjectAtBackground(Address addr, int size) { ...@@ -3350,11 +3346,11 @@ void Heap::CreateFillerObjectAtBackground(Address addr, int size) {
ClearRecordedSlots::kNo, VerifyNoSlotsRecorded::kNo); ClearRecordedSlots::kNo, VerifyNoSlotsRecorded::kNo);
} }
void Heap::CreateFillerObjectAtSweeper(Address addr, int size, void Heap::CreateFillerObjectAtSweeper(Address addr, int size) {
ClearFreedMemoryMode clear_memory_mode) {
// Do not verify whether slots are cleared here: the concurrent sweeper is not // Do not verify whether slots are cleared here: the concurrent sweeper is not
// allowed to access the main thread's remembered set. // allowed to access the main thread's remembered set.
CreateFillerObjectAtRaw(addr, size, clear_memory_mode, CreateFillerObjectAtRaw(addr, size,
ClearFreedMemoryMode::kDontClearFreedMemory,
ClearRecordedSlots::kNo, VerifyNoSlotsRecorded::kNo); ClearRecordedSlots::kNo, VerifyNoSlotsRecorded::kNo);
} }
......
...@@ -1831,10 +1831,8 @@ class Heap { ...@@ -1831,10 +1831,8 @@ class Heap {
// This method is used by the sweeper on free memory ranges to make the page // This method is used by the sweeper on free memory ranges to make the page
// iterable again. Unlike `CreateFillerObjectAt` this method will not verify // iterable again. Unlike `CreateFillerObjectAt` this method will not verify
// slots since the sweeper can run concurrently. The sweeper can also // slots since the sweeper can run concurrently.
// optionally clear the object payload. void CreateFillerObjectAtSweeper(Address addr, int size);
void CreateFillerObjectAtSweeper(Address addr, int size,
ClearFreedMemoryMode clear_memory_mode);
// Creates a filler object in the specificed memory area. This method is the // Creates a filler object in the specificed memory area. This method is the
// internal method used by all CreateFillerObjectAtXXX-methods. // internal method used by all CreateFillerObjectAtXXX-methods.
......
...@@ -1097,7 +1097,6 @@ void MarkCompactCollector::Finish() { ...@@ -1097,7 +1097,6 @@ void MarkCompactCollector::Finish() {
} }
sweeper()->StartSweeperTasks(); sweeper()->StartSweeperTasks();
sweeper()->StartIterabilityTasks();
// Give pages that are queued to be freed back to the OS. // Give pages that are queued to be freed back to the OS.
heap()->memory_allocator()->unmapper()->FreeQueuedChunks(); heap()->memory_allocator()->unmapper()->FreeQueuedChunks();
...@@ -5515,11 +5514,6 @@ void MinorMarkCompactCollector::CollectGarbage() { ...@@ -5515,11 +5514,6 @@ void MinorMarkCompactCollector::CollectGarbage() {
// Minor MC does not support processing the ephemeron remembered set. // Minor MC does not support processing the ephemeron remembered set.
DCHECK(heap()->ephemeron_remembered_set_.empty()); DCHECK(heap()->ephemeron_remembered_set_.empty());
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_SWEEPING);
heap()->mark_compact_collector()->sweeper()->EnsureIterabilityCompleted();
}
heap()->array_buffer_sweeper()->EnsureFinished(); heap()->array_buffer_sweeper()->EnsureFinished();
MarkLiveObjects(); MarkLiveObjects();
...@@ -5590,7 +5584,7 @@ void MinorMarkCompactCollector::MakeIterable( ...@@ -5590,7 +5584,7 @@ void MinorMarkCompactCollector::MakeIterable(
full_collector->non_atomic_marking_state()->bitmap(p)->ClearRange( full_collector->non_atomic_marking_state()->bitmap(p)->ClearRange(
p->AddressToMarkbitIndex(free_start), p->AddressToMarkbitIndex(free_start),
p->AddressToMarkbitIndex(free_end)); p->AddressToMarkbitIndex(free_end));
if (free_space_mode == ZAP_FREE_SPACE) { if (free_space_mode == FreeSpaceTreatmentMode::kZapFreeSpace) {
ZapCode(free_start, size); ZapCode(free_start, size);
} }
p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size)); p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size));
...@@ -5607,7 +5601,7 @@ void MinorMarkCompactCollector::MakeIterable( ...@@ -5607,7 +5601,7 @@ void MinorMarkCompactCollector::MakeIterable(
full_collector->non_atomic_marking_state()->bitmap(p)->ClearRange( full_collector->non_atomic_marking_state()->bitmap(p)->ClearRange(
p->AddressToMarkbitIndex(free_start), p->AddressToMarkbitIndex(free_start),
p->AddressToMarkbitIndex(p->area_end())); p->AddressToMarkbitIndex(p->area_end()));
if (free_space_mode == ZAP_FREE_SPACE) { if (free_space_mode == FreeSpaceTreatmentMode::kZapFreeSpace) {
ZapCode(free_start, size); ZapCode(free_start, size);
} }
p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size)); p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size));
...@@ -6171,13 +6165,14 @@ void YoungGenerationEvacuator::RawEvacuatePage(MemoryChunk* chunk, ...@@ -6171,13 +6165,14 @@ void YoungGenerationEvacuator::RawEvacuatePage(MemoryChunk* chunk,
marking_state->live_bytes(chunk)); marking_state->live_bytes(chunk));
if (!chunk->IsLargePage()) { if (!chunk->IsLargePage()) {
if (heap()->ShouldZapGarbage()) { if (heap()->ShouldZapGarbage()) {
collector_->MakeIterable(static_cast<Page*>(chunk), ZAP_FREE_SPACE); collector_->MakeIterable(static_cast<Page*>(chunk),
FreeSpaceTreatmentMode::kZapFreeSpace);
} else if (heap()->incremental_marking()->IsMarking()) { } else if (heap()->incremental_marking()->IsMarking()) {
// When incremental marking is on, we need to clear the mark bits of // When incremental marking is on, we need to clear the mark bits of
// the full collector. We cannot yet discard the young generation mark // the full collector. We cannot yet discard the young generation mark
// bits as they are still relevant for pointers updating. // bits as they are still relevant for pointers updating.
collector_->MakeIterable(static_cast<Page*>(chunk), collector_->MakeIterable(static_cast<Page*>(chunk),
IGNORE_FREE_SPACE); FreeSpaceTreatmentMode::kIgnoreFreeSpace);
} }
} }
break; break;
...@@ -6189,12 +6184,14 @@ void YoungGenerationEvacuator::RawEvacuatePage(MemoryChunk* chunk, ...@@ -6189,12 +6184,14 @@ void YoungGenerationEvacuator::RawEvacuatePage(MemoryChunk* chunk,
marking_state->live_bytes(chunk)); marking_state->live_bytes(chunk));
DCHECK(!chunk->IsLargePage()); DCHECK(!chunk->IsLargePage());
if (heap()->ShouldZapGarbage()) { if (heap()->ShouldZapGarbage()) {
collector_->MakeIterable(static_cast<Page*>(chunk), ZAP_FREE_SPACE); collector_->MakeIterable(static_cast<Page*>(chunk),
FreeSpaceTreatmentMode::kZapFreeSpace);
} else if (heap()->incremental_marking()->IsMarking()) { } else if (heap()->incremental_marking()->IsMarking()) {
// When incremental marking is on, we need to clear the mark bits of // When incremental marking is on, we need to clear the mark bits of
// the full collector. We cannot yet discard the young generation mark // the full collector. We cannot yet discard the young generation mark
// bits as they are still relevant for pointers updating. // bits as they are still relevant for pointers updating.
collector_->MakeIterable(static_cast<Page*>(chunk), IGNORE_FREE_SPACE); collector_->MakeIterable(static_cast<Page*>(chunk),
FreeSpaceTreatmentMode::kIgnoreFreeSpace);
} }
break; break;
case kObjectsOldToOld: case kObjectsOldToOld:
......
...@@ -24,9 +24,6 @@ Sweeper::Sweeper(Heap* heap, MajorNonAtomicMarkingState* marking_state) ...@@ -24,9 +24,6 @@ Sweeper::Sweeper(Heap* heap, MajorNonAtomicMarkingState* marking_state)
marking_state_(marking_state), marking_state_(marking_state),
incremental_sweeper_pending_(false), incremental_sweeper_pending_(false),
sweeping_in_progress_(false), sweeping_in_progress_(false),
iterability_task_semaphore_(0),
iterability_in_progress_(false),
iterability_task_started_(false),
should_reduce_memory_(false) {} should_reduce_memory_(false) {}
Sweeper::PauseScope::PauseScope(Sweeper* sweeper) : sweeper_(sweeper) { Sweeper::PauseScope::PauseScope(Sweeper* sweeper) : sweeper_(sweeper) {
...@@ -147,7 +144,6 @@ void Sweeper::TearDown() { ...@@ -147,7 +144,6 @@ void Sweeper::TearDown() {
void Sweeper::StartSweeping() { void Sweeper::StartSweeping() {
sweeping_in_progress_ = true; sweeping_in_progress_ = true;
iterability_in_progress_ = true;
should_reduce_memory_ = heap_->ShouldReduceMemory(); should_reduce_memory_ = heap_->ShouldReduceMemory();
MajorNonAtomicMarkingState* marking_state = MajorNonAtomicMarkingState* marking_state =
heap_->mark_compact_collector()->non_atomic_marking_state(); heap_->mark_compact_collector()->non_atomic_marking_state();
...@@ -193,8 +189,6 @@ Page* Sweeper::GetSweptPageSafe(PagedSpace* space) { ...@@ -193,8 +189,6 @@ Page* Sweeper::GetSweptPageSafe(PagedSpace* space) {
void Sweeper::EnsureCompleted() { void Sweeper::EnsureCompleted() {
if (!sweeping_in_progress_) return; if (!sweeping_in_progress_) return;
EnsureIterabilityCompleted();
// If sweeping is not completed or not running at all, we try to complete it // If sweeping is not completed or not running at all, we try to complete it
// here. // here.
ForAllSweepingSpaces([this](AllocationSpace space) { ForAllSweepingSpaces([this](AllocationSpace space) {
...@@ -228,24 +222,16 @@ bool Sweeper::AreSweeperTasksRunning() { ...@@ -228,24 +222,16 @@ bool Sweeper::AreSweeperTasksRunning() {
V8_INLINE size_t Sweeper::FreeAndProcessFreedMemory( V8_INLINE size_t Sweeper::FreeAndProcessFreedMemory(
Address free_start, Address free_end, Page* page, Space* space, Address free_start, Address free_end, Page* page, Space* space,
FreeListRebuildingMode free_list_mode, FreeSpaceTreatmentMode free_space_treatment_mode) {
FreeSpaceTreatmentMode free_space_mode) {
CHECK_GT(free_end, free_start); CHECK_GT(free_end, free_start);
size_t freed_bytes = 0; size_t freed_bytes = 0;
size_t size = static_cast<size_t>(free_end - free_start); size_t size = static_cast<size_t>(free_end - free_start);
if (free_space_mode == ZAP_FREE_SPACE) { if (free_space_treatment_mode == FreeSpaceTreatmentMode::kZapFreeSpace) {
ZapCode(free_start, size); ZapCode(free_start, size);
} }
ClearFreedMemoryMode clear_memory_mode = page->heap()->CreateFillerObjectAtSweeper(free_start, static_cast<int>(size));
(free_list_mode == REBUILD_FREE_LIST)
? ClearFreedMemoryMode::kDontClearFreedMemory
: ClearFreedMemoryMode::kClearFreedMemory;
page->heap()->CreateFillerObjectAtSweeper(free_start, static_cast<int>(size),
clear_memory_mode);
if (free_list_mode == REBUILD_FREE_LIST) {
freed_bytes = freed_bytes =
reinterpret_cast<PagedSpace*>(space)->UnaccountedFree(free_start, size); reinterpret_cast<PagedSpace*>(space)->UnaccountedFree(free_start, size);
}
if (should_reduce_memory_) page->DiscardUnusedMemory(free_start, size); if (should_reduce_memory_) page->DiscardUnusedMemory(free_start, size);
return freed_bytes; return freed_bytes;
...@@ -313,30 +299,22 @@ void Sweeper::CleanupInvalidTypedSlotsOfFreeRanges( ...@@ -313,30 +299,22 @@ void Sweeper::CleanupInvalidTypedSlotsOfFreeRanges(
page->ClearInvalidTypedSlots<OLD_TO_SHARED>(free_ranges_map); page->ClearInvalidTypedSlots<OLD_TO_SHARED>(free_ranges_map);
} }
void Sweeper::ClearMarkBitsAndHandleLivenessStatistics( void Sweeper::ClearMarkBitsAndHandleLivenessStatistics(Page* page,
Page* page, size_t live_bytes, FreeListRebuildingMode free_list_mode) { size_t live_bytes) {
marking_state_->bitmap(page)->Clear(); marking_state_->bitmap(page)->Clear();
if (free_list_mode == IGNORE_FREE_LIST) {
marking_state_->SetLiveBytes(page, 0);
// We did not free memory, so have to adjust allocated bytes here.
intptr_t freed_bytes = page->area_size() - live_bytes;
page->DecreaseAllocatedBytes(freed_bytes);
} else {
// Keep the old live bytes counter of the page until RefillFreeList, where // Keep the old live bytes counter of the page until RefillFreeList, where
// the space size is refined. // the space size is refined.
// The allocated_bytes() counter is precisely the total size of objects. // The allocated_bytes() counter is precisely the total size of objects.
DCHECK_EQ(live_bytes, page->allocated_bytes()); DCHECK_EQ(live_bytes, page->allocated_bytes());
}
} }
int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode, int Sweeper::RawSweep(Page* p, FreeSpaceTreatmentMode free_space_treatment_mode,
FreeSpaceTreatmentMode free_space_mode,
SweepingMode sweeping_mode, SweepingMode sweeping_mode,
const base::MutexGuard& page_guard) { const base::MutexGuard& page_guard) {
Space* space = p->owner(); Space* space = p->owner();
DCHECK_NOT_NULL(space); DCHECK_NOT_NULL(space);
DCHECK(free_list_mode == IGNORE_FREE_LIST || space->identity() == OLD_SPACE || DCHECK(space->identity() == OLD_SPACE || space->identity() == CODE_SPACE ||
space->identity() == CODE_SPACE || space->identity() == MAP_SPACE); space->identity() == MAP_SPACE);
DCHECK(!p->IsEvacuationCandidate() && !p->SweepingDone()); DCHECK(!p->IsEvacuationCandidate() && !p->SweepingDone());
// Phase 1: Prepare the page for sweeping. // Phase 1: Prepare the page for sweeping.
...@@ -405,7 +383,7 @@ int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode, ...@@ -405,7 +383,7 @@ int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode,
max_freed_bytes = max_freed_bytes =
std::max(max_freed_bytes, std::max(max_freed_bytes,
FreeAndProcessFreedMemory(free_start, free_end, p, space, FreeAndProcessFreedMemory(free_start, free_end, p, space,
free_list_mode, free_space_mode)); free_space_treatment_mode));
CleanupRememberedSetEntriesForFreedMemory( CleanupRememberedSetEntriesForFreedMemory(
free_start, free_end, p, record_free_ranges, &free_ranges_map, free_start, free_end, p, record_free_ranges, &free_ranges_map,
sweeping_mode, &invalidated_old_to_new_cleanup, sweeping_mode, &invalidated_old_to_new_cleanup,
...@@ -434,7 +412,7 @@ int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode, ...@@ -434,7 +412,7 @@ int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode,
max_freed_bytes = max_freed_bytes =
std::max(max_freed_bytes, std::max(max_freed_bytes,
FreeAndProcessFreedMemory(free_start, free_end, p, space, FreeAndProcessFreedMemory(free_start, free_end, p, space,
free_list_mode, free_space_mode)); free_space_treatment_mode));
CleanupRememberedSetEntriesForFreedMemory( CleanupRememberedSetEntriesForFreedMemory(
free_start, free_end, p, record_free_ranges, &free_ranges_map, free_start, free_end, p, record_free_ranges, &free_ranges_map,
sweeping_mode, &invalidated_old_to_new_cleanup, sweeping_mode, &invalidated_old_to_new_cleanup,
...@@ -443,7 +421,7 @@ int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode, ...@@ -443,7 +421,7 @@ int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode,
// Phase 3: Post process the page. // Phase 3: Post process the page.
CleanupInvalidTypedSlotsOfFreeRanges(p, free_ranges_map, sweeping_mode); CleanupInvalidTypedSlotsOfFreeRanges(p, free_ranges_map, sweeping_mode);
ClearMarkBitsAndHandleLivenessStatistics(p, live_bytes, free_list_mode); ClearMarkBitsAndHandleLivenessStatistics(p, live_bytes);
if (active_system_pages_after_sweeping) { if (active_system_pages_after_sweeping) {
// Decrement accounted memory for discarded memory. // Decrement accounted memory for discarded memory.
...@@ -454,7 +432,6 @@ int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode, ...@@ -454,7 +432,6 @@ int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode,
if (code_object_registry) code_object_registry->Finalize(); if (code_object_registry) code_object_registry->Finalize();
p->set_concurrent_sweeping_state(Page::ConcurrentSweepingState::kDone); p->set_concurrent_sweeping_state(Page::ConcurrentSweepingState::kDone);
if (free_list_mode == IGNORE_FREE_LIST) return 0;
return static_cast<int>( return static_cast<int>(
p->owner()->free_list()->GuaranteedAllocatable(max_freed_bytes)); p->owner()->free_list()->GuaranteedAllocatable(max_freed_bytes));
...@@ -533,10 +510,10 @@ int Sweeper::ParallelSweepPage(Page* page, AllocationSpace identity, ...@@ -533,10 +510,10 @@ int Sweeper::ParallelSweepPage(Page* page, AllocationSpace identity,
page->concurrent_sweeping_state()); page->concurrent_sweeping_state());
page->set_concurrent_sweeping_state( page->set_concurrent_sweeping_state(
Page::ConcurrentSweepingState::kInProgress); Page::ConcurrentSweepingState::kInProgress);
const FreeSpaceTreatmentMode free_space_mode = const FreeSpaceTreatmentMode free_space_treatment_mode =
Heap::ShouldZapGarbage() ? ZAP_FREE_SPACE : IGNORE_FREE_SPACE; Heap::ShouldZapGarbage() ? FreeSpaceTreatmentMode::kZapFreeSpace
max_freed = RawSweep(page, REBUILD_FREE_LIST, free_space_mode, : FreeSpaceTreatmentMode::kIgnoreFreeSpace;
sweeping_mode, guard); max_freed = RawSweep(page, free_space_treatment_mode, sweeping_mode, guard);
DCHECK(page->SweepingDone()); DCHECK(page->SweepingDone());
} }
...@@ -566,7 +543,6 @@ void Sweeper::EnsurePageIsSwept(Page* page) { ...@@ -566,7 +543,6 @@ void Sweeper::EnsurePageIsSwept(Page* page) {
} }
} else { } else {
DCHECK(page->InNewSpace()); DCHECK(page->InNewSpace());
EnsureIterabilityCompleted();
} }
CHECK(page->SweepingDone()); CHECK(page->SweepingDone());
...@@ -639,87 +615,5 @@ Page* Sweeper::GetSweepingPageSafe(AllocationSpace space) { ...@@ -639,87 +615,5 @@ Page* Sweeper::GetSweepingPageSafe(AllocationSpace space) {
return page; return page;
} }
void Sweeper::EnsureIterabilityCompleted() {
if (!iterability_in_progress_) return;
if (FLAG_concurrent_sweeping && iterability_task_started_) {
if (heap_->isolate()->cancelable_task_manager()->TryAbort(
iterability_task_id_) != TryAbortResult::kTaskAborted) {
iterability_task_semaphore_.Wait();
}
iterability_task_started_ = false;
}
for (Page* page : iterability_list_) {
MakeIterable(page);
}
iterability_list_.clear();
iterability_in_progress_ = false;
}
class Sweeper::IterabilityTask final : public CancelableTask {
public:
IterabilityTask(Isolate* isolate, Sweeper* sweeper,
base::Semaphore* pending_iterability_task)
: CancelableTask(isolate),
sweeper_(sweeper),
pending_iterability_task_(pending_iterability_task),
tracer_(isolate->heap()->tracer()) {}
~IterabilityTask() override = default;
IterabilityTask(const IterabilityTask&) = delete;
IterabilityTask& operator=(const IterabilityTask&) = delete;
private:
void RunInternal() final {
TRACE_GC_EPOCH(tracer_, GCTracer::Scope::MC_BACKGROUND_SWEEPING,
ThreadKind::kBackground);
for (Page* page : sweeper_->iterability_list_) {
sweeper_->MakeIterable(page);
}
sweeper_->iterability_list_.clear();
pending_iterability_task_->Signal();
}
Sweeper* const sweeper_;
base::Semaphore* const pending_iterability_task_;
GCTracer* const tracer_;
};
void Sweeper::StartIterabilityTasks() {
if (!iterability_in_progress_) return;
DCHECK(!iterability_task_started_);
if (FLAG_concurrent_sweeping && !iterability_list_.empty()) {
auto task = std::make_unique<IterabilityTask>(heap_->isolate(), this,
&iterability_task_semaphore_);
iterability_task_id_ = task->id();
iterability_task_started_ = true;
V8::GetCurrentPlatform()->CallOnWorkerThread(std::move(task));
}
}
void Sweeper::AddPageForIterability(Page* page) {
DCHECK(sweeping_in_progress_);
DCHECK(iterability_in_progress_);
DCHECK(!iterability_task_started_);
DCHECK(IsValidIterabilitySpace(page->owner_identity()));
DCHECK_EQ(Page::ConcurrentSweepingState::kDone,
page->concurrent_sweeping_state());
iterability_list_.push_back(page);
page->set_concurrent_sweeping_state(Page::ConcurrentSweepingState::kPending);
}
void Sweeper::MakeIterable(Page* page) {
base::MutexGuard guard(page->mutex());
DCHECK(IsValidIterabilitySpace(page->owner_identity()));
const FreeSpaceTreatmentMode free_space_mode =
Heap::ShouldZapGarbage() ? ZAP_FREE_SPACE : IGNORE_FREE_SPACE;
RawSweep(page, IGNORE_FREE_LIST, free_space_mode,
SweepingMode::kLazyOrConcurrent, guard);
}
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8
...@@ -23,11 +23,10 @@ class Page; ...@@ -23,11 +23,10 @@ class Page;
class PagedSpace; class PagedSpace;
class Space; class Space;
enum FreeSpaceTreatmentMode { IGNORE_FREE_SPACE, ZAP_FREE_SPACE }; enum class FreeSpaceTreatmentMode { kIgnoreFreeSpace, kZapFreeSpace };
class Sweeper { class Sweeper {
public: public:
using IterabilityList = std::vector<Page*>;
using SweepingList = std::vector<Page*>; using SweepingList = std::vector<Page*>;
using SweptList = std::vector<Page*>; using SweptList = std::vector<Page*>;
...@@ -92,8 +91,7 @@ class Sweeper { ...@@ -92,8 +91,7 @@ class Sweeper {
void ScheduleIncrementalSweepingTask(); void ScheduleIncrementalSweepingTask();
int RawSweep(Page* p, FreeListRebuildingMode free_list_mode, int RawSweep(Page* p, FreeSpaceTreatmentMode free_space_treatment_mode,
FreeSpaceTreatmentMode free_space_mode,
SweepingMode sweeping_mode, const base::MutexGuard& page_guard); SweepingMode sweeping_mode, const base::MutexGuard& page_guard);
// After calling this function sweeping is considered to be in progress // After calling this function sweeping is considered to be in progress
...@@ -110,13 +108,8 @@ class Sweeper { ...@@ -110,13 +108,8 @@ class Sweeper {
Page* GetSweptPageSafe(PagedSpace* space); Page* GetSweptPageSafe(PagedSpace* space);
void AddPageForIterability(Page* page);
void StartIterabilityTasks();
void EnsureIterabilityCompleted();
private: private:
class IncrementalSweeperTask; class IncrementalSweeperTask;
class IterabilityTask;
class SweeperJob; class SweeperJob;
static const int kNumberOfSweepingSpaces = static const int kNumberOfSweepingSpaces =
...@@ -134,10 +127,9 @@ class Sweeper { ...@@ -134,10 +127,9 @@ class Sweeper {
// FreeSpaceTreatmentMode this function may add the free memory to a free // FreeSpaceTreatmentMode this function may add the free memory to a free
// list, make the memory iterable, clear it, and return the free memory to // list, make the memory iterable, clear it, and return the free memory to
// the operating system. // the operating system.
size_t FreeAndProcessFreedMemory(Address free_start, Address free_end, size_t FreeAndProcessFreedMemory(
Page* page, Space* space, Address free_start, Address free_end, Page* page, Space* space,
FreeListRebuildingMode free_list_mode, FreeSpaceTreatmentMode free_space_treatment_mode);
FreeSpaceTreatmentMode free_space_mode);
// Helper function for RawSweep. Handle remembered set entries in the freed // Helper function for RawSweep. Handle remembered set entries in the freed
// memory which require clearing. // memory which require clearing.
...@@ -155,8 +147,7 @@ class Sweeper { ...@@ -155,8 +147,7 @@ class Sweeper {
// Helper function for RawSweep. Clears the mark bits and ensures consistency // Helper function for RawSweep. Clears the mark bits and ensures consistency
// of live bytes. // of live bytes.
void ClearMarkBitsAndHandleLivenessStatistics( void ClearMarkBitsAndHandleLivenessStatistics(Page* page, size_t live_bytes);
Page* page, size_t live_bytes, FreeListRebuildingMode free_list_mode);
// Can only be called on the main thread when no tasks are running. // Can only be called on the main thread when no tasks are running.
bool IsDoneSweeping() const { bool IsDoneSweeping() const {
...@@ -182,12 +173,6 @@ class Sweeper { ...@@ -182,12 +173,6 @@ class Sweeper {
void PrepareToBeSweptPage(AllocationSpace space, Page* page); void PrepareToBeSweptPage(AllocationSpace space, Page* page);
void MakeIterable(Page* page);
bool IsValidIterabilitySpace(AllocationSpace space) {
return space == NEW_SPACE || space == RO_SPACE;
}
static bool IsValidSweepingSpace(AllocationSpace space) { static bool IsValidSweepingSpace(AllocationSpace space) {
return space >= FIRST_GROWABLE_PAGED_SPACE && return space >= FIRST_GROWABLE_PAGED_SPACE &&
space <= LAST_GROWABLE_PAGED_SPACE; space <= LAST_GROWABLE_PAGED_SPACE;
...@@ -209,13 +194,6 @@ class Sweeper { ...@@ -209,13 +194,6 @@ class Sweeper {
// Main thread can finalize sweeping, while background threads allocation slow // Main thread can finalize sweeping, while background threads allocation slow
// path checks this flag to see whether it could support concurrent sweeping. // path checks this flag to see whether it could support concurrent sweeping.
std::atomic<bool> sweeping_in_progress_; std::atomic<bool> sweeping_in_progress_;
// Pages that are only made iterable but have their free lists ignored.
IterabilityList iterability_list_;
CancelableTaskManager::Id iterability_task_id_;
base::Semaphore iterability_task_semaphore_;
bool iterability_in_progress_;
bool iterability_task_started_;
bool should_reduce_memory_; bool should_reduce_memory_;
}; };
......
...@@ -607,7 +607,6 @@ ...@@ -607,7 +607,6 @@
F(MINOR_MC_MARK_WEAK) \ F(MINOR_MC_MARK_WEAK) \
F(MINOR_MC_MARKING_DEQUE) \ F(MINOR_MC_MARKING_DEQUE) \
F(MINOR_MC_RESET_LIVENESS) \ F(MINOR_MC_RESET_LIVENESS) \
F(MINOR_MC_SWEEPING) \
F(SAFEPOINT) \ F(SAFEPOINT) \
F(SCAVENGER) \ F(SCAVENGER) \
F(SCAVENGER_COMPLETE_SWEEP_ARRAY_BUFFERS) \ F(SCAVENGER_COMPLETE_SWEEP_ARRAY_BUFFERS) \
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment