Commit add50f20 authored by Dominik Inführ, committed by Commit Bot

[heap] Avoid iterating all pages when freeing empty buckets

At the end of scavenge, the GC iterates all pages to search for empty
buckets and free them. With this CL the scavenger marks buckets
(and their corresponding pages) that were empty to reduce work.
After finishing scavenging the GC only needs to revisit those marked
buckets.

Unlike (minor) mark-compact, the scavenger can't directly free those
buckets, since the evacuation and pointer updating-phases are
not separated.

Right now the pages are processed sequentially but this could be
parallelized in a subsequent CL.

Change-Id: I47ed8c0e952b06c5d960e39a6f38e745d5618656
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1889884
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#64781}
parent 42e8c231
......@@ -4437,11 +4437,6 @@ void MinorMarkCompactCollector::CollectGarbage() {
heap()->new_lo_space()->FreeDeadObjects([](HeapObject) { return true; });
}
RememberedSet<OLD_TO_NEW>::IterateMemoryChunks(
heap(), [](MemoryChunk* chunk) {
RememberedSet<OLD_TO_NEW>::FreeEmptyBuckets(chunk);
});
heap()->account_external_memory_concurrently_freed();
}
......
......@@ -12,6 +12,7 @@
#include "src/heap/heap.h"
#include "src/heap/slot-set.h"
#include "src/heap/spaces.h"
#include "src/heap/worklist.h"
namespace v8 {
namespace internal {
......@@ -40,6 +41,20 @@ class RememberedSetOperations {
return slots;
}
// Visits every slot recorded in |slot_set| for |chunk| through |callback|
// and, when the iteration reports that at least one bucket became empty,
// records |chunk| on the |empty_chunks| worklist so the GC can revisit it
// later instead of rescanning all pages.
// Returns the number of slots visited (0 when there is no slot set).
template <typename Callback>
static int IterateAndTrackEmptyBuckets(
    SlotSet* slot_set, MemoryChunk* chunk, Callback callback,
    Worklist<MemoryChunk*, 64>::View empty_chunks) {
  if (slot_set == nullptr) return 0;
  bool has_empty_bucket = false;
  const int slot_count = slot_set->IterateAndTrackEmptyBuckets(
      chunk->address(), chunk->buckets(), callback, &has_empty_bucket);
  if (has_empty_bucket) empty_chunks.Push(chunk);
  return slot_count;
}
static void Remove(SlotSet* slot_set, MemoryChunk* chunk, Address slot_addr) {
if (slot_set != nullptr) {
uintptr_t offset = slot_addr - chunk->address();
......@@ -149,12 +164,36 @@ class RememberedSet : public AllStatic {
return RememberedSetOperations::Iterate(slot_set, chunk, callback, mode);
}
// Iterates the remembered-set slots of |chunk|, invoking |callback| for each
// slot. Buckets found empty during iteration are tracked by pushing |chunk|
// onto |empty_chunks| so they can be freed later without rescanning every
// page. Returns the number of slots visited.
template <typename Callback>
static int IterateAndTrackEmptyBuckets(
    MemoryChunk* chunk, Callback callback,
    Worklist<MemoryChunk*, 64>::View empty_chunks) {
  SlotSet* slots = chunk->slot_set<type>();
  // The operations-level helper already pushes |chunk| onto |empty_chunks|
  // when an empty bucket is found. The previous local flag
  // (|empty_bucket_found|) was never set to true, making its Push()
  // unreachable dead code — and had it fired, the chunk would have been
  // pushed twice. Delegate directly instead.
  return RememberedSetOperations::IterateAndTrackEmptyBuckets(
      slots, chunk, callback, empty_chunks);
}
// Frees buckets of the OLD_TO_NEW slot set of |chunk| that contain no slots.
// NOTE(review): the lines below interleave the pre-change and post-change
// versions of this method (a diff rendering that lost its +/- markers).
// The first if-statement is the old unconditional FreeEmptyBuckets() call;
// the second is the new form, which additionally releases the whole slot
// set when FreeEmptyBuckets() returns true — confirm against the upstream
// commit before relying on either shape.
static void FreeEmptyBuckets(MemoryChunk* chunk) {
DCHECK(type == OLD_TO_NEW);
SlotSet* slot_set = chunk->slot_set<type>();
if (slot_set != nullptr) {
slot_set->FreeEmptyBuckets(chunk->buckets());
if (slot_set != nullptr && slot_set->FreeEmptyBuckets(chunk->buckets())) {
chunk->ReleaseSlotSet<type>();
}
}
// Examines the OLD_TO_NEW slot set of |chunk| for buckets that were marked
// possibly-empty during scavenge. When the check reports the slot set can be
// dropped, it is released and true is returned; otherwise returns false.
static bool CheckPossiblyEmptyBuckets(MemoryChunk* chunk) {
  DCHECK(type == OLD_TO_NEW);
  SlotSet* slot_set = chunk->slot_set<type, AccessMode::NON_ATOMIC>();
  if (slot_set == nullptr) return false;
  if (!slot_set->CheckPossiblyEmptyBuckets(chunk->buckets())) return false;
  chunk->ReleaseSlotSet<type>();
  return true;
}
// Given a page and a typed slot in that page, this function adds the slot
......
......@@ -244,12 +244,14 @@ void ScavengerCollector::CollectGarbage() {
const bool is_logging = isolate_->LogObjectRelocation();
const int num_scavenge_tasks = NumberOfScavengeTasks();
OneshotBarrier barrier(base::TimeDelta::FromMilliseconds(kMaxWaitTimeMs));
Worklist<MemoryChunk*, 64> empty_chunks;
Scavenger::CopiedList copied_list(num_scavenge_tasks);
Scavenger::PromotionList promotion_list(num_scavenge_tasks);
EphemeronTableList ephemeron_table_list(num_scavenge_tasks);
for (int i = 0; i < num_scavenge_tasks; i++) {
scavengers[i] = new Scavenger(this, heap_, is_logging, &copied_list,
&promotion_list, &ephemeron_table_list, i);
scavengers[i] =
new Scavenger(this, heap_, is_logging, &empty_chunks, &copied_list,
&promotion_list, &ephemeron_table_list, i);
job.AddTask(new ScavengingTask(heap_, scavengers[i], &barrier));
}
......@@ -362,11 +364,20 @@ void ScavengerCollector::CollectGarbage() {
{
TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_FREE_REMEMBERED_SET);
MemoryChunk* chunk;
while (empty_chunks.Pop(kMainThreadId, &chunk)) {
RememberedSet<OLD_TO_NEW>::CheckPossiblyEmptyBuckets(chunk);
}
#ifdef DEBUG
RememberedSet<OLD_TO_NEW>::IterateMemoryChunks(
heap_, [](MemoryChunk* chunk) {
RememberedSet<OLD_TO_NEW>::FreeEmptyBuckets(chunk);
SlotSet* slot_set = chunk->slot_set<OLD_TO_NEW>();
DCHECK_IMPLIES(slot_set != nullptr,
slot_set->IsPossiblyEmptyCleared());
});
#endif
}
// Update how much has survived scavenge.
......@@ -412,10 +423,12 @@ int ScavengerCollector::NumberOfScavengeTasks() {
}
Scavenger::Scavenger(ScavengerCollector* collector, Heap* heap, bool is_logging,
Worklist<MemoryChunk*, 64>* empty_chunks,
CopiedList* copied_list, PromotionList* promotion_list,
EphemeronTableList* ephemeron_table_list, int task_id)
: collector_(collector),
heap_(heap),
empty_chunks_(empty_chunks, task_id),
promotion_list_(promotion_list, task_id),
copied_list_(copied_list, task_id),
ephemeron_table_list_(ephemeron_table_list, task_id),
......@@ -459,22 +472,28 @@ void Scavenger::AddPageToSweeperIfNecessary(MemoryChunk* page) {
void Scavenger::ScavengePage(MemoryChunk* page) {
CodePageMemoryModificationScope memory_modification_scope(page);
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToNew(page);
RememberedSet<OLD_TO_NEW>::Iterate(
page,
[this, &filter](MaybeObjectSlot slot) {
if (!filter.IsValid(slot.address())) return REMOVE_SLOT;
return CheckAndScavengeObject(heap_, slot);
},
SlotSet::KEEP_EMPTY_BUCKETS);
filter = InvalidatedSlotsFilter::OldToNew(page);
RememberedSetSweeping::Iterate(
page,
[this, &filter](MaybeObjectSlot slot) {
if (!filter.IsValid(slot.address())) return REMOVE_SLOT;
return CheckAndScavengeObject(heap_, slot);
},
SlotSet::KEEP_EMPTY_BUCKETS);
if (page->slot_set<OLD_TO_NEW, AccessMode::NON_ATOMIC>() != nullptr) {
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToNew(page);
RememberedSet<OLD_TO_NEW>::IterateAndTrackEmptyBuckets(
page,
[this, &filter](MaybeObjectSlot slot) {
if (!filter.IsValid(slot.address())) return REMOVE_SLOT;
return CheckAndScavengeObject(heap_, slot);
},
empty_chunks_);
}
if (page->sweeping_slot_set<AccessMode::NON_ATOMIC>() != nullptr) {
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToNew(page);
RememberedSetSweeping::Iterate(
page,
[this, &filter](MaybeObjectSlot slot) {
if (!filter.IsValid(slot.address())) return REMOVE_SLOT;
return CheckAndScavengeObject(heap_, slot);
},
SlotSet::KEEP_EMPTY_BUCKETS);
}
if (page->invalidated_slots<OLD_TO_NEW>() != nullptr) {
// The invalidated slots are not needed after old-to-new slots were
......@@ -596,6 +615,7 @@ void Scavenger::Finalize() {
heap()->IncrementPromotedObjectsSize(promoted_size_);
collector_->MergeSurvivingNewLargeObjects(surviving_new_large_objects_);
allocator_.Finalize();
empty_chunks_.FlushToGlobal();
ephemeron_table_list_.FlushToGlobal();
for (auto it = ephemeron_remembered_set_.begin();
it != ephemeron_remembered_set_.end(); ++it) {
......
......@@ -117,7 +117,8 @@ class Scavenger {
using CopiedList = Worklist<ObjectAndSize, kCopiedListSegmentSize>;
Scavenger(ScavengerCollector* collector, Heap* heap, bool is_logging,
CopiedList* copied_list, PromotionList* promotion_list,
Worklist<MemoryChunk*, 64>* empty_chunks, CopiedList* copied_list,
PromotionList* promotion_list,
EphemeronTableList* ephemeron_table_list, int task_id);
// Entry point for scavenging an old generation page. For scavenging single
......@@ -206,6 +207,7 @@ class Scavenger {
ScavengerCollector* const collector_;
Heap* const heap_;
Worklist<MemoryChunk*, 64>::View empty_chunks_;
PromotionList::View promotion_list_;
CopiedList::View copied_list_;
EphemeronTableList::View ephemeron_table_list_;
......
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment