Commit b0874438 authored by Dominik Inführ, committed by V8 LUCI CQ

[heap] Remove MarkCompactCollectorBase

Remove the common base class of MarkCompactCollector and
MinorMarkCompactCollector as a cleanup.

Change-Id: Ib6a931b2bd397ac7c9425b0e268b847a38125a57
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3610424
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#80243}
parent 359c48d9
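
The cleanup follows the usual devirtualization pattern: helpers that used to live in the virtual base class become free functions in an anonymous namespace, templated on the concrete collector type, so each collector carries its own heap_ pointer and no vtable dispatch remains. A minimal sketch of the pattern, with hypothetical FullCollector/MinorCollector types standing in for V8's real classes:

    #include <iostream>

    // Before such a cleanup, a base class would declare hooks like
    // `virtual void EvacuatePrologue() = 0;` and shared helpers would call
    // them through the vtable. Afterwards, each collector is a standalone
    // class and the shared logic is a function template.

    class FullCollector {
     public:
      int heap_pages() const { return 128; }
      void EvacuatePrologue() { std::cout << "full prologue\n"; }
    };

    class MinorCollector {
     public:
      int heap_pages() const { return 16; }
      void EvacuatePrologue() { std::cout << "minor prologue\n"; }
    };

    namespace {

    // Shared helper, statically dispatched: works with any type that
    // provides EvacuatePrologue() and heap_pages(); resolved at compile time.
    template <typename Collector>
    void PrepareEvacuation(Collector* collector) {
      collector->EvacuatePrologue();
      std::cout << "pages: " << collector->heap_pages() << "\n";
    }

    }  // namespace

    int main() {
      FullCollector full;
      MinorCollector minor;
      PrepareEvacuation(&full);   // instantiates PrepareEvacuation<FullCollector>
      PrepareEvacuation(&minor);  // instantiates PrepareEvacuation<MinorCollector>
      return 0;
    }

Static dispatch lets the compiler inline the per-collector calls, and it removes the pure-virtual boilerplate that every new hook previously required in the base class.
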
@@ -1638,6 +1638,13 @@ class Heap {
   // over all objects.
   void MakeHeapIterable();
 
+  V8_EXPORT_PRIVATE bool CanPromoteYoungAndExpandOldGeneration(size_t size);
+  V8_EXPORT_PRIVATE bool CanExpandOldGeneration(size_t size);
+
+  inline bool ShouldReduceMemory() const {
+    return (current_gc_flags_ & kReduceMemoryFootprintMask) != 0;
+  }
+
  private:
   class AllocationTrackerForDebugging;
@@ -1753,10 +1760,6 @@ class Heap {
   void set_current_gc_flags(int flags) { current_gc_flags_ = flags; }
 
-  inline bool ShouldReduceMemory() const {
-    return (current_gc_flags_ & kReduceMemoryFootprintMask) != 0;
-  }
-
   int NumberOfScavengeTasks();
 
   // Checks whether a global GC is necessary
@@ -1998,10 +2001,8 @@ class Heap {
   bool always_allocate() { return always_allocate_scope_count_ != 0; }
 
-  V8_EXPORT_PRIVATE bool CanExpandOldGeneration(size_t size);
   V8_EXPORT_PRIVATE bool CanExpandOldGenerationBackground(LocalHeap* local_heap,
                                                           size_t size);
-  V8_EXPORT_PRIVATE bool CanPromoteYoungAndExpandOldGeneration(size_t size);
 
   bool ShouldExpandOldGenerationOnSlowAllocation(
       LocalHeap* local_heap = nullptr);
......
@@ -279,7 +279,8 @@ typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::end() {
   return iterator(chunk_, bitmap_, end_);
 }
 
-Isolate* MarkCompactCollectorBase::isolate() { return heap()->isolate(); }
+Isolate* MarkCompactCollector::isolate() { return heap()->isolate(); }
+Isolate* MinorMarkCompactCollector::isolate() { return heap()->isolate(); }
 
 }  // namespace internal
 }  // namespace v8
......
@@ -471,11 +471,9 @@ int NumberOfAvailableCores() {
   return num_cores;
 }
 
-}  // namespace
-
-int MarkCompactCollectorBase::NumberOfParallelCompactionTasks() {
+int NumberOfParallelCompactionTasks(Heap* heap) {
   int tasks = FLAG_parallel_compaction ? NumberOfAvailableCores() : 1;
-  if (!heap_->CanPromoteYoungAndExpandOldGeneration(
+  if (!heap->CanPromoteYoungAndExpandOldGeneration(
           static_cast<size_t>(tasks * Page::kPageSize))) {
     // Optimize for memory usage near the heap limit.
     tasks = 1;
@@ -483,8 +481,10 @@ int MarkCompactCollectorBase::NumberOfParallelCompactionTasks() {
   return tasks;
 }
 
+}  // namespace
+
 MarkCompactCollector::MarkCompactCollector(Heap* heap)
-    : MarkCompactCollectorBase(heap),
+    : heap_(heap),
 #ifdef DEBUG
       state_(IDLE),
 #endif
@@ -3908,17 +3908,19 @@ class PageEvacuationJob : public v8::JobTask {
   GCTracer* tracer_;
 };
 
+namespace {
 template <class Evacuator, class Collector>
-size_t MarkCompactCollectorBase::CreateAndExecuteEvacuationTasks(
+size_t CreateAndExecuteEvacuationTasks(
     Collector* collector,
     std::vector<std::pair<ParallelWorkItem, MemoryChunk*>> evacuation_items,
     MigrationObserver* migration_observer) {
   base::Optional<ProfilingMigrationObserver> profiling_observer;
-  if (isolate()->LogObjectRelocation()) {
-    profiling_observer.emplace(heap());
+  if (collector->isolate()->LogObjectRelocation()) {
+    profiling_observer.emplace(collector->heap());
   }
   std::vector<std::unique_ptr<v8::internal::Evacuator>> evacuators;
-  const int wanted_num_tasks = NumberOfParallelCompactionTasks();
+  const int wanted_num_tasks =
+      NumberOfParallelCompactionTasks(collector->heap());
   for (int i = 0; i < wanted_num_tasks; i++) {
     auto evacuator = std::make_unique<Evacuator>(collector);
     if (profiling_observer) {
@@ -3930,9 +3932,10 @@ size_t MarkCompactCollectorBase::CreateAndExecuteEvacuationTasks(
     evacuators.push_back(std::move(evacuator));
   }
   V8::GetCurrentPlatform()
-      ->PostJob(v8::TaskPriority::kUserBlocking,
-                std::make_unique<PageEvacuationJob>(
-                    isolate(), &evacuators, std::move(evacuation_items)))
+      ->PostJob(
+          v8::TaskPriority::kUserBlocking,
+          std::make_unique<PageEvacuationJob>(collector->isolate(), &evacuators,
+                                              std::move(evacuation_items)))
       ->Join();
   for (auto& evacuator : evacuators) {
     evacuator->Finalize();
@@ -3940,19 +3943,18 @@ size_t MarkCompactCollectorBase::CreateAndExecuteEvacuationTasks(
   return wanted_num_tasks;
 }
 
-bool MarkCompactCollectorBase::ShouldMovePage(
-    Page* p, intptr_t live_bytes, AlwaysPromoteYoung always_promote_young) {
-  const bool reduce_memory = heap()->ShouldReduceMemory();
-  const Address age_mark = heap()->new_space()->age_mark();
+bool ShouldMovePage(Page* p, intptr_t live_bytes,
+                    AlwaysPromoteYoung always_promote_young) {
+  Heap* heap = p->heap();
+  const bool reduce_memory = heap->ShouldReduceMemory();
+  const Address age_mark = heap->new_space()->age_mark();
   return !reduce_memory && !p->NeverEvacuate() &&
          (live_bytes > Evacuator::NewSpacePageEvacuationThreshold()) &&
         (always_promote_young == AlwaysPromoteYoung::kYes ||
          !p->Contains(age_mark)) &&
-         heap()->CanExpandOldGeneration(live_bytes);
+         heap->CanExpandOldGeneration(live_bytes);
 }
 
-namespace {
-
 void TraceEvacuation(Isolate* isolate, size_t pages_count,
                      size_t wanted_num_tasks, size_t live_bytes,
                      size_t aborted_pages) {
@@ -4570,10 +4572,11 @@ MarkCompactCollector::CreateRememberedSetUpdatingItem(
       heap(), non_atomic_marking_state(), chunk, updating_mode);
 }
 
-template <typename IterateableSpace>
-int MarkCompactCollectorBase::CollectRememberedSetUpdatingItems(
-    std::vector<std::unique_ptr<UpdatingItem>>* items, IterateableSpace* space,
-    RememberedSetUpdatingMode mode) {
+namespace {
+template <typename IterateableSpace, typename Collector>
+int CollectRememberedSetUpdatingItems(
+    Collector* collector, std::vector<std::unique_ptr<UpdatingItem>>* items,
+    IterateableSpace* space, RememberedSetUpdatingMode mode) {
   int pages = 0;
   for (MemoryChunk* chunk : *space) {
     const bool contains_old_to_old_slots =
@@ -4603,12 +4606,14 @@ int MarkCompactCollectorBase::CollectRememberedSetUpdatingItems(
     if (mode == RememberedSetUpdatingMode::ALL || contains_old_to_new_slots ||
         contains_old_to_old_invalidated_slots ||
         contains_old_to_new_invalidated_slots) {
-      items->emplace_back(CreateRememberedSetUpdatingItem(chunk, mode));
+      items->emplace_back(
+          collector->CreateRememberedSetUpdatingItem(chunk, mode));
       pages++;
     }
   }
   return pages;
 }
+}  // namespace
 
 class EphemeronTableUpdatingItem : public UpdatingItem {
  public:
@@ -4686,16 +4691,20 @@ void MarkCompactCollector::UpdatePointersAfterEvacuation() {
         GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_SLOTS_MAIN);
 
     std::vector<std::unique_ptr<UpdatingItem>> updating_items;
 
-    CollectRememberedSetUpdatingItems(&updating_items, heap()->old_space(),
+    CollectRememberedSetUpdatingItems(this, &updating_items,
+                                      heap()->old_space(),
                                       RememberedSetUpdatingMode::ALL);
-    CollectRememberedSetUpdatingItems(&updating_items, heap()->code_space(),
+    CollectRememberedSetUpdatingItems(this, &updating_items,
+                                      heap()->code_space(),
                                       RememberedSetUpdatingMode::ALL);
-    CollectRememberedSetUpdatingItems(&updating_items, heap()->lo_space(),
+    CollectRememberedSetUpdatingItems(this, &updating_items, heap()->lo_space(),
                                       RememberedSetUpdatingMode::ALL);
-    CollectRememberedSetUpdatingItems(&updating_items, heap()->code_lo_space(),
+    CollectRememberedSetUpdatingItems(this, &updating_items,
+                                      heap()->code_lo_space(),
                                       RememberedSetUpdatingMode::ALL);
     if (heap()->map_space()) {
-      CollectRememberedSetUpdatingItems(&updating_items, heap()->map_space(),
+      CollectRememberedSetUpdatingItems(this, &updating_items,
+                                        heap()->map_space(),
                                         RememberedSetUpdatingMode::ALL);
     }
@@ -5190,7 +5199,7 @@ void MinorMarkCompactCollector::TearDown() {}
 constexpr size_t MinorMarkCompactCollector::kMaxParallelTasks;
 
 MinorMarkCompactCollector::MinorMarkCompactCollector(Heap* heap)
-    : MarkCompactCollectorBase(heap),
+    : heap_(heap),
       worklist_(new MinorMarkCompactCollector::MarkingWorklist()),
       main_thread_worklist_local_(worklist_),
       marking_state_(heap->isolate()),
@@ -5304,18 +5313,19 @@ void MinorMarkCompactCollector::UpdatePointersAfterEvacuation() {
     // Create batches of global handles.
     CollectToSpaceUpdatingItems(&updating_items);
-    CollectRememberedSetUpdatingItems(&updating_items, heap()->old_space(),
+    CollectRememberedSetUpdatingItems(this, &updating_items, heap()->old_space(),
                                       RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
-    CollectRememberedSetUpdatingItems(&updating_items, heap()->code_space(),
+    CollectRememberedSetUpdatingItems(this, &updating_items, heap()->code_space(),
                                       RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
     if (heap()->map_space()) {
       CollectRememberedSetUpdatingItems(
-          &updating_items, heap()->map_space(),
+          this, &updating_items, heap()->map_space(),
           RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
     }
-    CollectRememberedSetUpdatingItems(&updating_items, heap()->lo_space(),
+    CollectRememberedSetUpdatingItems(this, &updating_items, heap()->lo_space(),
                                       RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
-    CollectRememberedSetUpdatingItems(&updating_items, heap()->code_lo_space(),
+    CollectRememberedSetUpdatingItems(this, &updating_items,
+                                      heap()->code_lo_space(),
                                       RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
 
     {
......
@@ -190,57 +190,6 @@ enum class AlwaysPromoteYoung { kYes, kNo };
 enum PageEvacuationMode { NEW_TO_NEW, NEW_TO_OLD };
 enum class RememberedSetUpdatingMode { ALL, OLD_TO_NEW_ONLY };
 
-// Base class for minor and full MC collectors.
-class MarkCompactCollectorBase {
- public:
-  virtual ~MarkCompactCollectorBase() = default;
-
-  virtual void SetUp() = 0;
-  virtual void TearDown() = 0;
-  virtual void CollectGarbage() = 0;
-
-  inline Heap* heap() const { return heap_; }
-  inline Isolate* isolate();
-
- protected:
-  explicit MarkCompactCollectorBase(Heap* heap) : heap_(heap) {}
-
-  // Marking operations for objects reachable from roots.
-  virtual void MarkLiveObjects() = 0;
-  // Mark objects reachable (transitively) from objects in the marking
-  // work list.
-  virtual void DrainMarkingWorklist() = 0;
-  // Clear non-live references held in side data structures.
-  virtual void ClearNonLiveReferences() = 0;
-  virtual void EvacuatePrologue() = 0;
-  virtual void EvacuateEpilogue() = 0;
-  virtual void Evacuate() = 0;
-  virtual void EvacuatePagesInParallel() = 0;
-  virtual void UpdatePointersAfterEvacuation() = 0;
-
-  virtual std::unique_ptr<UpdatingItem> CreateRememberedSetUpdatingItem(
-      MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) = 0;
-
-  // Returns the number of wanted compaction tasks.
-  template <class Evacuator, class Collector>
-  size_t CreateAndExecuteEvacuationTasks(
-      Collector* collector,
-      std::vector<std::pair<ParallelWorkItem, MemoryChunk*>> evacuation_items,
-      MigrationObserver* migration_observer);
-
-  // Returns whether this page should be moved according to heuristics.
-  bool ShouldMovePage(Page* p, intptr_t live_bytes,
-                      AlwaysPromoteYoung promote_young);
-
-  template <typename IterateableSpace>
-  int CollectRememberedSetUpdatingItems(
-      std::vector<std::unique_ptr<UpdatingItem>>* items,
-      IterateableSpace* space, RememberedSetUpdatingMode mode);
-
-  int NumberOfParallelCompactionTasks();
-
-  Heap* heap_;
-};
-
 class MinorMarkingState final
     : public MarkingStateBase<MinorMarkingState, AccessMode::ATOMIC> {
  public:
@@ -442,7 +391,7 @@ class MainMarkingVisitor final
 };
 
 // Collector for young and old generation.
-class MarkCompactCollector final : public MarkCompactCollectorBase {
+class MarkCompactCollector final {
  public:
   using MarkingState = MajorMarkingState;
   using AtomicMarkingState = MajorAtomicMarkingState;
@@ -475,10 +424,13 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
     return &non_atomic_marking_state_;
   }
 
-  void SetUp() override;
-  void TearDown() override;
+  inline Heap* heap() const { return heap_; }
+  inline Isolate* isolate();
+
+  void SetUp();
+  void TearDown();
 
   // Performs a global garbage collection.
-  void CollectGarbage() override;
+  void CollectGarbage();
 
   void CollectEvacuationCandidates(PagedSpace* space);
@@ -603,7 +555,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
   }
 
   explicit MarkCompactCollector(Heap* heap);
-  ~MarkCompactCollector() override;
+  ~MarkCompactCollector();
 
   // Used by wrapper tracing.
   V8_INLINE void MarkExternallyReferencedObject(HeapObject obj);
@@ -619,6 +571,9 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
                 MarkingWorklistProcessingMode::kDefault>
   std::pair<size_t, size_t> ProcessMarkingWorklist(size_t bytes_to_process);
 
+  std::unique_ptr<UpdatingItem> CreateRememberedSetUpdatingItem(
+      MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode);
+
 private:
   void ComputeEvacuationHeuristics(size_t area_size,
                                    int* target_fragmentation_percent,
@@ -635,7 +590,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
   // Free unmarked entries in the ExternalPointerTable.
   void SweepExternalPointerTable();
 
-  void MarkLiveObjects() override;
+  void MarkLiveObjects();
 
   // Marks the object grey and adds it to the marking work list.
   // This is for non-incremental marking only.
@@ -667,7 +622,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
   // Drains the main thread marking work list. Will mark all pending objects
   // if no concurrent threads are running.
-  void DrainMarkingWorklist() override;
+  void DrainMarkingWorklist();
 
   // Implements ephemeron semantics: Marks value if key is already reachable.
   // Returns true if value was actually marked.
@@ -695,7 +650,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
   // Clear non-live references in weak cells, transition and descriptor arrays,
   // and deoptimize dependent code of non-live maps.
-  void ClearNonLiveReferences() override;
+  void ClearNonLiveReferences();
   void MarkDependentCodeForDeoptimization();
 
   // Checks if the given weak cell is a simple transition from the parent map
   // of the given dead target. If so it clears the transition and trims
@@ -744,14 +699,11 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
   void StartSweepSpaces();
   void StartSweepSpace(PagedSpace* space);
 
-  void EvacuatePrologue() override;
-  void EvacuateEpilogue() override;
-  void Evacuate() override;
-  void EvacuatePagesInParallel() override;
-  void UpdatePointersAfterEvacuation() override;
-
-  std::unique_ptr<UpdatingItem> CreateRememberedSetUpdatingItem(
-      MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) override;
+  void EvacuatePrologue();
+  void EvacuateEpilogue();
+  void Evacuate();
+  void EvacuatePagesInParallel();
+  void UpdatePointersAfterEvacuation();
 
   void ReleaseEvacuationCandidates();
   // Returns number of aborted pages.
@@ -770,6 +722,8 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
   base::Mutex mutex_;
   base::Semaphore page_parallel_job_semaphore_{0};
 
+  Heap* heap_;
+
 #ifdef DEBUG
   enum CollectorState{IDLE,
                       PREPARE_GC,
@@ -850,7 +804,7 @@ class V8_NODISCARD EvacuationScope {
 };
 
 // Collector for young-generation only.
-class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
+class MinorMarkCompactCollector final {
 public:
   using MarkingState = MinorMarkingState;
   using NonAtomicMarkingState = MinorNonAtomicMarkingState;
@@ -858,7 +812,10 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
   static constexpr size_t kMaxParallelTasks = 8;
 
   explicit MinorMarkCompactCollector(Heap* heap);
-  ~MinorMarkCompactCollector() override;
+  ~MinorMarkCompactCollector();
+
+  inline Heap* heap() const { return heap_; }
+  inline Isolate* isolate();
 
   MarkingState* marking_state() { return &marking_state_; }
@@ -866,13 +823,16 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
     return &non_atomic_marking_state_;
   }
 
-  void SetUp() override;
-  void TearDown() override;
-  void CollectGarbage() override;
+  void SetUp();
+  void TearDown();
+  void CollectGarbage();
 
   void MakeIterable(Page* page, FreeSpaceTreatmentMode free_space_mode);
   void CleanupPromotedPages();
 
+  std::unique_ptr<UpdatingItem> CreateRememberedSetUpdatingItem(
+      MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode);
+
 private:
   using MarkingWorklist =
       ::heap::base::Worklist<HeapObject, 64 /* segment size */>;
@@ -887,30 +847,30 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
     return main_marking_visitor_;
   }
 
-  void MarkLiveObjects() override;
+  void MarkLiveObjects();
   void MarkRootSetInParallel(RootMarkingVisitor* root_visitor);
   V8_INLINE void MarkRootObject(HeapObject obj);
-  void DrainMarkingWorklist() override;
+  void DrainMarkingWorklist();
   void TraceFragmentation();
-  void ClearNonLiveReferences() override;
+  void ClearNonLiveReferences();
 
-  void EvacuatePrologue() override;
-  void EvacuateEpilogue() override;
-  void Evacuate() override;
-  void EvacuatePagesInParallel() override;
-  void UpdatePointersAfterEvacuation() override;
+  void EvacuatePrologue();
+  void EvacuateEpilogue();
+  void Evacuate();
+  void EvacuatePagesInParallel();
+  void UpdatePointersAfterEvacuation();
 
   std::unique_ptr<UpdatingItem> CreateToSpaceUpdatingItem(MemoryChunk* chunk,
                                                           Address start,
                                                           Address end);
-  std::unique_ptr<UpdatingItem> CreateRememberedSetUpdatingItem(
-      MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) override;
 
   int CollectToSpaceUpdatingItems(
       std::vector<std::unique_ptr<UpdatingItem>>* items);
 
   void SweepArrayBufferExtensions();
 
+  Heap* heap_;
   MarkingWorklist* worklist_;
   MarkingWorklist::Local main_thread_worklist_local_;
......
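
For reference, the page-promotion heuristic that moved out of the base class (ShouldMovePage above) reads naturally on its own: a new-space page is moved wholesale only if memory reduction is off, the page is allowed to be evacuated, its live bytes exceed the evacuation threshold, it does not contain the age mark (unless young objects are always promoted), and the old generation has room for it. A simplified restatement with stand-in types; the struct fields and the threshold value are assumptions for illustration, not V8's real definitions:

    #include <cstdint>

    enum class AlwaysPromoteYoung { kYes, kNo };

    // Stand-in types: assumed fields, not V8's real Page/Heap.
    struct Page {
      bool never_evacuate;
      bool contains_age_mark;
    };

    struct Heap {
      bool should_reduce_memory;
      std::intptr_t old_generation_headroom;
      bool CanExpandOldGeneration(std::intptr_t bytes) const {
        return bytes <= old_generation_headroom;
      }
    };

    constexpr std::intptr_t kNewSpacePageEvacuationThreshold = 32 * 1024;  // assumed

    // Mirrors the structure of ShouldMovePage() after the refactoring: the
    // heap is reached through the page rather than through a collector.
    bool ShouldMovePage(const Page& p, const Heap& heap, std::intptr_t live_bytes,
                        AlwaysPromoteYoung always_promote_young) {
      return !heap.should_reduce_memory && !p.never_evacuate &&
             live_bytes > kNewSpacePageEvacuationThreshold &&
             (always_promote_young == AlwaysPromoteYoung::kYes ||
              !p.contains_age_mark) &&
             heap.CanExpandOldGeneration(live_bytes);
    }

The one structural subtlety in the real patch is exactly that change of access path: the helper now obtains the heap via p->heap() instead of a collector member, which is what allows it to be a plain free function shared by both collectors.
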