Commit 4e8b60af authored by Leon Bettscheider, committed by V8 LUCI CQ

[heap] Introduce CollectorBase class

This CL is part of an effort to enable concurrent marking in MinorMC.

For this purpose, we plan to reuse the IncrementalMarking class, which
already implements part of the concurrent marking code for MajorMC
(and is currently coupled with MarkCompactCollector).

We plan to parameterize IncrementalMarking with CollectorBase, which
can be either MinorMarkCompactCollector or MarkCompactCollector, in
a subsequent CL.

Bug: v8:13012
Change-Id: I595bfdcb6e1abaa270d8037d889620433f26a416
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3749183
Commit-Queue: Leon Bettscheider <bettscheider@google.com>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#81797}
parent 263db307
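
As a rough sketch of the follow-up described in the commit message above (not part of this CL): IncrementalMarking could hold a CollectorBase* and drive marking through the virtual interface introduced here. The constructor signature and Step() body below are assumptions for illustration only; just the CollectorBase methods come from this CL.

// Sketch only: assumed shape of the planned follow-up, not committed code.
class IncrementalMarking {
 public:
  IncrementalMarking(Heap* heap, CollectorBase* collector)
      : heap_(heap), collector_(collector) {}

  // A marking step looks the same for MarkCompactCollector and
  // MinorMarkCompactCollector, since ProcessMarkingWorklist is virtual
  // on CollectorBase.
  void Step(size_t bytes_to_process) {
    collector_->ProcessMarkingWorklist(bytes_to_process);
  }

 private:
  Heap* heap_;
  CollectorBase* collector_;  // Either the major or the minor collector.
};

This is also why ProcessMarkingWorklist, VisitObject, RevisitObject, Finish, and sweeping_in_progress() are declared virtual on CollectorBase in the header diff below.
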
......@@ -2641,6 +2641,7 @@ void Heap::MinorMarkCompact() {
CppHeap::PauseConcurrentMarkingScope pause_cpp_marking(
CppHeap::From(cpp_heap_));
minor_mark_compact_collector_->Prepare();
minor_mark_compact_collector_->CollectGarbage();
SetGCState(NOT_IN_GC);
......
......@@ -43,7 +43,7 @@ void MarkCompactCollector::MarkRootObject(Root root, HeapObject obj) {
void MinorMarkCompactCollector::MarkRootObject(HeapObject obj) {
if (Heap::InYoungGeneration(obj) &&
non_atomic_marking_state_.WhiteToBlack(obj)) {
main_thread_worklists_local_->Push(obj);
local_marking_worklists_->Push(obj);
}
}
......@@ -279,8 +279,7 @@ typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::end() {
return iterator(chunk_, bitmap_, end_);
}
Isolate* MarkCompactCollector::isolate() { return heap()->isolate(); }
Isolate* MinorMarkCompactCollector::isolate() { return heap()->isolate(); }
Isolate* CollectorBase::isolate() { return heap()->isolate(); }
} // namespace internal
} // namespace v8
......
......@@ -476,17 +476,26 @@ int NumberOfParallelCompactionTasks(Heap* heap) {
}
return tasks;
}
} // namespace
MarkCompactCollector::MarkCompactCollector(Heap* heap)
CollectorBase::CollectorBase(Heap* heap, GarbageCollector collector)
: heap_(heap),
garbage_collector_(collector),
marking_state_(heap->isolate()),
non_atomic_marking_state_(heap->isolate()) {
DCHECK_NE(GarbageCollector::SCAVENGER, garbage_collector_);
}
bool CollectorBase::IsMajorMC() {
return !heap_->IsYoungGenerationCollector(garbage_collector_);
}
MarkCompactCollector::MarkCompactCollector(Heap* heap)
: CollectorBase(heap, GarbageCollector::MARK_COMPACTOR),
#ifdef DEBUG
state_(IDLE),
#endif
is_shared_heap_(heap->IsShared()),
marking_state_(heap->isolate()),
non_atomic_marking_state_(heap->isolate()),
sweeper_(new Sweeper(heap, non_atomic_marking_state())) {
}
......@@ -2346,9 +2355,8 @@ void MarkCompactCollector::MarkTransitiveClosureLinear() {
GCTracer::Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON_MARKING);
// Drain marking worklist and push all discovered objects into
// newly_discovered.
ProcessMarkingWorklist<
MarkCompactCollector::MarkingWorklistProcessingMode::
kTrackNewlyDiscoveredObjects>(0);
ProcessMarkingWorklist(
0, MarkingWorklistProcessingMode::kTrackNewlyDiscoveredObjects);
}
while (local_weak_objects()->discovered_ephemerons_local.Pop(&ephemeron)) {
......@@ -2426,9 +2434,14 @@ void MarkCompactCollector::PerformWrapperTracing() {
}
}
template <MarkCompactCollector::MarkingWorklistProcessingMode mode>
std::pair<size_t, size_t> MarkCompactCollector::ProcessMarkingWorklist(
size_t bytes_to_process) {
return ProcessMarkingWorklist(bytes_to_process,
MarkingWorklistProcessingMode::kDefault);
}
std::pair<size_t, size_t> MarkCompactCollector::ProcessMarkingWorklist(
size_t bytes_to_process, MarkingWorklistProcessingMode mode) {
HeapObject object;
size_t bytes_processed = 0;
size_t objects_processed = 0;
......@@ -2485,14 +2498,6 @@ std::pair<size_t, size_t> MarkCompactCollector::ProcessMarkingWorklist(
return std::make_pair(bytes_processed, objects_processed);
}
// Generate definitions for use in other files.
template std::pair<size_t, size_t> MarkCompactCollector::ProcessMarkingWorklist<
MarkCompactCollector::MarkingWorklistProcessingMode::kDefault>(
size_t bytes_to_process);
template std::pair<size_t, size_t> MarkCompactCollector::ProcessMarkingWorklist<
MarkCompactCollector::MarkingWorklistProcessingMode::
kTrackNewlyDiscoveredObjects>(size_t bytes_to_process);
bool MarkCompactCollector::ProcessEphemeron(HeapObject key, HeapObject value) {
if (marking_state()->IsBlackOrGrey(key)) {
if (marking_state()->WhiteToGrey(value)) {
......@@ -5470,11 +5475,16 @@ void MinorMarkCompactCollector::TearDown() {}
constexpr size_t MinorMarkCompactCollector::kMaxParallelTasks;
MinorMarkCompactCollector::MinorMarkCompactCollector(Heap* heap)
: heap_(heap),
marking_state_(heap->isolate()),
non_atomic_marking_state_(heap->isolate()),
: CollectorBase(heap, GarbageCollector::MINOR_MARK_COMPACTOR),
page_parallel_job_semaphore_(0) {}
std::pair<size_t, size_t> MinorMarkCompactCollector::ProcessMarkingWorklist(
size_t bytes_to_process) {
// TODO(v8:13012): Implement this later. It should be similar to
// MinorMarkCompactCollector::DrainMarkingWorklist.
return std::pair<size_t, size_t>(0, 0);
}
void MinorMarkCompactCollector::CleanupPromotedPages() {
for (Page* p : promoted_pages_) {
p->ClearFlag(Page::PAGE_NEW_NEW_PROMOTION);
......@@ -5494,7 +5504,18 @@ void MinorMarkCompactCollector::CleanupPromotedPages() {
promoted_large_pages_.clear();
}
void MinorMarkCompactCollector::VisitObject(HeapObject obj) {
main_marking_visitor_->Visit(obj.map(), obj);
}
void MinorMarkCompactCollector::RevisitObject(HeapObject obj) {
// TODO(v8:13012): Implement.
UNREACHABLE();
}
void MinorMarkCompactCollector::SweepArrayBufferExtensions() {
TRACE_GC(heap()->tracer(),
GCTracer::Scope::MINOR_MC_FINISH_SWEEP_ARRAY_BUFFERS);
heap_->array_buffer_sweeper()->RequestSweep(
ArrayBufferSweeper::SweepingType::kYoung);
}
......@@ -5654,6 +5675,27 @@ class MinorMarkCompactCollector::RootMarkingVisitor : public RootVisitor {
MinorMarkCompactCollector* const collector_;
};
void MinorMarkCompactCollector::Prepare() {
// Probably requires more.
if (!heap()->incremental_marking()->IsMarking()) {
StartMarking();
}
}
void MinorMarkCompactCollector::StartMarking() {
local_marking_worklists_ =
std::make_unique<MarkingWorklists::Local>(&marking_worklists_);
main_marking_visitor_ = std::make_unique<YoungGenerationMarkingVisitor>(
heap()->isolate(), marking_state(), local_marking_worklists());
}
void MinorMarkCompactCollector::Finish() {
TRACE_GC(heap()->tracer(),
GCTracer::Scope::MINOR_MC_FINISH);
local_marking_worklists_.reset();
main_marking_visitor_.reset();
}
void MinorMarkCompactCollector::CollectGarbage() {
DCHECK(!heap()->mark_compact_collector()->in_use());
#ifdef VERIFY_HEAP
......@@ -5676,6 +5718,8 @@ void MinorMarkCompactCollector::CollectGarbage() {
#endif // VERIFY_HEAP
Evacuate();
Finish();
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
YoungGenerationEvacuationVerifier verifier(heap());
......@@ -6080,15 +6124,16 @@ void MinorMarkCompactCollector::MarkRootSetInParallel(
// The main thread might hold local items, while GlobalPoolSize() ==
// 0. Flush to ensure these items are visible globally and picked up
// by the job.
main_thread_worklists_local_->Publish();
local_marking_worklists_->Publish();
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_ROOTS);
V8::GetCurrentPlatform()
->PostJob(v8::TaskPriority::kUserBlocking,
std::make_unique<YoungGenerationMarkingJob>(
isolate(), this, worklists(), std::move(marking_items)))
isolate(), this, marking_worklists(),
std::move(marking_items)))
->Join();
DCHECK(main_thread_worklists_local_->IsEmpty());
DCHECK(local_marking_worklists_->IsEmpty());
}
}
}
......@@ -6096,10 +6141,8 @@ void MinorMarkCompactCollector::MarkRootSetInParallel(
void MinorMarkCompactCollector::MarkLiveObjects() {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK);
main_thread_worklists_local_ =
std::make_unique<MarkingWorklists::Local>(&worklists_);
main_marking_visitor_ = std::make_unique<YoungGenerationMarkingVisitor>(
heap()->isolate(), marking_state(), main_thread_worklists_local());
DCHECK_NOT_NULL(local_marking_worklists_);
DCHECK_NOT_NULL(main_marking_visitor_);
PostponeInterruptsScope postpone(isolate());
......@@ -6123,22 +6166,19 @@ void MinorMarkCompactCollector::MarkLiveObjects() {
if (FLAG_minor_mc_trace_fragmentation) {
TraceFragmentation();
}
main_thread_worklists_local_.reset();
main_marking_visitor_.reset();
}
void MinorMarkCompactCollector::DrainMarkingWorklist() {
PtrComprCageBase cage_base(isolate());
HeapObject object;
while (main_thread_worklists_local_->Pop(&object)) {
while (local_marking_worklists_->Pop(&object)) {
DCHECK(!object.IsFreeSpaceOrFiller(cage_base));
DCHECK(object.IsHeapObject());
DCHECK(heap()->Contains(object));
DCHECK(non_atomic_marking_state()->IsBlack(object));
main_marking_visitor_->Visit(object);
}
DCHECK(main_thread_worklists_local_->IsEmpty());
DCHECK(local_marking_worklists_->IsEmpty());
}
void MinorMarkCompactCollector::TraceFragmentation() {
......
......@@ -325,18 +325,78 @@ class MainMarkingVisitor final
bool revisiting_object_;
};
class CollectorBase {
public:
GarbageCollector garbage_collector() { return garbage_collector_; }
virtual void SetUp() {}
virtual void TearDown() {}
virtual void CollectGarbage() = 0;
virtual void Prepare() = 0;
virtual void StartMarking() = 0;
MarkingState* marking_state() { return &marking_state_; }
NonAtomicMarkingState* non_atomic_marking_state() {
return &non_atomic_marking_state_;
}
inline Heap* heap() const { return heap_; }
inline Isolate* isolate();
MarkingWorklists* marking_worklists() { return &marking_worklists_; }
MarkingWorklists::Local* local_marking_worklists() {
return local_marking_worklists_.get();
}
// Drains the main thread marking worklist until the specified number of
// bytes are processed. If the number of bytes is zero, then the worklist
// is drained until it is empty.
virtual std::pair<size_t, size_t> ProcessMarkingWorklist(
size_t bytes_to_process) = 0;
// Used by incremental marking for object that change their layout.
virtual void VisitObject(HeapObject obj) = 0;
// Used by incremental marking for black-allocated objects.
virtual void RevisitObject(HeapObject obj) = 0;
virtual bool sweeping_in_progress() const = 0;
virtual void Finish() = 0;
bool IsMajorMC();
private:
std::vector<Page*> new_space_evacuation_pages_;
std::vector<LargePage*> promoted_large_pages_;
protected:
Heap* heap_;
GarbageCollector garbage_collector_;
MarkingWorklists marking_worklists_;
std::unique_ptr<MarkingWorklists::Local> local_marking_worklists_;
MarkingState marking_state_;
NonAtomicMarkingState non_atomic_marking_state_;
explicit CollectorBase(Heap* heap, GarbageCollector collector);
virtual ~CollectorBase() = default;
};
// Collector for young and old generation.
class MarkCompactCollector final {
class MarkCompactCollector final : public CollectorBase {
public:
using MarkingVisitor = MainMarkingVisitor<MarkingState>;
class RootMarkingVisitor;
class CustomRootBodyMarkingVisitor;
class SharedHeapObjectVisitor;
class RootMarkingVisitor;
enum IterationMode {
kKeepMarking,
kClearMarkbits,
enum class StartCompactionMode {
kIncremental,
kAtomic,
};
enum class MarkingWorklistProcessingMode {
......@@ -344,24 +404,21 @@ class MarkCompactCollector final {
kTrackNewlyDiscoveredObjects
};
enum class StartCompactionMode {
kIncremental,
kAtomic,
};
static MarkCompactCollector* From(CollectorBase* collector) {
return static_cast<MarkCompactCollector*>(collector);
}
MarkingState* marking_state() { return &marking_state_; }
std::pair<size_t, size_t> ProcessMarkingWorklist(
size_t bytes_to_process) final;
NonAtomicMarkingState* non_atomic_marking_state() {
return &non_atomic_marking_state_;
}
std::pair<size_t, size_t> ProcessMarkingWorklist(
size_t bytes_to_process, MarkingWorklistProcessingMode mode);
inline Heap* heap() const { return heap_; }
inline Isolate* isolate();
void SetUp() final;
void TearDown() final;
void SetUp();
void TearDown();
// Performs a global garbage collection.
void CollectGarbage();
void CollectGarbage() final;
void CollectEvacuationCandidates(PagedSpace* space);
......@@ -369,7 +426,7 @@ class MarkCompactCollector final {
// Prepares for GC by resetting relocation info in old and map spaces and
// choosing spaces to compact.
void Prepare();
void Prepare() final;
// Stop concurrent marking (either by preempting it right away or waiting for
// it to complete as requested by |stop_request|).
......@@ -380,7 +437,7 @@ class MarkCompactCollector final {
void AbortCompaction();
void StartMarking();
void StartMarking() final;
static inline bool IsOnEvacuationCandidate(Object obj) {
return Page::FromAddress(obj.ptr())->IsEvacuationCandidate();
......@@ -428,40 +485,16 @@ class MarkCompactCollector final {
void DrainSweepingWorklistForSpace(AllocationSpace space);
// Checks if sweeping is in progress right now on any space.
bool sweeping_in_progress() const { return sweeper_->sweeping_in_progress(); }
bool sweeping_in_progress() const final {
return sweeper_->sweeping_in_progress();
}
void set_evacuation(bool evacuation) { evacuation_ = evacuation; }
bool evacuation() const { return evacuation_; }
MarkingWorklists* marking_worklists() { return &marking_worklists_; }
MarkingWorklists::Local* local_marking_worklists() {
return local_marking_worklists_.get();
}
WeakObjects* weak_objects() { return &weak_objects_; }
WeakObjects::Local* local_weak_objects() { return local_weak_objects_.get(); }
inline void AddTransitionArray(TransitionArray array);
void AddNewlyDiscovered(HeapObject object) {
if (ephemeron_marking_.newly_discovered_overflowed) return;
if (ephemeron_marking_.newly_discovered.size() <
ephemeron_marking_.newly_discovered_limit) {
ephemeron_marking_.newly_discovered.push_back(object);
} else {
ephemeron_marking_.newly_discovered_overflowed = true;
}
}
void ResetNewlyDiscovered() {
ephemeron_marking_.newly_discovered_overflowed = false;
ephemeron_marking_.newly_discovered.clear();
}
Sweeper* sweeper() { return sweeper_; }
#ifdef DEBUG
......@@ -485,22 +518,33 @@ class MarkCompactCollector final {
return code_flush_mode_;
}
WeakObjects* weak_objects() { return &weak_objects_; }
WeakObjects::Local* local_weak_objects() { return local_weak_objects_.get(); }
void VisitObject(HeapObject obj) final;
void RevisitObject(HeapObject obj) final;
void AddNewlyDiscovered(HeapObject object) {
if (ephemeron_marking_.newly_discovered_overflowed) return;
if (ephemeron_marking_.newly_discovered.size() <
ephemeron_marking_.newly_discovered_limit) {
ephemeron_marking_.newly_discovered.push_back(object);
} else {
ephemeron_marking_.newly_discovered_overflowed = true;
}
}
void ResetNewlyDiscovered() {
ephemeron_marking_.newly_discovered_overflowed = false;
ephemeron_marking_.newly_discovered.clear();
}
explicit MarkCompactCollector(Heap* heap);
~MarkCompactCollector();
~MarkCompactCollector() final;
// Used by wrapper tracing.
V8_INLINE void MarkExternallyReferencedObject(HeapObject obj);
// Used by incremental marking for object that change their layout.
void VisitObject(HeapObject obj);
// Used by incremental marking for black-allocated objects.
void RevisitObject(HeapObject obj);
// Drains the main thread marking worklist until the specified number of
// bytes are processed. If the number of bytes is zero, then the worklist
// is drained until it is empty.
template <MarkingWorklistProcessingMode mode =
MarkingWorklistProcessingMode::kDefault>
std::pair<size_t, size_t> ProcessMarkingWorklist(size_t bytes_to_process);
std::unique_ptr<UpdatingItem> CreateRememberedSetUpdatingItem(
MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode);
......@@ -521,7 +565,7 @@ class MarkCompactCollector final {
void RecordObjectStats();
// Finishes GC, performs heap verification if enabled.
void Finish();
void Finish() final;
// Free unmarked ArrayBufferExtensions.
void SweepArrayBufferExtensions();
......@@ -668,8 +712,6 @@ class MarkCompactCollector final {
base::Mutex mutex_;
base::Semaphore page_parallel_job_semaphore_{0};
Heap* heap_;
#ifdef DEBUG
enum CollectorState{IDLE,
PREPARE_GC,
......@@ -693,13 +735,10 @@ class MarkCompactCollector final {
bool have_code_to_deoptimize_ = false;
bool parallel_marking_ = false;
MarkingWorklists marking_worklists_;
WeakObjects weak_objects_;
EphemeronMarking ephemeron_marking_;
std::unique_ptr<MarkingVisitor> marking_visitor_;
std::unique_ptr<MarkingWorklists::Local> local_marking_worklists_;
std::unique_ptr<WeakObjects::Local> local_weak_objects_;
NativeContextInferrer native_context_inferrer_;
NativeContextStats native_context_stats_;
......@@ -715,9 +754,6 @@ class MarkCompactCollector final {
aborted_evacuation_candidates_due_to_flags_;
std::vector<LargePage*> promoted_large_pages_;
MarkingState marking_state_;
NonAtomicMarkingState non_atomic_marking_state_;
Sweeper* sweeper_;
// Counts the number of major mark-compact collections. The counter is
......@@ -751,25 +787,25 @@ class V8_NODISCARD EvacuationScope {
};
// Collector for young-generation only.
class MinorMarkCompactCollector final {
class MinorMarkCompactCollector final : public CollectorBase {
public:
static constexpr size_t kMaxParallelTasks = 8;
explicit MinorMarkCompactCollector(Heap* heap);
~MinorMarkCompactCollector();
inline Heap* heap() const { return heap_; }
inline Isolate* isolate();
static MinorMarkCompactCollector* From(CollectorBase* collector) {
return static_cast<MinorMarkCompactCollector*>(collector);
}
MarkingState* marking_state() { return &marking_state_; }
explicit MinorMarkCompactCollector(Heap* heap);
~MinorMarkCompactCollector() final;
NonAtomicMarkingState* non_atomic_marking_state() {
return &non_atomic_marking_state_;
}
std::pair<size_t, size_t> ProcessMarkingWorklist(
size_t bytes_to_process) final;
void SetUp();
void TearDown();
void CollectGarbage();
void SetUp() final;
void TearDown() final;
void CollectGarbage() final;
void Prepare() final;
void StartMarking() final;
void MakeIterable(Page* page, FreeSpaceTreatmentMode free_space_mode);
void CleanupPromotedPages();
......@@ -777,18 +813,22 @@ class MinorMarkCompactCollector final {
std::unique_ptr<UpdatingItem> CreateRememberedSetUpdatingItem(
MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode);
MarkingWorklists::Local* main_thread_worklists_local() {
return main_thread_worklists_local_.get();
void Finish() final;
bool sweeping_in_progress() const final {
// TODO(v8:13012): Fix this once sweeping is implemented.
return false;
}
void VisitObject(HeapObject obj) final;
void RevisitObject(HeapObject obj) final;
private:
class RootMarkingVisitor;
static const int kNumMarkers = 8;
static const int kMainMarker = 0;
inline MarkingWorklists* worklists() { return &worklists_; }
void MarkLiveObjects();
void MarkRootSetInParallel(RootMarkingVisitor* root_visitor);
V8_INLINE void MarkRootObject(HeapObject obj);
......@@ -811,15 +851,8 @@ class MinorMarkCompactCollector final {
void SweepArrayBufferExtensions();
Heap* heap_;
MarkingWorklists worklists_;
std::unique_ptr<MarkingWorklists::Local> main_thread_worklists_local_;
std::unique_ptr<YoungGenerationMarkingVisitor> main_marking_visitor_;
MarkingState marking_state_;
NonAtomicMarkingState non_atomic_marking_state_;
base::Semaphore page_parallel_job_semaphore_;
std::vector<Page*> new_space_evacuation_pages_;
std::vector<Page*> promoted_pages_;
......
......@@ -612,6 +612,8 @@
F(MINOR_MC_EVACUATE_UPDATE_POINTERS_SLOTS) \
F(MINOR_MC_EVACUATE_UPDATE_POINTERS_TO_NEW_ROOTS) \
F(MINOR_MC_EVACUATE_UPDATE_POINTERS_WEAK) \
F(MINOR_MC_FINISH) \
F(MINOR_MC_FINISH_SWEEP_ARRAY_BUFFERS) \
F(MINOR_MC_MARK) \
F(MINOR_MC_MARK_GLOBAL_HANDLES) \
F(MINOR_MC_MARK_PARALLEL) \
......