Commit 817539a3 authored by Michael Lippautz, committed by Commit Bot

cppgc: Untangle MarkingVisitor

- Untangles MarkingVisitor from Marker.
- Adds ConservativeTracingVisitor encapsulating conservative
  tracing.

This enables the following architecture:
- Marking visitors (unified + stand-alone) inherit from
  MarkingVisitor;
- Markers (unified + stand-alone) inherit from (or directly use) Marker

Bug: chromium:1056170
Change-Id: I05304c231d2983dab5611d05cf4aa8bfa3ed5e20
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2245600
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#68366}
parent 5e29a9bc
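
Note: the layering described in the bullets above can be sketched as a small, self-contained toy model (names simplified and behavior elided; the unified-heap classes are not part of this change and are only indicated in comments):

// Toy model of the intended layering; compiles stand-alone.
class HeapBase {};
class PageBackend {};

// Conservative tracing depends only on heap-level state, not on a Marker.
class ConservativeTracingVisitor {
 public:
  ConservativeTracingVisitor(HeapBase& heap, PageBackend& page_backend)
      : heap_(heap), page_backend_(page_backend) {}
  virtual ~ConservativeTracingVisitor() = default;

 protected:
  HeapBase& heap_;
  PageBackend& page_backend_;
};

// Marking visitors (unified + stand-alone) inherit from MarkingVisitor.
class MarkingVisitor : public ConservativeTracingVisitor {
  using ConservativeTracingVisitor::ConservativeTracingVisitor;
};
class MutatorThreadMarkingVisitor : public MarkingVisitor {
  using MarkingVisitor::MarkingVisitor;
};
// class UnifiedHeapMarkingVisitor : public MarkingVisitor { ... };

// Markers (unified + stand-alone) inherit from (or directly use) Marker;
// the visitor never calls back into the Marker.
class Marker {
 public:
  explicit Marker(HeapBase& heap) : heap_(heap) {}
  virtual ~Marker() = default;
  HeapBase& heap() { return heap_; }

 private:
  HeapBase& heap_;
};
// class UnifiedHeapMarker : public Marker { ... };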
@@ -69,7 +69,7 @@ void Heap::CollectGarbage(Config config) {
   epoch_++;
 
   // "Marking".
-  marker_ = std::make_unique<Marker>(this);
+  marker_ = std::make_unique<Marker>(AsBase());
   const Marker::MarkingConfig marking_config{config.stack_state,
                                              config.marking_type};
   marker_->StartMarking(marking_config);
......
@@ -34,6 +34,9 @@ class V8_EXPORT_PRIVATE Heap final : public HeapBase,
        cppgc::Heap::HeapOptions options);
   ~Heap() final;
 
+  HeapBase& AsBase() { return *this; }
+  const HeapBase& AsBase() const { return *this; }
+
   void CollectGarbage(Config config) final;
 
   size_t epoch() const final { return epoch_; }
......
@@ -21,25 +21,29 @@ namespace internal {
 
 namespace {
 
-void EnterIncrementalMarkingIfNeeded(Marker::MarkingConfig config, Heap* heap) {
+void EnterIncrementalMarkingIfNeeded(
+    Marker::MarkingConfig config,
+    HeapBase& heap) {  // NOLINT(runtime/references)
   if (config.marking_type == Marker::MarkingConfig::MarkingType::kIncremental ||
       config.marking_type ==
           Marker::MarkingConfig::MarkingType::kIncrementalAndConcurrent) {
     ProcessHeap::EnterIncrementalOrConcurrentMarking();
   }
 #if defined(CPPGC_CAGED_HEAP)
-  heap->caged_heap().local_data().is_marking_in_progress = true;
+  heap.caged_heap().local_data().is_marking_in_progress = true;
 #endif
 }
 
-void ExitIncrementalMarkingIfNeeded(Marker::MarkingConfig config, Heap* heap) {
+void ExitIncrementalMarkingIfNeeded(
+    Marker::MarkingConfig config,
+    HeapBase& heap) {  // NOLINT(runtime/references)
   if (config.marking_type == Marker::MarkingConfig::MarkingType::kIncremental ||
       config.marking_type ==
           Marker::MarkingConfig::MarkingType::kIncrementalAndConcurrent) {
     ProcessHeap::ExitIncrementalOrConcurrentMarking();
   }
 #if defined(CPPGC_CAGED_HEAP)
-  heap->caged_heap().local_data().is_marking_in_progress = false;
+  heap.caged_heap().local_data().is_marking_in_progress = false;
 #endif
 }
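
Note: the is_marking_in_progress flag toggled above is what lets the caged-heap write barrier decide cheaply whether marking work is needed. Roughly, and only as a simplified sketch (not the literal barrier code; names are from include/cppgc/internal/, assumed to match):

#include <cstdint>

#include "include/cppgc/internal/api-constants.h"
#include "include/cppgc/internal/caged-heap-local-data.h"

// The cage reservation is aligned to its size, so masking any address that
// points into the cage yields the cage base, where CagedHeapLocalData lives.
static bool IsMarkingInProgress(const void* address_in_cage) {
  using cppgc::internal::api_constants::kCagedHeapReservationAlignment;
  const uintptr_t base = reinterpret_cast<uintptr_t>(address_in_cage) &
                         ~(kCagedHeapReservationAlignment - 1);
  return reinterpret_cast<const cppgc::internal::CagedHeapLocalData*>(base)
      ->is_marking_in_progress;
}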
@@ -66,7 +70,7 @@ bool DrainWorklistWithDeadline(v8::base::TimeTicks deadline, Worklist* worklist,
 
 constexpr int Marker::kMutatorThreadId;
 
-Marker::Marker(Heap* heap)
+Marker::Marker(HeapBase& heap)
     : heap_(heap), marking_visitor_(CreateMutatorThreadMarkingVisitor()) {}
 
 Marker::~Marker() {
@@ -92,33 +96,40 @@ Marker::~Marker() {
 }
 
 void Marker::StartMarking(MarkingConfig config) {
-  heap()->stats_collector()->NotifyMarkingStarted();
+  heap().stats_collector()->NotifyMarkingStarted();
 
   config_ = config;
   VisitRoots();
   EnterIncrementalMarkingIfNeeded(config, heap());
 }
 
-void Marker::FinishMarking(MarkingConfig config) {
+void Marker::EnterAtomicPause(MarkingConfig config) {
   ExitIncrementalMarkingIfNeeded(config_, heap());
   config_ = config;
 
   // Reset LABs before trying to conservatively mark in-construction objects.
   // This is also needed in preparation for sweeping.
-  heap_->object_allocator().ResetLinearAllocationBuffers();
+  heap().object_allocator().ResetLinearAllocationBuffers();
   if (config_.stack_state == MarkingConfig::StackState::kNoHeapPointers) {
     FlushNotFullyConstructedObjects();
   } else {
     MarkNotFullyConstructedObjects();
   }
-  AdvanceMarkingWithDeadline(v8::base::TimeDelta::Max());
+}
 
-  heap()->stats_collector()->NotifyMarkingCompleted(
+void Marker::LeaveAtomicPause() {
+  heap().stats_collector()->NotifyMarkingCompleted(
       marking_visitor_->marked_bytes());
 }
 
+void Marker::FinishMarking(MarkingConfig config) {
+  EnterAtomicPause(config);
+  AdvanceMarkingWithDeadline(v8::base::TimeDelta::Max());
+  LeaveAtomicPause();
+}
+
 void Marker::ProcessWeakness() {
-  heap_->GetWeakPersistentRegion().Trace(marking_visitor_.get());
+  heap().GetWeakPersistentRegion().Trace(marking_visitor_.get());
 
   // Call weak callbacks on objects that may now be pointing to dead objects.
   WeakCallbackItem item;
@@ -134,11 +145,12 @@ void Marker::ProcessWeakness() {
 void Marker::VisitRoots() {
   // Reset LABs before scanning roots. LABs are cleared to allow
   // ObjectStartBitmap handling without considering LABs.
-  heap_->object_allocator().ResetLinearAllocationBuffers();
+  heap().object_allocator().ResetLinearAllocationBuffers();
 
-  heap_->GetStrongPersistentRegion().Trace(marking_visitor_.get());
-  if (config_.stack_state != MarkingConfig::StackState::kNoHeapPointers)
-    heap_->stack()->IteratePointers(marking_visitor_.get());
+  heap().GetStrongPersistentRegion().Trace(marking_visitor_.get());
+  if (config_.stack_state != MarkingConfig::StackState::kNoHeapPointers) {
+    heap().stack()->IteratePointers(marking_visitor_.get());
+  }
 }
 
 std::unique_ptr<MutatorThreadMarkingVisitor>
@@ -206,8 +218,7 @@ void Marker::MarkNotFullyConstructedObjects() {
   NotFullyConstructedWorklist::View view(&not_fully_constructed_worklist_,
                                          kMutatorThreadId);
   while (view.Pop(&item)) {
-    marking_visitor_->ConservativelyMarkAddress(
-        BasePage::FromPayload(item), reinterpret_cast<ConstAddress>(item));
+    marking_visitor_->TraceConservativelyIfNeeded(item);
   }
 }
......
@@ -16,10 +16,19 @@
 namespace cppgc {
 namespace internal {
 
-class Heap;
+class HeapBase;
 class HeapObjectHeader;
 class MutatorThreadMarkingVisitor;
 
+// Marking algorithm. Example for a valid call sequence creating the marking
+// phase:
+// 1. StartMarking()
+// 2. AdvanceMarkingWithDeadline() [Optional, depending on environment.]
+// 3. EnterAtomicPause()
+// 4. AdvanceMarkingWithDeadline()
+// 5. LeaveAtomicPause()
+//
+// Alternatively, FinishMarking combines steps 3.-5.
 class V8_EXPORT_PRIVATE Marker {
   static constexpr int kNumConcurrentMarkers = 0;
   static constexpr int kNumMarkers = 1 + kNumConcurrentMarkers;
@@ -59,7 +68,7 @@ class V8_EXPORT_PRIVATE Marker {
     MarkingType marking_type = MarkingType::kAtomic;
   };
 
-  explicit Marker(Heap* heap);
+  explicit Marker(HeapBase& heap);  // NOLINT(runtime/references)
   virtual ~Marker();
 
   Marker(const Marker&) = delete;
@@ -68,14 +77,16 @@ class V8_EXPORT_PRIVATE Marker {
   // Initialize marking according to the given config. This method will
   // trigger incremental/concurrent marking if needed.
   void StartMarking(MarkingConfig config);
-  // Finalize marking. This method stops incremental/concurrent marking
-  // if exists and performs atomic pause marking. FinishMarking may
-  // update the MarkingConfig, e.g. if the stack state has changed.
+  // Combines:
+  // - EnterAtomicPause()
+  // - AdvanceMarkingWithDeadline()
+  // - LeaveAtomicPause()
   void FinishMarking(MarkingConfig config);
   void ProcessWeakness();
 
-  Heap* heap() { return heap_; }
+  HeapBase& heap() { return heap_; }
   MarkingWorklist* marking_worklist() { return &marking_worklist_; }
   NotFullyConstructedWorklist* not_fully_constructed_worklist() {
     return &not_fully_constructed_worklist_;
@@ -97,14 +108,26 @@ class V8_EXPORT_PRIVATE Marker {
   virtual std::unique_ptr<MutatorThreadMarkingVisitor>
   CreateMutatorThreadMarkingVisitor();
 
+  // Signals entering the atomic marking pause. The method
+  // - stops incremental/concurrent marking;
+  // - flushes back any in-construction worklists if needed;
+  // - Updates the MarkingConfig if the stack state has changed;
+  void EnterAtomicPause(MarkingConfig config);
+
+  // Makes marking progress.
+  virtual bool AdvanceMarkingWithDeadline(v8::base::TimeDelta);
+
+  // Signals leaving the atomic marking pause. This method expects no more
+  // objects to be marked and merely updates marking states if needed.
+  void LeaveAtomicPause();
+
  private:
   void VisitRoots();
 
-  bool AdvanceMarkingWithDeadline(v8::base::TimeDelta);
   void FlushNotFullyConstructedObjects();
   void MarkNotFullyConstructedObjects();
 
-  Heap* const heap_;
+  HeapBase& heap_;
   MarkingConfig config_ = MarkingConfig::Default();
   std::unique_ptr<MutatorThreadMarkingVisitor> marking_visitor_;
......
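
Note: the call sequence documented at the top of marker.h can be spelled out from a derived marker's perspective (the pause methods are protected). A hypothetical sketch; EmbedderMarker and the 2 ms budget are assumptions for illustration:

#include "src/heap/cppgc/marker.h"

namespace cppgc {
namespace internal {

// Hypothetical marker subclass spelling out steps 1.-5. from the comment.
class EmbedderMarker final : public Marker {
 public:
  using Marker::Marker;

  void RunMarkingPhase(MarkingConfig config) {
    StartMarking(config);                                    // 1.
    // 2. Optional incremental step(s) on a small budget (2 ms assumed here).
    AdvanceMarkingWithDeadline(v8::base::TimeDelta::FromMilliseconds(2));
    EnterAtomicPause(config);                                // 3.
    AdvanceMarkingWithDeadline(v8::base::TimeDelta::Max());  // 4.
    LeaveAtomicPause();                                      // 5.
    // Steps 3.-5. are exactly what FinishMarking(config) performs.
  }
};

}  // namespace internal
}  // namespace cppgc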
@@ -8,8 +8,6 @@
 #include "src/heap/cppgc/heap-object-header-inl.h"
 #include "src/heap/cppgc/heap-page-inl.h"
 #include "src/heap/cppgc/heap.h"
-#include "src/heap/cppgc/page-memory-inl.h"
-#include "src/heap/cppgc/sanitizers.h"
 
 namespace cppgc {
 namespace internal {
@@ -19,13 +17,14 @@ bool MarkingVisitor::IsInConstruction(const HeapObjectHeader& header) {
   return header.IsInConstruction<HeapObjectHeader::AccessMode::kNonAtomic>();
 }
 
-MarkingVisitor::MarkingVisitor(Marker* marking_handler, int task_id)
-    : marker_(marking_handler),
-      marking_worklist_(marking_handler->marking_worklist(), task_id),
-      not_fully_constructed_worklist_(
-          marking_handler->not_fully_constructed_worklist(), task_id),
-      weak_callback_worklist_(marking_handler->weak_callback_worklist(),
-                              task_id) {}
+MarkingVisitor::MarkingVisitor(
+    HeapBase& heap, Marker::MarkingWorklist* marking_worklist,
+    Marker::NotFullyConstructedWorklist* not_fully_constructed_worklist,
+    Marker::WeakCallbackWorklist* weak_callback_worklist, int task_id)
+    : ConservativeTracingVisitor(heap, *heap.page_backend()),
+      marking_worklist_(marking_worklist, task_id),
+      not_fully_constructed_worklist_(not_fully_constructed_worklist, task_id),
+      weak_callback_worklist_(weak_callback_worklist, task_id) {}
 
 void MarkingVisitor::AccountMarkedBytes(const HeapObjectHeader& header) {
   marked_bytes_ +=
@@ -81,6 +80,17 @@ void MarkingVisitor::VisitWeakRoot(const void* object, TraceDescriptor desc,
   weak_callback(LivenessBrokerFactory::Create(), weak_root);
 }
 
+void MarkingVisitor::VisitPointer(const void* address) {
+  TraceConservativelyIfNeeded(address);
+}
+
+void MarkingVisitor::VisitConservatively(HeapObjectHeader& header,
+                                         TraceConservativelyCallback callback) {
+  MarkHeaderNoTracing(&header);
+  callback(this, header);
+  AccountMarkedBytes(header);
+}
+
 void MarkingVisitor::MarkHeader(HeapObjectHeader* header,
                                 TraceDescriptor desc) {
   DCHECK(header);
@@ -96,7 +106,7 @@ void MarkingVisitor::MarkHeader(HeapObjectHeader* header,
 bool MarkingVisitor::MarkHeaderNoTracing(HeapObjectHeader* header) {
   DCHECK(header);
   // A GC should only mark the objects that belong in its heap.
-  DCHECK_EQ(marker_->heap(), BasePage::FromPayload(header)->heap());
+  DCHECK_EQ(&heap_, BasePage::FromPayload(header)->heap());
   // Never mark free space objects. This would e.g. hint to marking a promptly
   // freed backing store.
   DCHECK(!header->IsFree());
@@ -127,68 +137,11 @@ void MarkingVisitor::DynamicallyMarkAddress(ConstAddress address) {
   }
 }
 
-void MarkingVisitor::ConservativelyMarkAddress(const BasePage* page,
-                                               ConstAddress address) {
-  HeapObjectHeader* const header =
-      page->TryObjectHeaderFromInnerAddress(const_cast<Address>(address));
-  if (!header || header->IsMarked()) return;
-
-  // Simple case for fully constructed objects. This just adds the object to
-  // the regular marking worklist.
-  if (!IsInConstruction(*header)) {
-    MarkHeader(
-        header,
-        {header->Payload(),
-         GlobalGCInfoTable::GCInfoFromIndex(header->GetGCInfoIndex()).trace});
-    return;
-  }
-
-  // This case is reached for not-fully-constructed objects with vtables.
-  // We can differentiate multiple cases:
-  // 1. No vtable set up. Example:
-  //      class A : public GarbageCollected<A> { virtual void f() = 0; };
-  //      class B : public A { B() : A(foo()) {}; };
-  //    The vtable for A is not set up if foo() allocates and triggers a GC.
-  //
-  // 2. Vtables properly set up (non-mixin case).
-  // 3. Vtables not properly set up (mixin) if GC is allowed during mixin
-  //    construction.
-  //
-  // We use a simple conservative approach for these cases as they are not
-  // performance critical.
-  MarkHeaderNoTracing(header);
-  Address* payload = reinterpret_cast<Address*>(header->Payload());
-  const size_t payload_size = header->GetSize();
-  for (size_t i = 0; i < (payload_size / sizeof(Address)); ++i) {
-    Address maybe_ptr = payload[i];
-#if defined(MEMORY_SANITIZER)
-    // |payload| may be uninitialized by design or just contain padding bytes.
-    // Copy into a local variable that is unpoisoned for conservative marking.
-    // Copy into a temporary variable to maintain the original MSAN state.
-    MSAN_UNPOISON(&maybe_ptr, sizeof(maybe_ptr));
-#endif
-    if (maybe_ptr) VisitPointer(maybe_ptr);
-  }
-  AccountMarkedBytes(*header);
-}
-
-void MarkingVisitor::VisitPointer(const void* address) {
-  // TODO(chromium:1056170): Add page bloom filter
-  const BasePage* page =
-      reinterpret_cast<const BasePage*>(marker_->heap()->page_backend()->Lookup(
-          static_cast<ConstAddress>(address)));
-  if (!page) return;
-  DCHECK_EQ(marker_->heap(), page->heap());
-  ConservativelyMarkAddress(page, reinterpret_cast<ConstAddress>(address));
-}
-
 MutatorThreadMarkingVisitor::MutatorThreadMarkingVisitor(Marker* marker)
-    : MarkingVisitor(marker, Marker::kMutatorThreadId) {}
+    : MarkingVisitor(marker->heap(), marker->marking_worklist(),
+                     marker->not_fully_constructed_worklist(),
+                     marker->weak_callback_worklist(),
+                     Marker::kMutatorThreadId) {}
 
 }  // namespace internal
 }  // namespace cppgc
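
Note: case 1 from the removed comment (which survives in visitor.cc below) can be made concrete. A minimal sketch built on the comment's own A/B example; foo() is the hypothetical allocating helper, left undefined here:

#include "include/cppgc/allocation.h"
#include "include/cppgc/garbage-collected.h"
#include "include/cppgc/visitor.h"

// Hypothetical helper: allocates on the cppgc heap and may trigger a GC.
int foo();

class A : public cppgc::GarbageCollected<A> {
 public:
  explicit A(int) {}
  virtual void f() = 0;
  virtual void Trace(cppgc::Visitor*) const {}
};

class B final : public A {
 public:
  // If a GC triggered inside foo() scans the stack, it finds B's storage
  // while A's vtable is not yet set up, so GCInfo/Trace dispatch is not
  // possible; the object's payload must be scanned conservatively instead.
  B() : A(foo()) {}
  void f() final {}
};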
@@ -8,6 +8,7 @@
 #include "include/cppgc/source-location.h"
 #include "include/cppgc/trace-trait.h"
 #include "include/v8config.h"
+#include "src/base/macros.h"
 #include "src/heap/cppgc/globals.h"
 #include "src/heap/cppgc/heap.h"
 #include "src/heap/cppgc/marker.h"
@@ -20,9 +21,11 @@ namespace internal {
 class BasePage;
 class HeapObjectHeader;
 
-class MarkingVisitor : public VisitorBase, public StackVisitor {
+class MarkingVisitor : public ConservativeTracingVisitor, public StackVisitor {
  public:
-  MarkingVisitor(Marker*, int);
+  MarkingVisitor(HeapBase&, Marker::MarkingWorklist*,
+                 Marker::NotFullyConstructedWorklist*,
+                 Marker::WeakCallbackWorklist*, int);
   virtual ~MarkingVisitor() = default;
 
   MarkingVisitor(const MarkingVisitor&) = delete;
@@ -31,7 +34,6 @@ class MarkingVisitor : public VisitorBase, public StackVisitor {
   void FlushWorklists();
 
   void DynamicallyMarkAddress(ConstAddress);
-  void ConservativelyMarkAddress(const BasePage*, ConstAddress);
 
   void AccountMarkedBytes(const HeapObjectHeader&);
   size_t marked_bytes() const { return marked_bytes_; }
@@ -45,7 +47,10 @@ class MarkingVisitor : public VisitorBase, public StackVisitor {
   void VisitRoot(const void*, TraceDescriptor) override;
   void VisitWeakRoot(const void*, TraceDescriptor, WeakCallback,
                      const void*) override;
+  void VisitConservatively(HeapObjectHeader&,
+                           TraceConservativelyCallback) override;
 
   // StackMarker interface.
   void VisitPointer(const void*) override;
 
  private:
@@ -53,7 +58,6 @@ class MarkingVisitor : public VisitorBase, public StackVisitor {
   bool MarkHeaderNoTracing(HeapObjectHeader*);
   void RegisterWeakCallback(WeakCallback, const void*) override;
 
-  Marker* const marker_;
   Marker::MarkingWorklist::View marking_worklist_;
   Marker::NotFullyConstructedWorklist::View not_fully_constructed_worklist_;
   Marker::WeakCallbackWorklist::View weak_callback_worklist_;
......
@@ -4,6 +4,12 @@
 #include "src/heap/cppgc/visitor.h"
 
+#include "src/heap/cppgc/gc-info-table.h"
+#include "src/heap/cppgc/heap-object-header-inl.h"
+#include "src/heap/cppgc/heap-page.h"
+#include "src/heap/cppgc/page-memory-inl.h"
+#include "src/heap/cppgc/sanitizers.h"
+
 namespace cppgc {
 
 #ifdef V8_ENABLE_CHECKS
@@ -13,4 +19,58 @@ void Visitor::CheckObjectNotInConstruction(const void* address) {
 }
 #endif  // V8_ENABLE_CHECKS
 
+namespace internal {
+
+ConservativeTracingVisitor::ConservativeTracingVisitor(
+    HeapBase& heap, PageBackend& page_backend)
+    : heap_(heap), page_backend_(page_backend) {}
+
+namespace {
+
+void TraceConservatively(ConservativeTracingVisitor* visitor,
+                         const HeapObjectHeader& header) {
+  Address* payload = reinterpret_cast<Address*>(header.Payload());
+  const size_t payload_size = header.GetSize();
+  for (size_t i = 0; i < (payload_size / sizeof(Address)); ++i) {
+    Address maybe_ptr = payload[i];
+#if defined(MEMORY_SANITIZER)
+    // |payload| may be uninitialized by design or just contain padding bytes.
+    // Copy into a local variable that is not poisoned for conservative
+    // marking. Copy into a temporary variable to maintain the original MSAN
+    // state.
+    MSAN_UNPOISON(&maybe_ptr, sizeof(maybe_ptr));
+#endif
+    if (maybe_ptr) {
+      visitor->TraceConservativelyIfNeeded(maybe_ptr);
+    }
+  }
+}
+
+}  // namespace
+
+void ConservativeTracingVisitor::TraceConservativelyIfNeeded(
+    const void* address) {
+  // TODO(chromium:1056170): Add page bloom filter
+  const BasePage* page = reinterpret_cast<const BasePage*>(
+      page_backend_.Lookup(static_cast<ConstAddress>(address)));
+  if (!page) return;
+
+  DCHECK_EQ(&heap_, page->heap());
+
+  auto* header = page->TryObjectHeaderFromInnerAddress(
+      const_cast<Address>(reinterpret_cast<ConstAddress>(address)));
+  if (!header) return;
+
+  if (!header->IsInConstruction<HeapObjectHeader::AccessMode::kNonAtomic>()) {
+    Visit(header->Payload(),
+          {header->Payload(),
+           GlobalGCInfoTable::GCInfoFromIndex(header->GetGCInfoIndex()).trace});
+  } else {
+    VisitConservatively(*header, TraceConservatively);
+  }
+}
+
+}  // namespace internal
+
 }  // namespace cppgc
@@ -7,10 +7,15 @@
 #include "include/cppgc/persistent.h"
 #include "include/cppgc/visitor.h"
+#include "src/heap/cppgc/heap-object-header.h"
 
 namespace cppgc {
 namespace internal {
 
+class HeapBase;
+class HeapObjectHeader;
+class PageBackend;
+
 // Base visitor that is allowed to create a public cppgc::Visitor object and
 // use its internals.
 class VisitorBase : public cppgc::Visitor {
@@ -29,6 +34,23 @@ class VisitorBase : public cppgc::Visitor {
   }
 };
 
+// Regular visitor that additionally allows for conservative tracing.
+class ConservativeTracingVisitor : public VisitorBase {
+ public:
+  ConservativeTracingVisitor(HeapBase&, PageBackend&);
+
+  void TraceConservativelyIfNeeded(const void*);
+
+ protected:
+  using TraceConservativelyCallback = void(ConservativeTracingVisitor*,
+                                           const HeapObjectHeader&);
+
+  virtual void VisitConservatively(HeapObjectHeader&,
+                                   TraceConservativelyCallback) {}
+
+  HeapBase& heap_;
+  PageBackend& page_backend_;
+};
+
 }  // namespace internal
 }  // namespace cppgc
......
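
Note: the TraceConservativelyCallback indirection above is a small double dispatch. TraceConservativelyIfNeeded() (visitor.cc) resolves the address to a header and routes in-construction objects through the virtual VisitConservatively(), passing the shared word-scanning routine as the callback; a subclass only decides what happens around the scan. A minimal hypothetical sketch (SnapshotVisitor is an assumed name; MarkingVisitor's real override in marking-visitor.cc additionally marks the header and accounts marked bytes):

#include "src/heap/cppgc/visitor.h"

namespace cppgc {
namespace internal {

// Hypothetical visitor that follows in-construction objects without marking.
class SnapshotVisitor final : public ConservativeTracingVisitor {
 public:
  using ConservativeTracingVisitor::ConservativeTracingVisitor;

 protected:
  void VisitConservatively(HeapObjectHeader& header,
                           TraceConservativelyCallback callback) final {
    // |callback| is TraceConservatively(): it scans the payload word by word
    // and feeds plausible pointers back into TraceConservativelyIfNeeded().
    callback(this, header);
  }
};

}  // namespace internal
}  // namespace cppgc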
@@ -24,7 +24,7 @@ class MarkerTest : public testing::TestWithHeap {
   void DoMarking(MarkingConfig config) {
     auto* heap = Heap::From(GetHeap());
-    Marker marker(heap);
+    Marker marker(heap->AsBase());
     marker.StartMarking(config);
     marker.FinishMarking(config);
     marker.ProcessWeakness();
@@ -213,7 +213,7 @@ class GCedWithCallback : public GarbageCollected<GCedWithCallback> {
 }  // namespace
 
 TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedEmptyStack) {
-  Marker marker(Heap::From(GetHeap()));
+  Marker marker(Heap::From(GetHeap())->AsBase());
   marker.StartMarking({MarkingConfig::StackState::kMayContainHeapPointers});
   GCedWithCallback* object = MakeGarbageCollected<GCedWithCallback>(
       GetAllocationHandle(), [&marker](GCedWithCallback* obj) {
@@ -226,7 +226,7 @@ TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedEmptyStack) {
 }
 
 TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedNonEmptyStack) {
-  Marker marker(Heap::From(GetHeap()));
+  Marker marker(Heap::From(GetHeap())->AsBase());
   marker.StartMarking({MarkingConfig::StackState::kMayContainHeapPointers});
   MakeGarbageCollected<GCedWithCallback>(
       GetAllocationHandle(), [&marker](GCedWithCallback* obj) {
......
@@ -22,7 +22,7 @@ namespace {
 class MarkingVisitorTest : public testing::TestWithHeap {
  public:
   MarkingVisitorTest()
-      : marker_(std::make_unique<Marker>(Heap::From(GetHeap()))) {}
+      : marker_(std::make_unique<Marker>(Heap::From(GetHeap())->AsBase())) {}
   ~MarkingVisitorTest() { marker_->ClearAllWorklistsForTesting(); }
 
   Marker* GetMarker() { return marker_.get(); }
......
@@ -141,8 +141,8 @@ class GCed : public GarbageCollected<GCed> {
 class WriteBarrierTest : public testing::TestWithHeap {
  public:
-  WriteBarrierTest() : iheap_(Heap::From(GetHeap())) {
-    GetMarkerRef() = std::make_unique<Marker>(iheap_);
+  WriteBarrierTest() : internal_heap_(Heap::From(GetHeap())) {
+    GetMarkerRef() = std::make_unique<Marker>(internal_heap_->AsBase());
     marker_ = GetMarkerRef().get();
   }
@@ -154,7 +154,7 @@ class WriteBarrierTest : public testing::TestWithHeap {
   Marker* marker() const { return marker_; }
 
  private:
-  Heap* iheap_;
+  Heap* internal_heap_;
   Marker* marker_;
 };
......