Commit 8cf4ca8f authored by Michael Lippautz, committed by Commit Bot

cppgc: Refactor visitation 3/3

Split off MarkingWorklists from Marker and introduce MarkerBase.

MarkerBase refers only to interface types for passing along visitors.
The concrete Marker provides the implementation of these interfaces.
The unified heap marker uses different marking visitors internally but
implements the same interface.

Change-Id: Ibc4b2c88e2e69bd303a95da7d167a701934f4a07
Bug: chromium:1056170
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2270539
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#68676}
parent 5ab27690
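Note: the sketch below is illustrative only and is not part of this CL. It shows the shape of the split described above: MarkerBase exposes pure virtual accessors for its visitors, and each concrete marker supplies its own. All class names are simplified stand-ins for the cppgc types touched in this diff.

```cpp
#include <iostream>

// Simplified stand-in for cppgc::Visitor.
class Visitor {
 public:
  virtual ~Visitor() = default;
  virtual void Trace(const void* object) = 0;
};

// MarkerBase drives the marking algorithm but only knows the visitor
// *interface*; subclasses decide which concrete visitor to use.
class MarkerBase {
 public:
  virtual ~MarkerBase() = default;
  void MarkObject(const void* object) { visitor().Trace(object); }

 protected:
  virtual Visitor& visitor() = 0;  // provided by the concrete marker
};

// The concrete Marker owns the default marking visitor. A unified-heap
// marker could plug in a different visitor while reusing MarkerBase's
// algorithm unchanged.
class Marker final : public MarkerBase {
 protected:
  Visitor& visitor() final { return marking_visitor_; }

 private:
  class MarkingVisitor final : public Visitor {
   public:
    void Trace(const void* object) override {
      std::cout << "marking " << object << "\n";
    }
  } marking_visitor_;
};

int main() {
  Marker marker;
  int dummy = 0;
  marker.MarkObject(&dummy);  // dispatches through the visitor() interface
}
```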
@@ -4215,6 +4215,8 @@ v8_source_set("cppgc_base") {
     "src/heap/cppgc/marking-state.h",
     "src/heap/cppgc/marking-visitor.cc",
     "src/heap/cppgc/marking-visitor.h",
+    "src/heap/cppgc/marking-worklists.cc",
+    "src/heap/cppgc/marking-worklists.h",
     "src/heap/cppgc/object-allocator.cc",
     "src/heap/cppgc/object-allocator.h",
     "src/heap/cppgc/object-start-bitmap.h",
...
@@ -59,21 +59,34 @@ class CppgcPlatformAdapter final : public cppgc::Platform {
   v8::Isolate* isolate_;
 };
 
-class UnifiedHeapMarker : public cppgc::internal::Marker {
+class UnifiedHeapMarker : public cppgc::internal::MarkerBase {
  public:
   explicit UnifiedHeapMarker(cppgc::internal::HeapBase& heap);
 
   void AddObject(void*);
 
-  // TODO(chromium:1056170): Implement unified heap specific
-  // CreateMutatorThreadMarkingVisitor and AdvanceMarkingWithDeadline.
+ protected:
+  cppgc::Visitor& visitor() final { return marking_visitor_; }
+  cppgc::internal::ConservativeTracingVisitor& conservative_visitor() final {
+    return conservative_marking_visitor_;
+  }
+  heap::base::StackVisitor& stack_visitor() final {
+    return conservative_marking_visitor_;
+  }
+
+ private:
+  // TODO(chromium:1056170): Implement unified heap specific marking visitors.
+  cppgc::internal::MarkingVisitor marking_visitor_;
+  cppgc::internal::ConservativeMarkingVisitor conservative_marking_visitor_;
 };
 
 UnifiedHeapMarker::UnifiedHeapMarker(cppgc::internal::HeapBase& heap)
-    : cppgc::internal::Marker(heap) {}
+    : cppgc::internal::MarkerBase(heap),
+      marking_visitor_(heap, marking_state()),
+      conservative_marking_visitor_(heap, marking_state(), marking_visitor_) {}
 
 void UnifiedHeapMarker::AddObject(void* object) {
-  mutator_marking_state_->MarkAndPush(
+  mutator_marking_state_.MarkAndPush(
       cppgc::internal::HeapObjectHeader::FromPayload(object));
 }
 
@@ -97,7 +110,7 @@ void CppHeap::RegisterV8References(
 }
 
 void CppHeap::TracePrologue(TraceFlags flags) {
-  marker_ = std::make_unique<UnifiedHeapMarker>(AsBase());
+  marker_.reset(new UnifiedHeapMarker(AsBase()));
   const UnifiedHeapMarker::MarkingConfig marking_config{
       UnifiedHeapMarker::MarkingConfig::CollectionType::kMajor,
       cppgc::Heap::StackState::kNoHeapPointers,
...
@@ -35,7 +35,7 @@ namespace testing {
 class TestWithHeap;
 }
 
-class Marker;
+class MarkerBase;
 class PageBackend;
 class PreFinalizerHandler;
 class StatsCollector;
@@ -90,7 +90,7 @@ class V8_EXPORT_PRIVATE HeapBase {
     return prefinalizer_handler_.get();
   }
 
-  Marker* marker() const { return marker_.get(); }
+  MarkerBase* marker() const { return marker_.get(); }
 
   ObjectAllocator& object_allocator() { return object_allocator_; }
 
@@ -128,7 +128,7 @@ class V8_EXPORT_PRIVATE HeapBase {
   std::unique_ptr<StatsCollector> stats_collector_;
   std::unique_ptr<heap::base::Stack> stack_;
   std::unique_ptr<PreFinalizerHandler> prefinalizer_handler_;
-  std::unique_ptr<Marker> marker_;
+  std::unique_ptr<MarkerBase> marker_;
 
   ObjectAllocator object_allocator_;
   Sweeper sweeper_;
...
@@ -50,7 +50,7 @@ void ExitIncrementalMarkingIfNeeded(Marker::MarkingConfig config,
 }
 
 // Visit remembered set that was recorded in the generational barrier.
-void VisitRememberedSlots(HeapBase& heap, MarkingVisitor* visitor) {
+void VisitRememberedSlots(HeapBase& heap, MarkingState& marking_state) {
 #if defined(CPPGC_YOUNG_GENERATION)
   for (void* slot : heap.remembered_slots()) {
     auto& slot_header = BasePage::FromInnerAddress(&heap, slot)
@@ -60,10 +60,11 @@ void VisitRememberedSlots(HeapBase& heap, MarkingVisitor* visitor) {
     // top level (with the guarantee that no objects are currently being in
     // construction). This can be ensured by running young GCs from safe points
    // or by reintroducing nested allocation scopes that avoid finalization.
-    DCHECK(!MarkingVisitor::IsInConstruction(slot_header));
+    DCHECK(
+        !header.IsInConstruction<HeapObjectHeader::AccessMode::kNonAtomic>());
 
     void* value = *reinterpret_cast<void**>(slot);
-    visitor->DynamicallyMarkAddress(static_cast<Address>(value));
+    marking_state.DynamicallyMarkAddress(static_cast<Address>(value));
   }
 #endif
 }
@@ -99,28 +100,25 @@ bool DrainWorklistWithDeadline(v8::base::TimeTicks deadline, Worklist* worklist,
 }  // namespace
 
-constexpr int Marker::kMutatorThreadId;
-
-Marker::Marker(HeapBase& heap)
+MarkerBase::MarkerBase(HeapBase& heap)
     : heap_(heap),
-      mutator_marking_state_(std::make_unique<MarkingState>(
-          heap, &marking_worklist_, &not_fully_constructed_worklist_,
-          &weak_callback_worklist_, Marker::kMutatorThreadId)),
-      marking_visitor_(CreateMutatorThreadMarkingVisitor()),
-      conservative_marking_visitor_(
-          std::make_unique<ConservativeMarkingVisitor>(
-              heap, *mutator_marking_state_.get(), *marking_visitor_.get())) {}
+      mutator_marking_state_(
+          heap, marking_worklists_.marking_worklist(),
+          marking_worklists_.not_fully_constructed_worklist(),
+          marking_worklists_.weak_callback_worklist(),
+          MarkingWorklists::kMutatorThreadId) {}
 
-Marker::~Marker() {
+MarkerBase::~MarkerBase() {
   // The fixed point iteration may have found not-fully-constructed objects.
   // Such objects should have already been found through the stack scan though
   // and should thus already be marked.
-  if (!not_fully_constructed_worklist_.IsEmpty()) {
+  if (!marking_worklists_.not_fully_constructed_worklist()->IsEmpty()) {
 #if DEBUG
     DCHECK_NE(MarkingConfig::StackState::kNoHeapPointers, config_.stack_state);
-    NotFullyConstructedItem item;
-    NotFullyConstructedWorklist::View view(&not_fully_constructed_worklist_,
-                                           kMutatorThreadId);
+    MarkingWorklists::NotFullyConstructedItem item;
+    MarkingWorklists::NotFullyConstructedWorklist::View view(
+        marking_worklists_.not_fully_constructed_worklist(),
+        MarkingWorklists::kMutatorThreadId);
     while (view.Pop(&item)) {
       const HeapObjectHeader& header =
          BasePage::FromPayload(item)->ObjectHeaderFromInnerAddress(
@@ -128,12 +126,12 @@ Marker::~Marker() {
       DCHECK(header.IsMarked());
     }
 #else
-    not_fully_constructed_worklist_.Clear();
+    marking_worklists_.not_fully_constructed_worklist()->Clear();
 #endif
   }
 }
-void Marker::StartMarking(MarkingConfig config) {
+void MarkerBase::StartMarking(MarkingConfig config) {
   heap().stats_collector()->NotifyMarkingStarted();
 
   config_ = config;
@@ -141,66 +139,62 @@ void Marker::StartMarking(MarkingConfig config) {
   EnterIncrementalMarkingIfNeeded(config, heap());
 }
 
-void Marker::EnterAtomicPause(MarkingConfig config) {
+void MarkerBase::EnterAtomicPause(MarkingConfig config) {
   ExitIncrementalMarkingIfNeeded(config_, heap());
   config_ = config;
 
   // VisitRoots also resets the LABs.
   VisitRoots();
   if (config_.stack_state == MarkingConfig::StackState::kNoHeapPointers) {
-    FlushNotFullyConstructedObjects();
+    marking_worklists_.FlushNotFullyConstructedObjects();
   } else {
     MarkNotFullyConstructedObjects();
   }
 }
 
-void Marker::LeaveAtomicPause() {
+void MarkerBase::LeaveAtomicPause() {
   ResetRememberedSet(heap());
   heap().stats_collector()->NotifyMarkingCompleted(
-      mutator_marking_state_->marked_bytes());
+      mutator_marking_state_.marked_bytes());
 }
 
-void Marker::FinishMarking(MarkingConfig config) {
+void MarkerBase::FinishMarking(MarkingConfig config) {
   EnterAtomicPause(config);
   AdvanceMarkingWithDeadline(v8::base::TimeDelta::Max());
   LeaveAtomicPause();
 }
 
-void Marker::ProcessWeakness() {
-  heap().GetWeakPersistentRegion().Trace(marking_visitor_.get());
+void MarkerBase::ProcessWeakness() {
+  heap().GetWeakPersistentRegion().Trace(&visitor());
 
   // Call weak callbacks on objects that may now be pointing to dead objects.
-  WeakCallbackItem item;
+  MarkingWorklists::WeakCallbackItem item;
   LivenessBroker broker = LivenessBrokerFactory::Create();
-  WeakCallbackWorklist::View view(&weak_callback_worklist_, kMutatorThreadId);
+  MarkingWorklists::WeakCallbackWorklist::View view(
+      marking_worklists_.weak_callback_worklist(),
+      MarkingWorklists::kMutatorThreadId);
   while (view.Pop(&item)) {
     item.callback(broker, item.parameter);
   }
   // Weak callbacks should not add any new objects for marking.
-  DCHECK(marking_worklist_.IsEmpty());
+  DCHECK(marking_worklists_.marking_worklist()->IsEmpty());
 }
 
-void Marker::VisitRoots() {
+void MarkerBase::VisitRoots() {
   // Reset LABs before scanning roots. LABs are cleared to allow
   // ObjectStartBitmap handling without considering LABs.
   heap().object_allocator().ResetLinearAllocationBuffers();
 
-  heap().GetStrongPersistentRegion().Trace(marking_visitor_.get());
+  heap().GetStrongPersistentRegion().Trace(&visitor());
   if (config_.stack_state != MarkingConfig::StackState::kNoHeapPointers) {
-    heap().stack()->IteratePointers(conservative_marking_visitor_.get());
+    heap().stack()->IteratePointers(&stack_visitor());
   }
   if (config_.collection_type == MarkingConfig::CollectionType::kMinor) {
-    VisitRememberedSlots(heap(), marking_visitor_.get());
+    VisitRememberedSlots(heap(), mutator_marking_state_);
   }
 }
 
-std::unique_ptr<MutatorThreadMarkingVisitor>
-Marker::CreateMutatorThreadMarkingVisitor() {
-  return std::make_unique<MutatorThreadMarkingVisitor>(this);
-}
-
-bool Marker::AdvanceMarkingWithDeadline(v8::base::TimeDelta duration) {
-  MutatorThreadMarkingVisitor* visitor = marking_visitor_.get();
+bool MarkerBase::AdvanceMarkingWithDeadline(v8::base::TimeDelta duration) {
   v8::base::TimeTicks deadline = v8::base::TimeTicks::Now() + duration;
 
   do {
@@ -208,70 +202,65 @@ bool Marker::AdvanceMarkingWithDeadline(v8::base::TimeDelta duration) {
     // |marking_worklist_|. This merely re-adds items with the proper
     // callbacks.
     if (!DrainWorklistWithDeadline(
-            deadline, &previously_not_fully_constructed_worklist_,
-            [this](NotFullyConstructedItem& item) {
-              mutator_marking_state_->DynamicallyMarkAddress(
+            deadline,
+            marking_worklists_.previously_not_fully_constructed_worklist(),
+            [this](MarkingWorklists::NotFullyConstructedItem& item) {
+              mutator_marking_state_.DynamicallyMarkAddress(
                  reinterpret_cast<ConstAddress>(item));
             },
-            kMutatorThreadId))
+            MarkingWorklists::kMutatorThreadId))
       return false;
 
     if (!DrainWorklistWithDeadline(
-            deadline, &marking_worklist_,
-            [this, visitor](const MarkingItem& item) {
+            deadline, marking_worklists_.marking_worklist(),
+            [this](const MarkingWorklists::MarkingItem& item) {
              const HeapObjectHeader& header =
                  HeapObjectHeader::FromPayload(item.base_object_payload);
              DCHECK(!header.IsInConstruction<
                     HeapObjectHeader::AccessMode::kNonAtomic>());
-              item.callback(visitor, item.base_object_payload);
-              mutator_marking_state_->AccountMarkedBytes(header);
+              item.callback(&visitor(), item.base_object_payload);
+              mutator_marking_state_.AccountMarkedBytes(header);
             },
-            kMutatorThreadId))
+            MarkingWorklists::kMutatorThreadId))
       return false;
 
    if (!DrainWorklistWithDeadline(
-            deadline, &write_barrier_worklist_,
-            [this, visitor](HeapObjectHeader* header) {
+            deadline, marking_worklists_.write_barrier_worklist(),
+            [this](HeapObjectHeader* header) {
              DCHECK(header);
              DCHECK(!header->IsInConstruction<
                     HeapObjectHeader::AccessMode::kNonAtomic>());
              const GCInfo& gcinfo =
                  GlobalGCInfoTable::GCInfoFromIndex(header->GetGCInfoIndex());
-              gcinfo.trace(visitor, header->Payload());
-              mutator_marking_state_->AccountMarkedBytes(*header);
+              gcinfo.trace(&visitor(), header->Payload());
+              mutator_marking_state_.AccountMarkedBytes(*header);
            },
-            kMutatorThreadId))
+            MarkingWorklists::kMutatorThreadId))
      return false;
-  } while (!marking_worklist_.IsLocalViewEmpty(kMutatorThreadId));
+  } while (!marking_worklists_.marking_worklist()->IsLocalViewEmpty(
+      MarkingWorklists::kMutatorThreadId));
 
   return true;
 }
-void Marker::FlushNotFullyConstructedObjects() {
-  if (!not_fully_constructed_worklist_.IsLocalViewEmpty(kMutatorThreadId)) {
-    not_fully_constructed_worklist_.FlushToGlobal(kMutatorThreadId);
-    previously_not_fully_constructed_worklist_.MergeGlobalPool(
-        &not_fully_constructed_worklist_);
-  }
-  DCHECK(not_fully_constructed_worklist_.IsLocalViewEmpty(kMutatorThreadId));
-}
-
-void Marker::MarkNotFullyConstructedObjects() {
-  NotFullyConstructedItem item;
-  NotFullyConstructedWorklist::View view(&not_fully_constructed_worklist_,
-                                         kMutatorThreadId);
+void MarkerBase::MarkNotFullyConstructedObjects() {
+  MarkingWorklists::NotFullyConstructedItem item;
+  MarkingWorklists::NotFullyConstructedWorklist::View view(
+      marking_worklists_.not_fully_constructed_worklist(),
+      MarkingWorklists::kMutatorThreadId);
   while (view.Pop(&item)) {
-    conservative_marking_visitor_->TraceConservativelyIfNeeded(item);
+    conservative_visitor().TraceConservativelyIfNeeded(item);
   }
 }
 
-void Marker::ClearAllWorklistsForTesting() {
-  marking_worklist_.Clear();
-  not_fully_constructed_worklist_.Clear();
-  previously_not_fully_constructed_worklist_.Clear();
-  write_barrier_worklist_.Clear();
-  weak_callback_worklist_.Clear();
+void MarkerBase::ClearAllWorklistsForTesting() {
+  marking_worklists_.ClearForTesting();
 }
 
+Marker::Marker(HeapBase& heap)
+    : MarkerBase(heap),
+      marking_visitor_(heap, marking_state()),
+      conservative_marking_visitor_(heap, marking_state(), marking_visitor_) {}
+
 }  // namespace internal
 }  // namespace cppgc
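For orientation, AdvanceMarkingWithDeadline above drains the three worklists through the DrainWorklistWithDeadline helper and bails out once the time budget is spent. Below is a minimal self-contained sketch of that pattern, assuming only the standard library; the real helper in marker.cc takes v8::base::TimeTicks plus a task id, and may check the deadline at a coarser interval than once per item.

```cpp
#include <chrono>
#include <deque>

// Stand-in for the cppgc Worklist view; a plain FIFO queue for the sketch.
template <typename Item>
using ToyWorklist = std::deque<Item>;

// Process items until the worklist is empty or the deadline passes.
// Returns true iff the worklist was fully drained within the budget, so the
// caller knows whether marking has to resume in a later increment.
template <typename Item, typename Callback>
bool DrainWorklistWithDeadline(std::chrono::steady_clock::time_point deadline,
                               ToyWorklist<Item>* worklist,
                               Callback callback) {
  while (!worklist->empty()) {
    Item item = worklist->front();
    worklist->pop_front();
    callback(item);
    // Re-check the time budget after each item so a long queue cannot blow
    // past the incremental-marking deadline.
    if (std::chrono::steady_clock::now() >= deadline) {
      return worklist->empty();
    }
  }
  return true;
}

int main() {
  ToyWorklist<int> worklist{1, 2, 3};
  auto deadline =
      std::chrono::steady_clock::now() + std::chrono::milliseconds(5);
  bool done = DrainWorklistWithDeadline(deadline, &worklist,
                                        [](int /* item */) { /* trace it */ });
  (void)done;  // false would mean: resume draining in the next increment
}
```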
@@ -10,17 +10,17 @@
 #include "include/cppgc/heap.h"
 #include "include/cppgc/trace-trait.h"
 #include "include/cppgc/visitor.h"
+#include "src/base/macros.h"
 #include "src/base/platform/time.h"
+#include "src/heap/cppgc/marking-state.h"
 #include "src/heap/cppgc/marking-visitor.h"
+#include "src/heap/cppgc/marking-worklists.h"
 #include "src/heap/cppgc/worklist.h"
 
 namespace cppgc {
 namespace internal {
 
 class HeapBase;
-class HeapObjectHeader;
-class MarkingState;
-class MutatorThreadMarkingVisitor;
 
 // Marking algorithm. Example for a valid call sequence creating the marking
 // phase:
@@ -31,31 +31,8 @@ class MutatorThreadMarkingVisitor;
 // 5. LeaveAtomicPause()
 //
 // Alternatively, FinishMarking combines steps 3.-5.
-class V8_EXPORT_PRIVATE Marker {
-  static constexpr int kNumConcurrentMarkers = 0;
-  static constexpr int kNumMarkers = 1 + kNumConcurrentMarkers;
-
+class V8_EXPORT_PRIVATE MarkerBase {
  public:
-  static constexpr int kMutatorThreadId = 0;
-
-  using MarkingItem = cppgc::TraceDescriptor;
-  using NotFullyConstructedItem = const void*;
-  struct WeakCallbackItem {
-    cppgc::WeakCallback callback;
-    const void* parameter;
-  };
-
-  // Segment size of 512 entries necessary to avoid throughput regressions.
-  // Since the work list is currently a temporary object this is not a problem.
-  using MarkingWorklist =
-      Worklist<MarkingItem, 512 /* local entries */, kNumMarkers>;
-  using NotFullyConstructedWorklist =
-      Worklist<NotFullyConstructedItem, 16 /* local entries */, kNumMarkers>;
-  using WeakCallbackWorklist =
-      Worklist<WeakCallbackItem, 64 /* local entries */, kNumMarkers>;
-  using WriteBarrierWorklist =
-      Worklist<HeapObjectHeader*, 64 /*local entries */, kNumMarkers>;
-
   struct MarkingConfig {
     enum class CollectionType : uint8_t {
       kMinor,
@@ -75,11 +52,10 @@ class V8_EXPORT_PRIVATE Marker {
     MarkingType marking_type = MarkingType::kAtomic;
   };
 
-  explicit Marker(HeapBase& heap);
-  virtual ~Marker();
+  virtual ~MarkerBase();
 
-  Marker(const Marker&) = delete;
-  Marker& operator=(const Marker&) = delete;
+  MarkerBase(const MarkerBase&) = delete;
+  MarkerBase& operator=(const MarkerBase&) = delete;
 
   // Initialize marking according to the given config. This method will
   // trigger incremental/concurrent marking if needed.
@@ -107,45 +83,46 @@ class V8_EXPORT_PRIVATE Marker {
   void ProcessWeakness();
 
   HeapBase& heap() { return heap_; }
-  MarkingWorklist* marking_worklist() { return &marking_worklist_; }
-  NotFullyConstructedWorklist* not_fully_constructed_worklist() {
-    return &not_fully_constructed_worklist_;
-  }
-  WriteBarrierWorklist* write_barrier_worklist() {
-    return &write_barrier_worklist_;
-  }
-  WeakCallbackWorklist* weak_callback_worklist() {
-    return &weak_callback_worklist_;
-  }
-  MarkingState& marking_state() const { return *mutator_marking_state_.get(); }
+  MarkingState& marking_state() { return mutator_marking_state_; }
+  MarkingWorklists& marking_worklists() { return marking_worklists_; }
+
+  cppgc::Visitor& VisitorForTesting() { return visitor(); }
   void ClearAllWorklistsForTesting();
 
-  MutatorThreadMarkingVisitor* GetMarkingVisitorForTesting() {
-    return marking_visitor_.get();
-  }
-
  protected:
-  virtual std::unique_ptr<MutatorThreadMarkingVisitor>
-  CreateMutatorThreadMarkingVisitor();
+  explicit MarkerBase(HeapBase& heap);
+
+  virtual cppgc::Visitor& visitor() = 0;
+  virtual ConservativeTracingVisitor& conservative_visitor() = 0;
+  virtual heap::base::StackVisitor& stack_visitor() = 0;
 
   void VisitRoots();
 
-  void FlushNotFullyConstructedObjects();
   void MarkNotFullyConstructedObjects();
 
   HeapBase& heap_;
   MarkingConfig config_ = MarkingConfig::Default();
 
-  std::unique_ptr<MarkingState> mutator_marking_state_;
-  std::unique_ptr<MutatorThreadMarkingVisitor> marking_visitor_;
-  std::unique_ptr<ConservativeMarkingVisitor> conservative_marking_visitor_;
+  MarkingWorklists marking_worklists_;
+  MarkingState mutator_marking_state_;
+};
 
-  MarkingWorklist marking_worklist_;
-  NotFullyConstructedWorklist not_fully_constructed_worklist_;
-  NotFullyConstructedWorklist previously_not_fully_constructed_worklist_;
-  WriteBarrierWorklist write_barrier_worklist_;
-  WeakCallbackWorklist weak_callback_worklist_;
+class V8_EXPORT_PRIVATE Marker final : public MarkerBase {
+ public:
+  explicit Marker(HeapBase&);
+
+ protected:
+  cppgc::Visitor& visitor() final { return marking_visitor_; }
+  ConservativeTracingVisitor& conservative_visitor() final {
+    return conservative_marking_visitor_;
+  }
+  heap::base::StackVisitor& stack_visitor() final {
+    return conservative_marking_visitor_;
+  }
+
+ private:
+  MarkingVisitor marking_visitor_;
+  ConservativeMarkingVisitor conservative_marking_visitor_;
 };
 
 }  // namespace internal
...
@@ -10,7 +10,7 @@
 #include "src/heap/cppgc/heap-object-header.h"
 #include "src/heap/cppgc/heap-page.h"
 #include "src/heap/cppgc/liveness-broker.h"
-#include "src/heap/cppgc/marker.h"
+#include "src/heap/cppgc/marking-worklists.h"
 
 namespace cppgc {
 namespace internal {
@@ -18,9 +18,9 @@ namespace internal {
 // C++ marking implementation.
 class MarkingState {
  public:
-  inline MarkingState(HeapBase& heap, Marker::MarkingWorklist*,
-                      Marker::NotFullyConstructedWorklist*,
-                      Marker::WeakCallbackWorklist*, int);
+  inline MarkingState(HeapBase& heap, MarkingWorklists::MarkingWorklist*,
+                      MarkingWorklists::NotFullyConstructedWorklist*,
+                      MarkingWorklists::WeakCallbackWorklist*, int);
 
   MarkingState(const MarkingState&) = delete;
   MarkingState& operator=(const MarkingState&) = delete;
@@ -47,17 +47,19 @@ class MarkingState {
   HeapBase& heap_;
 #endif  // DEBUG
 
-  Marker::MarkingWorklist::View marking_worklist_;
-  Marker::NotFullyConstructedWorklist::View not_fully_constructed_worklist_;
-  Marker::WeakCallbackWorklist::View weak_callback_worklist_;
+  MarkingWorklists::MarkingWorklist::View marking_worklist_;
+  MarkingWorklists::NotFullyConstructedWorklist::View
+      not_fully_constructed_worklist_;
+  MarkingWorklists::WeakCallbackWorklist::View weak_callback_worklist_;
 
   size_t marked_bytes_ = 0;
 };
 
 MarkingState::MarkingState(
-    HeapBase& heap, Marker::MarkingWorklist* marking_worklist,
-    Marker::NotFullyConstructedWorklist* not_fully_constructed_worklist,
-    Marker::WeakCallbackWorklist* weak_callback_worklist, int task_id)
+    HeapBase& heap, MarkingWorklists::MarkingWorklist* marking_worklist,
+    MarkingWorklists::NotFullyConstructedWorklist*
+        not_fully_constructed_worklist,
+    MarkingWorklists::WeakCallbackWorklist* weak_callback_worklist, int task_id)
     :
 #ifdef DEBUG
       heap_(heap),
...
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/cppgc/marking-worklists.h"
namespace cppgc {
namespace internal {
constexpr int MarkingWorklists::kMutatorThreadId;
void MarkingWorklists::ClearForTesting() {
marking_worklist_.Clear();
not_fully_constructed_worklist_.Clear();
previously_not_fully_constructed_worklist_.Clear();
write_barrier_worklist_.Clear();
weak_callback_worklist_.Clear();
}
void MarkingWorklists::FlushNotFullyConstructedObjects() {
if (!not_fully_constructed_worklist_.IsLocalViewEmpty(kMutatorThreadId)) {
not_fully_constructed_worklist_.FlushToGlobal(kMutatorThreadId);
previously_not_fully_constructed_worklist_.MergeGlobalPool(
&not_fully_constructed_worklist_);
}
DCHECK(not_fully_constructed_worklist_.IsLocalViewEmpty(
MarkingWorklists::kMutatorThreadId));
}
} // namespace internal
} // namespace cppgc
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_CPPGC_MARKING_WORKLISTS_H_
#define V8_HEAP_CPPGC_MARKING_WORKLISTS_H_
#include "include/cppgc/visitor.h"
#include "src/heap/cppgc/worklist.h"
namespace cppgc {
namespace internal {
class HeapObjectHeader;
class MarkingWorklists {
static constexpr int kNumConcurrentMarkers = 0;
static constexpr int kNumMarkers = 1 + kNumConcurrentMarkers;
public:
static constexpr int kMutatorThreadId = 0;
using MarkingItem = cppgc::TraceDescriptor;
using NotFullyConstructedItem = const void*;
struct WeakCallbackItem {
cppgc::WeakCallback callback;
const void* parameter;
};
// Segment size of 512 entries necessary to avoid throughput regressions.
// Since the work list is currently a temporary object this is not a problem.
using MarkingWorklist =
Worklist<MarkingItem, 512 /* local entries */, kNumMarkers>;
using NotFullyConstructedWorklist =
Worklist<NotFullyConstructedItem, 16 /* local entries */, kNumMarkers>;
using WeakCallbackWorklist =
Worklist<WeakCallbackItem, 64 /* local entries */, kNumMarkers>;
using WriteBarrierWorklist =
Worklist<HeapObjectHeader*, 64 /*local entries */, kNumMarkers>;
MarkingWorklist* marking_worklist() { return &marking_worklist_; }
NotFullyConstructedWorklist* not_fully_constructed_worklist() {
return &not_fully_constructed_worklist_;
}
NotFullyConstructedWorklist* previously_not_fully_constructed_worklist() {
return &previously_not_fully_constructed_worklist_;
}
WriteBarrierWorklist* write_barrier_worklist() {
return &write_barrier_worklist_;
}
WeakCallbackWorklist* weak_callback_worklist() {
return &weak_callback_worklist_;
}
// Moves objects in not_fully_constructed_worklist_ to
// previously_not_fully_constructed_worklist_.
void FlushNotFullyConstructedObjects();
void ClearForTesting();
private:
MarkingWorklist marking_worklist_;
NotFullyConstructedWorklist not_fully_constructed_worklist_;
NotFullyConstructedWorklist previously_not_fully_constructed_worklist_;
WriteBarrierWorklist write_barrier_worklist_;
WeakCallbackWorklist weak_callback_worklist_;
};
} // namespace internal
} // namespace cppgc
#endif // V8_HEAP_CPPGC_MARKING_WORKLISTS_H_
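As background for FlushNotFullyConstructedObjects() above: cppgc worklists keep one local view per task plus a shared global pool, and flushing publishes the mutator's local entries globally so that another worklist can merge them. The toy model below is a deliberate simplification for illustration, not the V8 Worklist implementation:

```cpp
#include <cstddef>
#include <iostream>
#include <vector>

// One local view per task plus a global pool, mirroring the
// FlushToGlobal()/MergeGlobalPool() calls used by
// MarkingWorklists::FlushNotFullyConstructedObjects().
template <typename T, int kNumTasks = 1>
class ToyWorklist {
 public:
  void Push(int task, T value) { local_[task].push_back(value); }
  bool IsLocalViewEmpty(int task) const { return local_[task].empty(); }
  // Publish this task's local entries to the shared global pool.
  void FlushToGlobal(int task) {
    global_.insert(global_.end(), local_[task].begin(), local_[task].end());
    local_[task].clear();
  }
  // Steal another worklist's global pool into this one.
  void MergeGlobalPool(ToyWorklist* other) {
    global_.insert(global_.end(), other->global_.begin(),
                   other->global_.end());
    other->global_.clear();
  }
  std::size_t GlobalSize() const { return global_.size(); }

 private:
  std::vector<T> local_[kNumTasks];
  std::vector<T> global_;
};

int main() {
  constexpr int kMutatorThreadId = 0;
  ToyWorklist<const void*> not_fully_constructed;
  ToyWorklist<const void*> previously_not_fully_constructed;

  int a = 0;
  not_fully_constructed.Push(kMutatorThreadId, &a);

  // Rough equivalent of FlushNotFullyConstructedObjects():
  if (!not_fully_constructed.IsLocalViewEmpty(kMutatorThreadId)) {
    not_fully_constructed.FlushToGlobal(kMutatorThreadId);
    previously_not_fully_constructed.MergeGlobalPool(&not_fully_constructed);
  }
  std::cout << previously_not_fully_constructed.GlobalSize() << "\n";  // 1
}
```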
@@ -21,7 +21,7 @@ namespace internal {
 
 namespace {
 
-void MarkValue(const BasePage* page, Marker* marker, const void* value) {
+void MarkValue(const BasePage* page, MarkerBase* marker, const void* value) {
 #if defined(CPPGC_CAGED_HEAP)
   DCHECK(reinterpret_cast<CagedHeapLocalData*>(
              reinterpret_cast<uintptr_t>(value) &
@@ -40,14 +40,17 @@ void MarkValue(const BasePage* page, Marker* marker, const void* value) {
     // It is assumed that objects on not_fully_constructed_worklist_ are not
     // marked.
     header.Unmark();
-    Marker::NotFullyConstructedWorklist::View not_fully_constructed_worklist(
-        marker->not_fully_constructed_worklist(), Marker::kMutatorThreadId);
+    MarkingWorklists::NotFullyConstructedWorklist::View
+        not_fully_constructed_worklist(
+            marker->marking_worklists().not_fully_constructed_worklist(),
+            MarkingWorklists::kMutatorThreadId);
     not_fully_constructed_worklist.Push(header.Payload());
     return;
   }
 
-  Marker::WriteBarrierWorklist::View write_barrier_worklist(
-      marker->write_barrier_worklist(), Marker::kMutatorThreadId);
+  MarkingWorklists::WriteBarrierWorklist::View write_barrier_worklist(
+      marker->marking_worklists().write_barrier_worklist(),
+      MarkingWorklists::kMutatorThreadId);
   write_barrier_worklist.Push(&header);
 }
...
@@ -222,7 +222,7 @@ TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedEmptyStack) {
   GCedWithCallback* object = MakeGarbageCollected<GCedWithCallback>(
       GetAllocationHandle(), [&marker](GCedWithCallback* obj) {
         Member<GCedWithCallback> member(obj);
-        marker.GetMarkingVisitorForTesting()->Trace(member);
+        marker.VisitorForTesting().Trace(member);
       });
   EXPECT_FALSE(HeapObjectHeader::FromPayload(object).IsMarked());
   marker.FinishMarking({MarkingConfig::CollectionType::kMajor,
@@ -239,7 +239,7 @@ TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedNonEmptyStack) {
   MakeGarbageCollected<GCedWithCallback>(
       GetAllocationHandle(), [&marker](GCedWithCallback* obj) {
         Member<GCedWithCallback> member(obj);
-        marker.GetMarkingVisitorForTesting()->Trace(member);
+        marker.VisitorForTesting().Trace(member);
         EXPECT_FALSE(HeapObjectHeader::FromPayload(obj).IsMarked());
         marker.FinishMarking(config);
         EXPECT_TRUE(HeapObjectHeader::FromPayload(obj).IsMarked());
...
@@ -43,7 +43,7 @@ class TestWithHeap : public TestWithPlatform {
     return allocation_handle_;
   }
 
-  std::unique_ptr<Marker>& GetMarkerRef() {
+  std::unique_ptr<MarkerBase>& GetMarkerRef() {
     return Heap::From(GetHeap())->marker_;
   }
...
@@ -21,7 +21,7 @@ namespace {
 
 class IncrementalMarkingScope {
  public:
-  explicit IncrementalMarkingScope(Marker* marker) : marker_(marker) {
+  explicit IncrementalMarkingScope(MarkerBase* marker) : marker_(marker) {
     marker_->StartMarking(kIncrementalConfig);
   }
@@ -35,18 +35,21 @@ class IncrementalMarkingScope {
       Marker::MarkingConfig::StackState::kNoHeapPointers,
       Marker::MarkingConfig::MarkingType::kIncremental};
 
-  Marker* marker_;
+  MarkerBase* marker_;
 };
 
 constexpr Marker::MarkingConfig IncrementalMarkingScope::kIncrementalConfig;
 
 class ExpectWriteBarrierFires final : private IncrementalMarkingScope {
  public:
-  ExpectWriteBarrierFires(Marker* marker, std::initializer_list<void*> objects)
+  ExpectWriteBarrierFires(MarkerBase* marker,
+                          std::initializer_list<void*> objects)
       : IncrementalMarkingScope(marker),
-        marking_worklist_(marker->marking_worklist(), Marker::kMutatorThreadId),
-        write_barrier_worklist_(marker->write_barrier_worklist(),
-                                Marker::kMutatorThreadId),
+        marking_worklist_(marker->marking_worklists().marking_worklist(),
+                          MarkingWorklists::kMutatorThreadId),
+        write_barrier_worklist_(
+            marker->marking_worklists().write_barrier_worklist(),
+            MarkingWorklists::kMutatorThreadId),
         objects_(objects) {
     EXPECT_TRUE(marking_worklist_.IsGlobalPoolEmpty());
     EXPECT_TRUE(write_barrier_worklist_.IsGlobalPoolEmpty());
@@ -58,7 +61,7 @@ class ExpectWriteBarrierFires final : private IncrementalMarkingScope {
   ~ExpectWriteBarrierFires() V8_NOEXCEPT {
     {
-      Marker::MarkingItem item;
+      MarkingWorklists::MarkingItem item;
       while (marking_worklist_.Pop(&item)) {
         auto pos = std::find(objects_.begin(), objects_.end(),
                              item.base_object_payload);
@@ -82,20 +85,22 @@ class ExpectWriteBarrierFires final : private IncrementalMarkingScope {
   }
 
  private:
-  Marker::MarkingWorklist::View marking_worklist_;
-  Marker::WriteBarrierWorklist::View write_barrier_worklist_;
+  MarkingWorklists::MarkingWorklist::View marking_worklist_;
+  MarkingWorklists::WriteBarrierWorklist::View write_barrier_worklist_;
   std::vector<void*> objects_;
   std::vector<HeapObjectHeader*> headers_;
 };
 
 class ExpectNoWriteBarrierFires final : private IncrementalMarkingScope {
  public:
-  ExpectNoWriteBarrierFires(Marker* marker,
+  ExpectNoWriteBarrierFires(MarkerBase* marker,
                             std::initializer_list<void*> objects)
       : IncrementalMarkingScope(marker),
-        marking_worklist_(marker->marking_worklist(), Marker::kMutatorThreadId),
-        write_barrier_worklist_(marker->write_barrier_worklist(),
-                                Marker::kMutatorThreadId) {
+        marking_worklist_(marker->marking_worklists().marking_worklist(),
+                          MarkingWorklists::kMutatorThreadId),
+        write_barrier_worklist_(
+            marker->marking_worklists().write_barrier_worklist(),
+            MarkingWorklists::kMutatorThreadId) {
     EXPECT_TRUE(marking_worklist_.IsGlobalPoolEmpty());
     EXPECT_TRUE(write_barrier_worklist_.IsGlobalPoolEmpty());
     for (void* object : objects) {
@@ -113,8 +118,8 @@ class ExpectNoWriteBarrierFires final : private IncrementalMarkingScope {
   }
 
  private:
-  Marker::MarkingWorklist::View marking_worklist_;
-  Marker::WriteBarrierWorklist::View write_barrier_worklist_;
+  MarkingWorklists::MarkingWorklist::View marking_worklist_;
+  MarkingWorklists::WriteBarrierWorklist::View write_barrier_worklist_;
   std::vector<std::pair<HeapObjectHeader*, bool /* was marked */>> headers_;
 };
 
@@ -151,11 +156,11 @@ class WriteBarrierTest : public testing::TestWithHeap {
     GetMarkerRef().reset();
   }
 
-  Marker* marker() const { return marker_; }
+  MarkerBase* marker() const { return marker_; }
 
  private:
   Heap* internal_heap_;
-  Marker* marker_;
+  MarkerBase* marker_;
 };
 
 // =============================================================================
...