Commit 20e1ba28 authored by Michael Lippautz, committed by Commit Bot

cppgc: Move ProcessWeakness into FinishMarking

For cross-thread handling we require the atomic marking pause to
provide an atomically consistent view of markbits and weak references.
This is ensured by locking the whole atomic pause from entering to
weak processing.

This CL moves ProcessWeakness() into FinishMarking(), which allows
nicely scoping the upcoming lock from EnterAtomicPause() to
LeaveAtomicPause(). The alternative is requiring the caller to ensure
proper locking, which is harder than ensuring that the Marker is
consistent.

Bug: chromium:1056170
Change-Id: Ib6028a0d76fcf9422c4a0d422fec3d568f106bf2
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2442620
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#70259}
parent 4e7621fc
......@@ -86,8 +86,7 @@ class Visitor {
return;
}
// TODO(chromium:1056170): DCHECK (or similar) for deleted values as they
// should come in at a different path.
CPPGC_DCHECK(value != kSentinelPointer);
VisitWeak(value, TraceTrait<T>::GetTraceDescriptor(value),
&HandleWeak<WeakMember<T>>, &weak_member);
}
......
......@@ -152,12 +152,12 @@ void CppHeap::EnterFinalPause(EmbedderStackState stack_state) {
void CppHeap::TraceEpilogue(TraceSummary* trace_summary) {
CHECK(marking_done_);
marker_->LeaveAtomicPause();
{
// Pre finalizers are forbidden from allocating objects
// Weakness callbacks and pre-finalizers are forbidden from allocating
// objects.
cppgc::internal::ObjectAllocator::NoAllocationScope no_allocation_scope_(
object_allocator_);
marker()->ProcessWeakness();
marker_->LeaveAtomicPause();
prefinalizer_handler()->InvokePreFinalizers();
}
marker_.reset();
......
......@@ -151,11 +151,12 @@ void Heap::FinalizeGarbageCollection(Config::StackState stack_state) {
DCHECK(!in_no_gc_scope());
config_.stack_state = stack_state;
DCHECK(marker_);
marker_->FinishMarking(stack_state);
{
// Pre finalizers are forbidden from allocating objects.
// Pre finalizers are forbidden from allocating objects. Note that this also
// guards atomic pause marking below, meaning that no internal method or
// external callbacks are allowed to allocate new objects.
ObjectAllocator::NoAllocationScope no_allocation_scope_(object_allocator_);
marker_->ProcessWeakness();
marker_->FinishMarking(stack_state);
prefinalizer_handler_->InvokePreFinalizers();
}
marker_.reset();
......
......@@ -242,6 +242,8 @@ void MarkerBase::LeaveAtomicPause() {
heap().stats_collector()->NotifyMarkingCompleted(
// GetOverallMarkedBytes also includes concurrently marked bytes.
schedule_.GetOverallMarkedBytes());
is_marking_started_ = false;
ProcessWeakness();
}
void MarkerBase::FinishMarking(MarkingConfig::StackState stack_state) {
......@@ -251,7 +253,6 @@ void MarkerBase::FinishMarking(MarkingConfig::StackState stack_state) {
v8::base::TimeTicks::Max());
mutator_marking_state_.Publish();
LeaveAtomicPause();
is_marking_started_ = false;
}
void MarkerBase::ProcessWeakness() {
......
......@@ -82,6 +82,7 @@ class V8_EXPORT_PRIVATE MarkerBase {
// Combines:
// - EnterAtomicPause()
// - AdvanceMarkingWithDeadline()
// - ProcessWeakness()
// - LeaveAtomicPause()
void FinishMarking(MarkingConfig::StackState);
......
......@@ -28,7 +28,6 @@ class MarkerTest : public testing::TestWithHeap {
auto* heap = Heap::From(GetHeap());
InitializeMarker(*heap, GetPlatformHandle().get(), config);
marker_->FinishMarking(stack_state);
marker_->ProcessWeakness();
// Pretend to finish sweeping as StatsCollector verifies that Notify*
// methods are called in the right order.
heap->stats_collector()->NotifySweepingCompleted();
......@@ -258,14 +257,20 @@ TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedNonEmptyStack) {
TEST_F(MarkerTest, SentinelNotClearedOnWeakPersistentHandling) {
static const Marker::MarkingConfig config = {
MarkingConfig::CollectionType::kMajor,
MarkingConfig::StackState::kNoHeapPointers};
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config);
MarkingConfig::StackState::kNoHeapPointers,
MarkingConfig::MarkingType::kIncremental};
Persistent<GCed> root = MakeGarbageCollected<GCed>(GetAllocationHandle());
auto* tmp = MakeGarbageCollected<GCed>(GetAllocationHandle());
root->SetWeakChild(tmp);
marker()->FinishMarking(MarkingConfig::StackState::kNoHeapPointers);
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config);
while (!marker()->IncrementalMarkingStepForTesting(
MarkingConfig::StackState::kNoHeapPointers)) {
}
// {root} object must be marked at this point because we do not allow
// encountering kSentinelPointer in WeakMember on regular Trace() calls.
ASSERT_TRUE(HeapObjectHeader::FromPayload(root.Get()).IsMarked());
root->SetWeakChild(kSentinelPointer);
marker()->ProcessWeakness();
marker()->FinishMarking(MarkingConfig::StackState::kNoHeapPointers);
EXPECT_EQ(kSentinelPointer, root->weak_child());
}
......@@ -290,7 +295,6 @@ class IncrementalMarkingTest : public testing::TestWithHeap {
void FinishMarking() {
marker_->FinishMarking(MarkingConfig::StackState::kMayContainHeapPointers);
marker_->ProcessWeakness();
// Pretend to finish sweeping as StatsCollector verifies that Notify*
// methods are called in the right order.
Heap::From(GetHeap())->stats_collector()->NotifySweepingCompleted();
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment