Commit b7b3abe8 authored by Omer Katz, committed by Commit Bot

cppgc: Replace worklist implementation with new worklist

This CL migrates cppgc to use Ulan's new worklist implementation.

Since there is no central segments array anymore, we cannot rely on
getting the same view (now renamed to Local) for a given task id.
To avoid creating many short-lived segments (e.g. for write barriers),
the marking state now holds Local views for all worklists and provides
access to them.

Bug: chromium:1056170
Change-Id: Id19fe1196b79ed251810e91074046998dc2a9177
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2390771
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#69767}
parent 80b1d7ff
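
For context, here is a minimal, self-contained sketch of the pattern the design described above moves to. It is illustrative only and not the V8/cppgc sources: it assumes a hypothetical Worklist<EntryType, kLocalEntries> whose per-thread Local buffers entries in a segment and publishes full segments to a shared global pool, plus a per-thread marking state (the hypothetical MutatorMarkingStateSketch) that owns one long-lived Local per worklist so that frequent producers such as write barriers do not create short-lived segments.

#include <cassert>
#include <cstddef>
#include <mutex>
#include <utility>
#include <vector>

template <typename EntryType, std::size_t kLocalEntries>
class Worklist {
 public:
  using Segment = std::vector<EntryType>;

  // Per-thread view: buffers up to kLocalEntries entries before publishing
  // a segment to the shared global pool.
  class Local {
   public:
    explicit Local(Worklist& worklist) : worklist_(worklist) {}

    void Push(EntryType entry) {
      segment_.push_back(std::move(entry));
      if (segment_.size() >= kLocalEntries) Publish();
    }

    bool Pop(EntryType* entry) {
      if (segment_.empty() && !worklist_.StealSegment(&segment_)) return false;
      *entry = std::move(segment_.back());
      segment_.pop_back();
      return true;
    }

    // Moves the buffered segment into the global pool so other Locals can
    // steal it.
    void Publish() {
      if (segment_.empty()) return;
      worklist_.PushSegment(std::move(segment_));
      segment_.clear();
    }

    bool IsLocalAndGlobalEmpty() const {
      return segment_.empty() && worklist_.IsGlobalEmpty();
    }

   private:
    Worklist& worklist_;
    Segment segment_;
  };

  bool IsGlobalEmpty() const {
    std::lock_guard<std::mutex> guard(mutex_);
    return global_.empty();
  }

 private:
  void PushSegment(Segment segment) {
    std::lock_guard<std::mutex> guard(mutex_);
    global_.push_back(std::move(segment));
  }

  bool StealSegment(Segment* out) {
    std::lock_guard<std::mutex> guard(mutex_);
    if (global_.empty()) return false;
    *out = std::move(global_.back());
    global_.pop_back();
    return true;
  }

  mutable std::mutex mutex_;
  std::vector<Segment> global_;
};

// Per-thread marking state owning one long-lived Local per worklist, so a
// write barrier pushes through an existing Local instead of creating a
// short-lived view (and segment) each time. Types and sizes are illustrative.
struct MutatorMarkingStateSketch {
  MutatorMarkingStateSketch(Worklist<int, 512>& m, Worklist<int, 64>& wb)
      : marking_local(m), write_barrier_local(wb) {}
  void Publish() {
    marking_local.Publish();
    write_barrier_local.Publish();
  }
  Worklist<int, 512>::Local marking_local;
  Worklist<int, 64>::Local write_barrier_local;
};

int main() {
  Worklist<int, 512> marking;
  Worklist<int, 64> write_barrier;
  MutatorMarkingStateSketch state(marking, write_barrier);
  state.write_barrier_local.Push(42);  // no per-barrier segment allocation
  state.Publish();                     // make buffered entries globally visible
  int item = 0;
  assert(state.write_barrier_local.Pop(&item) && item == 42);
  assert(state.marking_local.IsLocalAndGlobalEmpty());
  return 0;
}

The real worklist additionally supports merging another worklist's published segments, which is what the new MarkingState::FlushNotFullyConstructedObjects in this CL relies on (Publish the mutator's local segment, then Merge the not-fully-constructed worklist into the previously-not-fully-constructed one).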
......@@ -4337,6 +4337,7 @@ v8_source_set("cppgc_base") {
"src/heap/cppgc/logging.cc",
"src/heap/cppgc/marker.cc",
"src/heap/cppgc/marker.h",
"src/heap/cppgc/marking-state.cc",
"src/heap/cppgc/marking-state.h",
"src/heap/cppgc/marking-verifier.cc",
"src/heap/cppgc/marking-verifier.h",
......
......@@ -86,13 +86,13 @@ void ResetRememberedSet(HeapBase& heap) {
static constexpr size_t kDefaultDeadlineCheckInterval = 150u;
template <size_t kDeadlineCheckInterval = kDefaultDeadlineCheckInterval,
typename Worklist, typename Callback, typename Predicate>
bool DrainWorklistWithDeadline(Predicate should_yield, Worklist* worklist,
Callback callback, int task_id) {
typename WorklistLocal, typename Callback, typename Predicate>
bool DrainWorklistWithDeadline(Predicate should_yield,
WorklistLocal& worklist_local,
Callback callback) {
size_t processed_callback_count = 0;
typename Worklist::View view(worklist, task_id);
typename Worklist::EntryType item;
while (view.Pop(&item)) {
typename WorklistLocal::ItemType item;
while (worklist_local.Pop(&item)) {
callback(item);
if (processed_callback_count-- == 0) {
if (should_yield()) {
......@@ -105,18 +105,18 @@ bool DrainWorklistWithDeadline(Predicate should_yield, Worklist* worklist,
}
template <size_t kDeadlineCheckInterval = kDefaultDeadlineCheckInterval,
typename Worklist, typename Callback>
typename WorklistLocal, typename Callback>
bool DrainWorklistWithBytesAndTimeDeadline(MarkingState& marking_state,
size_t marked_bytes_deadline,
v8::base::TimeTicks time_deadline,
Worklist* worklist,
Callback callback, int task_id) {
WorklistLocal& worklist_local,
Callback callback) {
return DrainWorklistWithDeadline(
[&marking_state, marked_bytes_deadline, time_deadline]() {
return (marked_bytes_deadline <= marking_state.marked_bytes()) ||
(time_deadline <= v8::base::TimeTicks::Now());
},
worklist, callback, task_id);
worklist_local, callback);
}
void TraceMarkedObject(Visitor* visitor, const HeapObjectHeader* header) {
......@@ -168,11 +168,7 @@ MarkerBase::MarkerBase(Key, HeapBase& heap, cppgc::Platform* platform,
config_(config),
platform_(platform),
foreground_task_runner_(platform_->GetForegroundTaskRunner()),
mutator_marking_state_(
heap, marking_worklists_.marking_worklist(),
marking_worklists_.not_fully_constructed_worklist(),
marking_worklists_.weak_callback_worklist(),
MarkingWorklists::kMutatorThreadId) {}
mutator_marking_state_(heap, marking_worklists_) {}
MarkerBase::~MarkerBase() {
// The fixed point iteration may have found not-fully-constructed objects.
......@@ -182,10 +178,9 @@ MarkerBase::~MarkerBase() {
#if DEBUG
DCHECK_NE(MarkingConfig::StackState::kNoHeapPointers, config_.stack_state);
HeapObjectHeader* header;
MarkingWorklists::NotFullyConstructedWorklist::View view(
marking_worklists_.not_fully_constructed_worklist(),
MarkingWorklists::kMutatorThreadId);
while (view.Pop(&header)) {
MarkingWorklists::NotFullyConstructedWorklist::Local& local =
mutator_marking_state_.not_fully_constructed_worklist();
while (local.Pop(&header)) {
DCHECK(header->IsMarked());
}
#else
......@@ -218,7 +213,7 @@ void MarkerBase::EnterAtomicPause(MarkingConfig::StackState stack_state) {
// VisitRoots also resets the LABs.
VisitRoots(config_.stack_state);
if (config_.stack_state == MarkingConfig::StackState::kNoHeapPointers) {
marking_worklists_.FlushNotFullyConstructedObjects();
mutator_marking_state_.FlushNotFullyConstructedObjects();
} else {
MarkNotFullyConstructedObjects();
}
......@@ -237,6 +232,7 @@ void MarkerBase::FinishMarking(MarkingConfig::StackState stack_state) {
EnterAtomicPause(stack_state);
ProcessWorklistsWithDeadline(std::numeric_limits<size_t>::max(),
v8::base::TimeDelta::Max());
mutator_marking_state_.Publish();
LeaveAtomicPause();
is_marking_started_ = false;
}
......@@ -247,10 +243,9 @@ void MarkerBase::ProcessWeakness() {
// Call weak callbacks on objects that may now be pointing to dead objects.
MarkingWorklists::WeakCallbackItem item;
LivenessBroker broker = LivenessBrokerFactory::Create();
MarkingWorklists::WeakCallbackWorklist::View view(
marking_worklists_.weak_callback_worklist(),
MarkingWorklists::kMutatorThreadId);
while (view.Pop(&item)) {
MarkingWorklists::WeakCallbackWorklist::Local& local =
mutator_marking_state_.weak_callback_worklist();
while (local.Pop(&item)) {
item.callback(broker, item.parameter);
}
// Weak callbacks should not add any new objects for marking.
......@@ -285,7 +280,7 @@ bool MarkerBase::IncrementalMarkingStepForTesting(
bool MarkerBase::IncrementalMarkingStep(MarkingConfig::StackState stack_state) {
if (stack_state == MarkingConfig::StackState::kNoHeapPointers) {
marking_worklists_.FlushNotFullyConstructedObjects();
mutator_marking_state_.FlushNotFullyConstructedObjects();
}
config_.stack_state = stack_state;
......@@ -315,6 +310,7 @@ bool MarkerBase::AdvanceMarkingWithDeadline(v8::base::TimeDelta max_duration) {
DCHECK_NE(MarkingConfig::MarkingType::kAtomic, config_.marking_type);
ScheduleIncrementalMarkingTask();
}
mutator_marking_state_.Publish();
return is_done;
}
......@@ -329,18 +325,17 @@ bool MarkerBase::ProcessWorklistsWithDeadline(
// callbacks.
if (!DrainWorklistWithBytesAndTimeDeadline(
mutator_marking_state_, marked_bytes_deadline, time_deadline,
marking_worklists_.previously_not_fully_constructed_worklist(),
mutator_marking_state_.previously_not_fully_constructed_worklist(),
[this](HeapObjectHeader* header) {
TraceMarkedObject(&visitor(), header);
mutator_marking_state_.AccountMarkedBytes(*header);
},
MarkingWorklists::kMutatorThreadId)) {
})) {
return false;
}
if (!DrainWorklistWithBytesAndTimeDeadline(
mutator_marking_state_, marked_bytes_deadline, time_deadline,
marking_worklists_.marking_worklist(),
mutator_marking_state_.marking_worklist(),
[this](const MarkingWorklists::MarkingItem& item) {
const HeapObjectHeader& header =
HeapObjectHeader::FromPayload(item.base_object_payload);
......@@ -350,32 +345,28 @@ bool MarkerBase::ProcessWorklistsWithDeadline(
header.IsMarked<HeapObjectHeader::AccessMode::kNonAtomic>());
item.callback(&visitor(), item.base_object_payload);
mutator_marking_state_.AccountMarkedBytes(header);
},
MarkingWorklists::kMutatorThreadId)) {
})) {
return false;
}
if (!DrainWorklistWithBytesAndTimeDeadline(
mutator_marking_state_, marked_bytes_deadline, time_deadline,
marking_worklists_.write_barrier_worklist(),
mutator_marking_state_.write_barrier_worklist(),
[this](HeapObjectHeader* header) {
TraceMarkedObject(&visitor(), header);
mutator_marking_state_.AccountMarkedBytes(*header);
},
MarkingWorklists::kMutatorThreadId)) {
})) {
return false;
}
} while (!marking_worklists_.marking_worklist()->IsLocalViewEmpty(
MarkingWorklists::kMutatorThreadId));
} while (!mutator_marking_state_.marking_worklist().IsLocalAndGlobalEmpty());
return true;
}
void MarkerBase::MarkNotFullyConstructedObjects() {
HeapObjectHeader* header;
MarkingWorklists::NotFullyConstructedWorklist::View view(
marking_worklists_.not_fully_constructed_worklist(),
MarkingWorklists::kMutatorThreadId);
while (view.Pop(&header)) {
MarkingWorklists::NotFullyConstructedWorklist::Local& local =
mutator_marking_state_.not_fully_constructed_worklist();
while (local.Pop(&header)) {
DCHECK(header);
DCHECK(header->IsMarked<HeapObjectHeader::AccessMode::kNonAtomic>());
// TraceConservativelyIfNeeded will either push to a worklist
......
......@@ -200,18 +200,11 @@ class V8_EXPORT_PRIVATE Marker final : public MarkerBase {
};
void MarkerBase::WriteBarrierForInConstructionObject(HeapObjectHeader& header) {
MarkingWorklists::NotFullyConstructedWorklist::View
not_fully_constructed_worklist(
marking_worklists_.not_fully_constructed_worklist(),
MarkingWorklists::kMutatorThreadId);
not_fully_constructed_worklist.Push(&header);
mutator_marking_state_.not_fully_constructed_worklist().Push(&header);
}
void MarkerBase::WriteBarrierForObject(HeapObjectHeader& header) {
MarkingWorklists::WriteBarrierWorklist::View write_barrier_worklist(
marking_worklists_.write_barrier_worklist(),
MarkingWorklists::kMutatorThreadId);
write_barrier_worklist.Push(&header);
mutator_marking_state_.write_barrier_worklist().Push(&header);
}
} // namespace internal
......
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/cppgc/marking-state.h"
namespace cppgc {
namespace internal {
void MarkingState::FlushNotFullyConstructedObjects() {
not_fully_constructed_worklist().Publish();
if (!not_fully_constructed_worklist_.IsGlobalEmpty()) {
previously_not_fully_constructed_worklist_.Merge(
&not_fully_constructed_worklist_);
}
DCHECK(not_fully_constructed_worklist_.IsGlobalEmpty());
}
} // namespace internal
} // namespace cppgc
......@@ -18,9 +18,7 @@ namespace internal {
// C++ marking implementation.
class MarkingState {
public:
inline MarkingState(HeapBase& heap, MarkingWorklists::MarkingWorklist*,
MarkingWorklists::NotFullyConstructedWorklist*,
MarkingWorklists::WeakCallbackWorklist*, int);
inline MarkingState(HeapBase& heap, MarkingWorklists&);
MarkingState(const MarkingState&) = delete;
MarkingState& operator=(const MarkingState&) = delete;
......@@ -44,31 +42,64 @@ class MarkingState {
inline void AccountMarkedBytes(const HeapObjectHeader&);
size_t marked_bytes() const { return marked_bytes_; }
void Publish() {
marking_worklist_.Publish();
not_fully_constructed_worklist_.Publish();
previously_not_fully_constructed_worklist_.Publish();
weak_callback_worklist_.Publish();
write_barrier_worklist_.Publish();
}
// Moves objects in not_fully_constructed_worklist_ to
// previously_not_fully_constructed_worklist_.
void FlushNotFullyConstructedObjects();
MarkingWorklists::MarkingWorklist::Local& marking_worklist() {
return marking_worklist_;
}
MarkingWorklists::NotFullyConstructedWorklist::Local&
not_fully_constructed_worklist() {
return not_fully_constructed_worklist_;
}
MarkingWorklists::NotFullyConstructedWorklist::Local&
previously_not_fully_constructed_worklist() {
return previously_not_fully_constructed_worklist_;
}
MarkingWorklists::WeakCallbackWorklist::Local& weak_callback_worklist() {
return weak_callback_worklist_;
}
MarkingWorklists::WriteBarrierWorklist::Local& write_barrier_worklist() {
return write_barrier_worklist_;
}
private:
#ifdef DEBUG
HeapBase& heap_;
#endif // DEBUG
MarkingWorklists::MarkingWorklist::View marking_worklist_;
MarkingWorklists::NotFullyConstructedWorklist::View
MarkingWorklists::MarkingWorklist::Local marking_worklist_;
MarkingWorklists::NotFullyConstructedWorklist::Local
not_fully_constructed_worklist_;
MarkingWorklists::WeakCallbackWorklist::View weak_callback_worklist_;
MarkingWorklists::NotFullyConstructedWorklist::Local
previously_not_fully_constructed_worklist_;
MarkingWorklists::WeakCallbackWorklist::Local weak_callback_worklist_;
MarkingWorklists::WriteBarrierWorklist::Local write_barrier_worklist_;
size_t marked_bytes_ = 0;
};
MarkingState::MarkingState(
HeapBase& heap, MarkingWorklists::MarkingWorklist* marking_worklist,
MarkingWorklists::NotFullyConstructedWorklist*
not_fully_constructed_worklist,
MarkingWorklists::WeakCallbackWorklist* weak_callback_worklist, int task_id)
MarkingState::MarkingState(HeapBase& heap, MarkingWorklists& marking_worklists)
:
#ifdef DEBUG
heap_(heap),
#endif // DEBUG
marking_worklist_(marking_worklist, task_id),
not_fully_constructed_worklist_(not_fully_constructed_worklist, task_id),
weak_callback_worklist_(weak_callback_worklist, task_id) {
marking_worklist_(marking_worklists.marking_worklist()),
not_fully_constructed_worklist_(
marking_worklists.not_fully_constructed_worklist()),
previously_not_fully_constructed_worklist_(
marking_worklists.previously_not_fully_constructed_worklist()),
weak_callback_worklist_(marking_worklists.weak_callback_worklist()),
write_barrier_worklist_(marking_worklists.write_barrier_worklist()) {
}
void MarkingState::MarkAndPush(const void* object, TraceDescriptor desc) {
......
......@@ -23,7 +23,7 @@ class V8_EXPORT_PRIVATE MarkingVisitor : public VisitorBase {
MarkingVisitor(HeapBase&, MarkingState&);
~MarkingVisitor() override = default;
private:
protected:
void Visit(const void*, TraceDescriptor) final;
void VisitWeak(const void*, TraceDescriptor, WeakCallback, const void*) final;
void VisitRoot(const void*, TraceDescriptor) final;
......
......@@ -18,15 +18,5 @@ void MarkingWorklists::ClearForTesting() {
weak_callback_worklist_.Clear();
}
void MarkingWorklists::FlushNotFullyConstructedObjects() {
if (!not_fully_constructed_worklist_.IsLocalViewEmpty(kMutatorThreadId)) {
not_fully_constructed_worklist_.FlushToGlobal(kMutatorThreadId);
previously_not_fully_constructed_worklist_.MergeGlobalPool(
&not_fully_constructed_worklist_);
}
DCHECK(not_fully_constructed_worklist_.IsLocalViewEmpty(
MarkingWorklists::kMutatorThreadId));
}
} // namespace internal
} // namespace cppgc
......@@ -14,9 +14,6 @@ namespace internal {
class HeapObjectHeader;
class MarkingWorklists {
static constexpr int kNumConcurrentMarkers = 0;
static constexpr int kNumMarkers = 1 + kNumConcurrentMarkers;
public:
static constexpr int kMutatorThreadId = 0;
......@@ -28,14 +25,13 @@ class MarkingWorklists {
// Segment size of 512 entries necessary to avoid throughput regressions.
// Since the work list is currently a temporary object this is not a problem.
using MarkingWorklist =
Worklist<MarkingItem, 512 /* local entries */, kNumMarkers>;
using MarkingWorklist = Worklist<MarkingItem, 512 /* local entries */>;
using NotFullyConstructedWorklist =
Worklist<HeapObjectHeader*, 16 /* local entries */, kNumMarkers>;
Worklist<HeapObjectHeader*, 16 /* local entries */>;
using WeakCallbackWorklist =
Worklist<WeakCallbackItem, 64 /* local entries */, kNumMarkers>;
Worklist<WeakCallbackItem, 64 /* local entries */>;
using WriteBarrierWorklist =
Worklist<HeapObjectHeader*, 64 /*local entries */, kNumMarkers>;
Worklist<HeapObjectHeader*, 64 /*local entries */>;
MarkingWorklist* marking_worklist() { return &marking_worklist_; }
NotFullyConstructedWorklist* not_fully_constructed_worklist() {
......@@ -51,10 +47,6 @@ class MarkingWorklists {
return &weak_callback_worklist_;
}
// Moves objects in not_fully_constructed_worklist_ to
// previously_not_full_constructed_worklists_.
void FlushNotFullyConstructedObjects();
void ClearForTesting();
private:
......
......@@ -48,6 +48,7 @@ class TestMarkingVisitor : public MarkingVisitor {
public:
explicit TestMarkingVisitor(Marker* marker)
: MarkingVisitor(marker->heap(), marker->MarkingStateForTesting()) {}
~TestMarkingVisitor() { marking_state_.Publish(); }
};
} // namespace
......
......@@ -43,15 +43,12 @@ class ExpectWriteBarrierFires final : private IncrementalMarkingScope {
ExpectWriteBarrierFires(MarkerBase* marker,
std::initializer_list<void*> objects)
: IncrementalMarkingScope(marker),
marking_worklist_(
marker->MarkingWorklistsForTesting().marking_worklist(),
MarkingWorklists::kMutatorThreadId),
marking_worklist_(marker->MarkingStateForTesting().marking_worklist()),
write_barrier_worklist_(
marker->MarkingWorklistsForTesting().write_barrier_worklist(),
MarkingWorklists::kMutatorThreadId),
marker->MarkingStateForTesting().write_barrier_worklist()),
objects_(objects) {
EXPECT_TRUE(marking_worklist_.IsGlobalPoolEmpty());
EXPECT_TRUE(write_barrier_worklist_.IsGlobalPoolEmpty());
EXPECT_TRUE(marking_worklist_.IsGlobalEmpty());
EXPECT_TRUE(write_barrier_worklist_.IsGlobalEmpty());
for (void* object : objects) {
headers_.push_back(&HeapObjectHeader::FromPayload(object));
EXPECT_FALSE(headers_.back()->IsMarked());
......@@ -79,13 +76,13 @@ class ExpectWriteBarrierFires final : private IncrementalMarkingScope {
EXPECT_TRUE(header->IsMarked());
header->Unmark();
}
EXPECT_TRUE(marking_worklist_.IsGlobalPoolEmpty());
EXPECT_TRUE(write_barrier_worklist_.IsGlobalPoolEmpty());
EXPECT_TRUE(marking_worklist_.IsGlobalEmpty());
EXPECT_TRUE(write_barrier_worklist_.IsGlobalEmpty());
}
private:
MarkingWorklists::MarkingWorklist::View marking_worklist_;
MarkingWorklists::WriteBarrierWorklist::View write_barrier_worklist_;
MarkingWorklists::MarkingWorklist::Local& marking_worklist_;
MarkingWorklists::WriteBarrierWorklist::Local& write_barrier_worklist_;
std::vector<void*> objects_;
std::vector<HeapObjectHeader*> headers_;
};
......@@ -95,14 +92,11 @@ class ExpectNoWriteBarrierFires final : private IncrementalMarkingScope {
ExpectNoWriteBarrierFires(MarkerBase* marker,
std::initializer_list<void*> objects)
: IncrementalMarkingScope(marker),
marking_worklist_(
marker->MarkingWorklistsForTesting().marking_worklist(),
MarkingWorklists::kMutatorThreadId),
marking_worklist_(marker->MarkingStateForTesting().marking_worklist()),
write_barrier_worklist_(
marker->MarkingWorklistsForTesting().write_barrier_worklist(),
MarkingWorklists::kMutatorThreadId) {
EXPECT_TRUE(marking_worklist_.IsGlobalPoolEmpty());
EXPECT_TRUE(write_barrier_worklist_.IsGlobalPoolEmpty());
marker->MarkingStateForTesting().write_barrier_worklist()) {
EXPECT_TRUE(marking_worklist_.IsGlobalEmpty());
EXPECT_TRUE(write_barrier_worklist_.IsGlobalEmpty());
for (void* object : objects) {
auto* header = &HeapObjectHeader::FromPayload(object);
headers_.emplace_back(header, header->IsMarked());
......@@ -110,16 +104,16 @@ class ExpectNoWriteBarrierFires final : private IncrementalMarkingScope {
}
~ExpectNoWriteBarrierFires() {
EXPECT_TRUE(marking_worklist_.IsGlobalPoolEmpty());
EXPECT_TRUE(write_barrier_worklist_.IsGlobalPoolEmpty());
EXPECT_TRUE(marking_worklist_.IsGlobalEmpty());
EXPECT_TRUE(write_barrier_worklist_.IsGlobalEmpty());
for (const auto& pair : headers_) {
EXPECT_EQ(pair.second, pair.first->IsMarked());
}
}
private:
MarkingWorklists::MarkingWorklist::View marking_worklist_;
MarkingWorklists::WriteBarrierWorklist::View write_barrier_worklist_;
MarkingWorklists::MarkingWorklist::Local& marking_worklist_;
MarkingWorklists::WriteBarrierWorklist::Local& write_barrier_worklist_;
std::vector<std::pair<HeapObjectHeader*, bool /* was marked */>> headers_;
};
......