Commit 889e6bb6 authored by Michael Lippautz, committed by V8 LUCI CQ

[heap] Another round of Worklist simplifications

- Swap() was not necessary as all uses merely required Merge()
- Remove unused empty Local ctor
- Use reference for backref as it's always supposed to be non-null

Bug: v8:13193
Change-Id: Ide0a0de15185a67d028890371ae30528fd55a058
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3846863
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82648}
parent a6551590
......@@ -8,8 +8,8 @@
#include <cstddef>
#include <utility>
#include "src/base/atomic-utils.h"
#include "src/base/logging.h"
#include "src/base/macros.h"
#include "src/base/platform/mutex.h"
#include "src/base/platform/platform.h"
......@@ -40,8 +40,9 @@ class V8_EXPORT_PRIVATE SegmentBase {
// - Entries in the worklist are of type `EntryType`.
// - Segments have a capacity of at least `MinSegmentSize` but possibly more.
//
// All methods on the worklist itself only consider the list of segments.
// Unpublished work in local views is not visible.
// All methods on the worklist itself are safe for concurrent usage but only
// consider published segments. Unpublished work in views using `Local` is not
// visible.
template <typename EntryType, uint16_t MinSegmentSize>
class Worklist final {
public:
......@@ -65,11 +66,9 @@ class Worklist final {
// concurrently for an approximation.
size_t Size() const;
// Moves the segments from `other` into this worklist.
void Merge(Worklist<EntryType, MinSegmentSize>* other);
// Swaps the segments with `other`.
void Swap(Worklist<EntryType, MinSegmentSize>* other);
// Moves the segments from `other` into this worklist, leaving behind `other`
// as empty.
void Merge(Worklist<EntryType, MinSegmentSize>& other);
// Removes all segments from the worklist.
void Clear();
......@@ -178,44 +177,30 @@ void Worklist<EntryType, MinSegmentSize>::Iterate(Callback callback) const {
template <typename EntryType, uint16_t MinSegmentSize>
void Worklist<EntryType, MinSegmentSize>::Merge(
Worklist<EntryType, MinSegmentSize>* other) {
Segment* top = nullptr;
size_t other_size = 0;
Worklist<EntryType, MinSegmentSize>& other) {
Segment* other_top;
size_t other_size;
{
v8::base::MutexGuard guard(&other->lock_);
if (!other->top_) return;
top = other->top_;
other_size = other->size_.load(std::memory_order_relaxed);
other->size_.store(0, std::memory_order_relaxed);
other->top_ = nullptr;
v8::base::MutexGuard guard(&other.lock_);
if (!other.top_) return;
other_top = std::exchange(other.top_, nullptr);
other_size = other.size_.exchange(0, std::memory_order_relaxed);
}
// It's safe to iterate through these segments because the top was
// extracted from |other|.
Segment* end = top;
// extracted from `other`.
Segment* end = other_top;
while (end->next()) end = end->next();
{
v8::base::MutexGuard guard(&lock_);
size_.fetch_add(other_size, std::memory_order_relaxed);
end->set_next(top_);
top_ = top;
top_ = other_top;
}
}
// Exchanges the complete segment lists (and cached element counts) of this
// worklist and `other`, holding both worklists' locks for the duration.
// NOTE(review): the two mutexes are acquired in a fixed per-call order
// (`this` first, then `other`); concurrent a.Swap(&b) and b.Swap(&a) could
// deadlock — confirm callers never swap in both directions concurrently.
template <typename EntryType, uint16_t MinSegmentSize>
void Worklist<EntryType, MinSegmentSize>::Swap(
    Worklist<EntryType, MinSegmentSize>* other) {
  // Hold both locks so the segment list and size move together consistently.
  v8::base::MutexGuard guard1(&lock_);
  v8::base::MutexGuard guard2(&other->lock_);
  // Swap the intrusive segment stacks.
  Segment* top = top_;
  top_ = other->top_;
  other->top_ = top;
  // Swap the sizes. Relaxed ordering suffices here: both locks are held, and
  // unlocked readers of `size_` only ever get an approximation anyway.
  size_t other_size = other->size_.exchange(
      size_.load(std::memory_order_relaxed), std::memory_order_relaxed);
  size_.store(other_size, std::memory_order_relaxed);
}
template <typename EntryType, uint16_t MinSegmentSize>
class Worklist<EntryType, MinSegmentSize>::Segment final
: public internal::SegmentBase {
......@@ -300,11 +285,7 @@ class Worklist<EntryType, MinSegmentSize>::Local final {
public:
using ItemType = EntryType;
// An empty local view does not have any segments and is not attached to a
// worklist. As such it will crash on any operation until it is initialized
// properly via move constructor.
Local() = default;
explicit Local(Worklist<EntryType, MinSegmentSize>* worklist);
explicit Local(Worklist<EntryType, MinSegmentSize>& worklist);
~Local();
// Moving needs to specify whether the `worklist_` pointer is preserved or
......@@ -326,7 +307,8 @@ class Worklist<EntryType, MinSegmentSize>::Local final {
size_t PushSegmentSize() const { return push_segment_->Size(); }
void Publish();
void Merge(Worklist<EntryType, MinSegmentSize>::Local* other);
void Merge(Worklist<EntryType, MinSegmentSize>::Local& other);
void Clear();
......@@ -364,14 +346,14 @@ class Worklist<EntryType, MinSegmentSize>::Local final {
return static_cast<const Segment*>(pop_segment_);
}
Worklist<EntryType, MinSegmentSize>* worklist_ = nullptr;
Worklist<EntryType, MinSegmentSize>& worklist_;
internal::SegmentBase* push_segment_ = nullptr;
internal::SegmentBase* pop_segment_ = nullptr;
};
template <typename EntryType, uint16_t MinSegmentSize>
Worklist<EntryType, MinSegmentSize>::Local::Local(
Worklist<EntryType, MinSegmentSize>* worklist)
Worklist<EntryType, MinSegmentSize>& worklist)
: worklist_(worklist),
push_segment_(internal::SegmentBase::GetSentinelSegmentAddress()),
pop_segment_(internal::SegmentBase::GetSentinelSegmentAddress()) {}
......@@ -417,7 +399,7 @@ bool Worklist<EntryType, MinSegmentSize>::Local::IsLocalEmpty() const {
template <typename EntryType, uint16_t MinSegmentSize>
bool Worklist<EntryType, MinSegmentSize>::Local::IsGlobalEmpty() const {
return worklist_->IsEmpty();
return worklist_.IsEmpty();
}
template <typename EntryType, uint16_t MinSegmentSize>
......@@ -428,30 +410,30 @@ void Worklist<EntryType, MinSegmentSize>::Local::Publish() {
template <typename EntryType, uint16_t MinSegmentSize>
void Worklist<EntryType, MinSegmentSize>::Local::Merge(
Worklist<EntryType, MinSegmentSize>::Local* other) {
other->Publish();
worklist_->Merge(other->worklist_);
Worklist<EntryType, MinSegmentSize>::Local& other) {
other.Publish();
worklist_.Merge(other.worklist_);
}
template <typename EntryType, uint16_t MinSegmentSize>
void Worklist<EntryType, MinSegmentSize>::Local::PublishPushSegment() {
if (push_segment_ != internal::SegmentBase::GetSentinelSegmentAddress())
worklist_->Push(push_segment());
worklist_.Push(push_segment());
push_segment_ = NewSegment();
}
template <typename EntryType, uint16_t MinSegmentSize>
void Worklist<EntryType, MinSegmentSize>::Local::PublishPopSegment() {
if (pop_segment_ != internal::SegmentBase::GetSentinelSegmentAddress())
worklist_->Push(pop_segment());
worklist_.Push(pop_segment());
pop_segment_ = NewSegment();
}
template <typename EntryType, uint16_t MinSegmentSize>
bool Worklist<EntryType, MinSegmentSize>::Local::StealPopSegment() {
if (worklist_->IsEmpty()) return false;
if (worklist_.IsEmpty()) return false;
Segment* new_segment = nullptr;
if (worklist_->Pop(&new_segment)) {
if (worklist_.Pop(&new_segment)) {
DeleteSegment(pop_segment_);
pop_segment_ = new_segment;
return true;
......
......@@ -510,7 +510,7 @@ Compactor::CompactableSpaceHandling Compactor::CompactSpacesIfEnabled() {
MovableReferences movable_references(*heap_.heap());
CompactionWorklists::MovableReferencesWorklist::Local local(
compaction_worklists_->movable_slots_worklist());
*compaction_worklists_->movable_slots_worklist());
CompactionWorklists::MovableReference* slot;
while (local.Pop(&slot)) {
movable_references.AddOrFilter(slot);
......
......@@ -338,7 +338,7 @@ class WeakCallbackJobTask final : public cppgc::JobTask {
StatsCollector::EnabledConcurrentScope stats_scope(
marker_->heap().stats_collector(),
StatsCollector::kConcurrentWeakCallback);
MarkingWorklists::WeakCallbackWorklist::Local local(callback_worklist_);
MarkingWorklists::WeakCallbackWorklist::Local local(*callback_worklist_);
MarkingWorklists::WeakCallbackItem item;
while (local.Pop(&item)) {
item.callback(broker_, item.parameter);
......
......@@ -27,7 +27,7 @@ void MutatorMarkingState::FlushDiscoveredEphemeronPairs() {
discovered_ephemeron_pairs_worklist_.Publish();
if (!discovered_ephemeron_pairs_worklist_.IsGlobalEmpty()) {
ephemeron_pairs_for_processing_worklist_.Merge(
&discovered_ephemeron_pairs_worklist_);
discovered_ephemeron_pairs_worklist_);
}
}
......
......@@ -58,7 +58,7 @@ class MarkingStateBase {
MarkingStateBase::MarkingStateBase(HeapBase& heap,
MarkingWorklists& marking_worklists)
: heap_(heap),
marking_worklist_(marking_worklists.marking_worklist()),
marking_worklist_(*marking_worklists.marking_worklist()),
not_fully_constructed_worklist_(
*marking_worklists.not_fully_constructed_worklist()) {}
......@@ -235,24 +235,24 @@ BasicMarkingState::BasicMarkingState(HeapBase& heap,
CompactionWorklists* compaction_worklists)
: MarkingStateBase(heap, marking_worklists),
previously_not_fully_constructed_worklist_(
marking_worklists.previously_not_fully_constructed_worklist()),
weak_callback_worklist_(marking_worklists.weak_callback_worklist()),
*marking_worklists.previously_not_fully_constructed_worklist()),
weak_callback_worklist_(*marking_worklists.weak_callback_worklist()),
parallel_weak_callback_worklist_(
marking_worklists.parallel_weak_callback_worklist()),
write_barrier_worklist_(marking_worklists.write_barrier_worklist()),
*marking_worklists.parallel_weak_callback_worklist()),
write_barrier_worklist_(*marking_worklists.write_barrier_worklist()),
concurrent_marking_bailout_worklist_(
marking_worklists.concurrent_marking_bailout_worklist()),
*marking_worklists.concurrent_marking_bailout_worklist()),
discovered_ephemeron_pairs_worklist_(
marking_worklists.discovered_ephemeron_pairs_worklist()),
*marking_worklists.discovered_ephemeron_pairs_worklist()),
ephemeron_pairs_for_processing_worklist_(
marking_worklists.ephemeron_pairs_for_processing_worklist()),
*marking_worklists.ephemeron_pairs_for_processing_worklist()),
weak_containers_worklist_(*marking_worklists.weak_containers_worklist()),
retrace_marked_objects_worklist_(
marking_worklists.retrace_marked_objects_worklist()) {
*marking_worklists.retrace_marked_objects_worklist()) {
if (compaction_worklists) {
movable_slots_worklist_ =
std::make_unique<CompactionWorklists::MovableReferencesWorklist::Local>(
compaction_worklists->movable_slots_worklist());
*compaction_worklists->movable_slots_worklist());
}
}
......
......@@ -2316,7 +2316,7 @@ bool MarkCompactCollector::MarkTransitiveClosureUntilFixpoint() {
// drain them in this iteration.
DCHECK(
local_weak_objects()->current_ephemerons_local.IsLocalAndGlobalEmpty());
weak_objects_.current_ephemerons.Swap(&weak_objects_.next_ephemerons);
weak_objects_.current_ephemerons.Merge(weak_objects_.next_ephemerons);
heap()->concurrent_marking()->set_another_ephemeron_iteration(false);
{
......@@ -2388,7 +2388,7 @@ void MarkCompactCollector::MarkTransitiveClosureLinear() {
DCHECK(
local_weak_objects()->current_ephemerons_local.IsLocalAndGlobalEmpty());
weak_objects_.current_ephemerons.Swap(&weak_objects_.next_ephemerons);
weak_objects_.current_ephemerons.Merge(weak_objects_.next_ephemerons);
while (local_weak_objects()->current_ephemerons_local.Pop(&ephemeron)) {
ProcessEphemeron(ephemeron.key, ephemeron.value);
......@@ -2574,7 +2574,7 @@ void MarkCompactCollector::VerifyEphemeronMarking() {
DCHECK(
local_weak_objects()->current_ephemerons_local.IsLocalAndGlobalEmpty());
weak_objects_.current_ephemerons.Swap(&weak_objects_.next_ephemerons);
weak_objects_.current_ephemerons.Merge(weak_objects_.next_ephemerons);
while (local_weak_objects()->current_ephemerons_local.Pop(&ephemeron)) {
CHECK(!ProcessEphemeron(ephemeron.key, ephemeron.value));
}
......
......@@ -26,7 +26,7 @@ MarkingBarrier::MarkingBarrier(LocalHeap* local_heap)
: heap_(local_heap->heap()),
collector_(heap_->mark_compact_collector()),
incremental_marking_(heap_->incremental_marking()),
worklist_(collector_->marking_worklists()->shared()),
worklist_(*collector_->marking_worklists()->shared()),
marking_state_(heap_->isolate()),
is_main_thread_barrier_(local_heap->is_main_thread()),
is_shared_heap_(heap_->IsShared()) {}
......
......@@ -94,7 +94,7 @@ GetLocalPerContextMarkingWorklists(bool is_per_context_mode,
worklist_by_context.reserve(global->context_worklists().size());
for (auto& cw : global->context_worklists()) {
worklist_by_context[cw.context] =
std::make_unique<MarkingWorklist::Local>(cw.worklist.get());
std::make_unique<MarkingWorklist::Local>(*cw.worklist.get());
}
return worklist_by_context;
}
......@@ -105,14 +105,14 @@ MarkingWorklists::Local::Local(
MarkingWorklists* global,
std::unique_ptr<CppMarkingState> cpp_marking_state)
: active_(&shared_),
shared_(global->shared()),
on_hold_(global->on_hold()),
wrapper_(global->wrapper()),
shared_(*global->shared()),
on_hold_(*global->on_hold()),
wrapper_(*global->wrapper()),
active_context_(kSharedContext),
is_per_context_mode_(!global->context_worklists().empty()),
worklist_by_context_(
GetLocalPerContextMarkingWorklists(is_per_context_mode_, global)),
other_(global->other()),
other_(*global->other()),
cpp_marking_state_(std::move(cpp_marking_state)) {}
void MarkingWorklists::Local::Publish() {
......@@ -173,7 +173,7 @@ void MarkingWorklists::Local::ShareWork() {
}
}
void MarkingWorklists::Local::MergeOnHold() { shared_.Merge(&on_hold_); }
void MarkingWorklists::Local::MergeOnHold() { shared_.Merge(on_hold_); }
bool MarkingWorklists::Local::PopContext(HeapObject* object) {
DCHECK(is_per_context_mode_);
......
......@@ -460,7 +460,7 @@ void ScavengerCollector::CollectGarbage() {
{
TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_FREE_REMEMBERED_SET);
Scavenger::EmptyChunksList::Local empty_chunks_local(&empty_chunks);
Scavenger::EmptyChunksList::Local empty_chunks_local(empty_chunks);
MemoryChunk* chunk;
while (empty_chunks_local.Pop(&chunk)) {
// Since sweeping was already restarted only check chunks that already got
......@@ -584,9 +584,9 @@ int ScavengerCollector::NumberOfScavengeTasks() {
Scavenger::PromotionList::Local::Local(Scavenger::PromotionList* promotion_list)
: regular_object_promotion_list_local_(
&promotion_list->regular_object_promotion_list_),
promotion_list->regular_object_promotion_list_),
large_object_promotion_list_local_(
&promotion_list->large_object_promotion_list_) {}
promotion_list->large_object_promotion_list_) {}
namespace {
ConcurrentAllocator* CreateSharedOldAllocator(Heap* heap) {
......@@ -603,10 +603,10 @@ Scavenger::Scavenger(ScavengerCollector* collector, Heap* heap, bool is_logging,
EphemeronTableList* ephemeron_table_list, int task_id)
: collector_(collector),
heap_(heap),
empty_chunks_local_(empty_chunks),
empty_chunks_local_(*empty_chunks),
promotion_list_local_(promotion_list),
copied_list_local_(copied_list),
ephemeron_table_list_local_(ephemeron_table_list),
copied_list_local_(*copied_list),
ephemeron_table_list_local_(*ephemeron_table_list),
local_pretenuring_feedback_(kInitialLocalPretenuringFeedbackCapacity),
copied_size_(0),
promoted_size_(0),
......
......@@ -20,7 +20,7 @@ namespace internal {
WeakObjects::Local::Local(WeakObjects* weak_objects)
: WeakObjects::UnusedBase()
#define INIT_LOCAL_WORKLIST(_, name, __) , name##_local(&weak_objects->name)
#define INIT_LOCAL_WORKLIST(_, name, __) , name##_local(weak_objects->name)
WEAK_OBJECT_WORKLISTS(INIT_LOCAL_WORKLIST)
#undef INIT_LOCAL_WORKLIST
{
......
......@@ -18,7 +18,7 @@ namespace v8 {
namespace internal {
namespace heap {
void PublishSegment(MarkingWorklist* worklist, HeapObject object) {
void PublishSegment(MarkingWorklist& worklist, HeapObject object) {
MarkingWorklist::Local local(worklist);
for (size_t i = 0; i < MarkingWorklist::kMinSegmentSizeForTesting; i++) {
local.Push(object);
......@@ -42,7 +42,7 @@ TEST(ConcurrentMarking) {
WeakObjects weak_objects;
ConcurrentMarking* concurrent_marking =
new ConcurrentMarking(heap, &marking_worklists, &weak_objects);
PublishSegment(marking_worklists.shared(),
PublishSegment(*marking_worklists.shared(),
ReadOnlyRoots(heap).undefined_value());
concurrent_marking->ScheduleJob();
concurrent_marking->Join();
......@@ -65,11 +65,11 @@ TEST(ConcurrentMarkingReschedule) {
WeakObjects weak_objects;
ConcurrentMarking* concurrent_marking =
new ConcurrentMarking(heap, &marking_worklists, &weak_objects);
PublishSegment(marking_worklists.shared(),
PublishSegment(*marking_worklists.shared(),
ReadOnlyRoots(heap).undefined_value());
concurrent_marking->ScheduleJob();
concurrent_marking->Join();
PublishSegment(marking_worklists.shared(),
PublishSegment(*marking_worklists.shared(),
ReadOnlyRoots(heap).undefined_value());
concurrent_marking->RescheduleJobIfNeeded();
concurrent_marking->Join();
......@@ -93,12 +93,12 @@ TEST(ConcurrentMarkingPreemptAndReschedule) {
ConcurrentMarking* concurrent_marking =
new ConcurrentMarking(heap, &marking_worklists, &weak_objects);
for (int i = 0; i < 5000; i++)
PublishSegment(marking_worklists.shared(),
PublishSegment(*marking_worklists.shared(),
ReadOnlyRoots(heap).undefined_value());
concurrent_marking->ScheduleJob();
concurrent_marking->Pause();
for (int i = 0; i < 5000; i++)
PublishSegment(marking_worklists.shared(),
PublishSegment(*marking_worklists.shared(),
ReadOnlyRoots(heap).undefined_value());
concurrent_marking->RescheduleJobIfNeeded();
concurrent_marking->Join();
......
......@@ -100,14 +100,14 @@ TEST(WorkListTest, SegmentUpdate) {
TEST(WorkListTest, CreateEmpty) {
TestWorklist worklist;
TestWorklist::Local worklist_local(&worklist);
TestWorklist::Local worklist_local(worklist);
EXPECT_TRUE(worklist_local.IsLocalEmpty());
EXPECT_TRUE(worklist.IsEmpty());
}
TEST(WorkListTest, LocalPushPop) {
TestWorklist worklist;
TestWorklist::Local worklist_local(&worklist);
TestWorklist::Local worklist_local(worklist);
SomeObject dummy;
SomeObject* retrieved = nullptr;
worklist_local.Push(&dummy);
......@@ -118,8 +118,8 @@ TEST(WorkListTest, LocalPushPop) {
TEST(WorkListTest, LocalPushStaysPrivate) {
TestWorklist worklist;
TestWorklist::Local worklist_local1(&worklist);
TestWorklist::Local worklist_local2(&worklist);
TestWorklist::Local worklist_local1(worklist);
TestWorklist::Local worklist_local2(worklist);
SomeObject dummy;
SomeObject* retrieved = nullptr;
EXPECT_TRUE(worklist.IsEmpty());
......@@ -135,7 +135,7 @@ TEST(WorkListTest, LocalPushStaysPrivate) {
TEST(WorkListTest, LocalClear) {
TestWorklist worklist;
TestWorklist::Local worklist_local(&worklist);
TestWorklist::Local worklist_local(worklist);
SomeObject* object;
object = reinterpret_cast<SomeObject*>(&object);
// Check push segment:
......@@ -159,7 +159,7 @@ TEST(WorkListTest, LocalClear) {
TEST(WorkListTest, GlobalUpdateNull) {
TestWorklist worklist;
TestWorklist::Local worklist_local(&worklist);
TestWorklist::Local worklist_local(worklist);
SomeObject* object;
object = reinterpret_cast<SomeObject*>(&object);
for (size_t i = 0; i < TestWorklist::kMinSegmentSizeForTesting; i++) {
......@@ -174,7 +174,7 @@ TEST(WorkListTest, GlobalUpdateNull) {
TEST(WorkListTest, GlobalUpdate) {
TestWorklist worklist;
TestWorklist::Local worklist_local(&worklist);
TestWorklist::Local worklist_local(worklist);
SomeObject* objectA = nullptr;
objectA = reinterpret_cast<SomeObject*>(&objectA);
SomeObject* objectB = nullptr;
......@@ -205,8 +205,8 @@ TEST(WorkListTest, GlobalUpdate) {
TEST(WorkListTest, FlushToGlobalPushSegment) {
TestWorklist worklist;
TestWorklist::Local worklist_local0(&worklist);
TestWorklist::Local worklist_local1(&worklist);
TestWorklist::Local worklist_local0(worklist);
TestWorklist::Local worklist_local1(worklist);
SomeObject* object = nullptr;
SomeObject* objectA = nullptr;
objectA = reinterpret_cast<SomeObject*>(&objectA);
......@@ -218,8 +218,8 @@ TEST(WorkListTest, FlushToGlobalPushSegment) {
TEST(WorkListTest, FlushToGlobalPopSegment) {
TestWorklist worklist;
TestWorklist::Local worklist_local0(&worklist);
TestWorklist::Local worklist_local1(&worklist);
TestWorklist::Local worklist_local0(worklist);
TestWorklist::Local worklist_local1(worklist);
SomeObject* object = nullptr;
SomeObject* objectA = nullptr;
objectA = reinterpret_cast<SomeObject*>(&objectA);
......@@ -233,7 +233,7 @@ TEST(WorkListTest, FlushToGlobalPopSegment) {
TEST(WorkListTest, Clear) {
TestWorklist worklist;
TestWorklist::Local worklist_local(&worklist);
TestWorklist::Local worklist_local(worklist);
SomeObject* object;
object = reinterpret_cast<SomeObject*>(&object);
worklist_local.Push(object);
......@@ -246,8 +246,8 @@ TEST(WorkListTest, Clear) {
TEST(WorkListTest, SingleSegmentSteal) {
TestWorklist worklist;
TestWorklist::Local worklist_local1(&worklist);
TestWorklist::Local worklist_local2(&worklist);
TestWorklist::Local worklist_local1(worklist);
TestWorklist::Local worklist_local2(worklist);
SomeObject dummy;
for (size_t i = 0; i < TestWorklist::kMinSegmentSizeForTesting; i++) {
worklist_local1.Push(&dummy);
......@@ -267,9 +267,9 @@ TEST(WorkListTest, SingleSegmentSteal) {
TEST(WorkListTest, MultipleSegmentsStolen) {
TestWorklist worklist;
TestWorklist::Local worklist_local1(&worklist);
TestWorklist::Local worklist_local2(&worklist);
TestWorklist::Local worklist_local3(&worklist);
TestWorklist::Local worklist_local1(worklist);
TestWorklist::Local worklist_local2(worklist);
TestWorklist::Local worklist_local3(worklist);
SomeObject dummy1;
SomeObject dummy2;
for (size_t i = 0; i < TestWorklist::kMinSegmentSizeForTesting; i++) {
......@@ -306,7 +306,7 @@ TEST(WorkListTest, MultipleSegmentsStolen) {
TEST(WorkListTest, MergeGlobalPool) {
TestWorklist worklist1;
TestWorklist::Local worklist_local1(&worklist1);
TestWorklist::Local worklist_local1(worklist1);
SomeObject dummy;
for (size_t i = 0; i < TestWorklist::kMinSegmentSizeForTesting; i++) {
worklist_local1.Push(&dummy);
......@@ -315,9 +315,9 @@ TEST(WorkListTest, MergeGlobalPool) {
worklist_local1.Publish();
// Merging global pool into a new Worklist.
TestWorklist worklist2;
TestWorklist::Local worklist_local2(&worklist2);
TestWorklist::Local worklist_local2(worklist2);
EXPECT_EQ(0U, worklist2.Size());
worklist2.Merge(&worklist1);
worklist2.Merge(worklist1);
EXPECT_EQ(1U, worklist2.Size());
EXPECT_FALSE(worklist2.IsEmpty());
SomeObject* retrieved = nullptr;
......@@ -330,25 +330,5 @@ TEST(WorkListTest, MergeGlobalPool) {
EXPECT_TRUE(worklist2.IsEmpty());
}
// Swap() must transfer every published segment from one worklist to the
// other, leaving the source empty and the destination holding the entries.
TEST(WorkListTest, SwapGlobalPool) {
  TestWorklist source;
  TestWorklist::Local source_local(&source);
  SomeObject dummy;
  // Publish a single entry so the source has exactly one global segment.
  source_local.Push(&dummy);
  source_local.Publish();
  TestWorklist target;
  EXPECT_FALSE(source.IsEmpty());
  EXPECT_TRUE(target.IsEmpty());
  source.Swap(&target);
  // The segment moved wholesale: source drained, target populated.
  EXPECT_TRUE(source.IsEmpty());
  EXPECT_FALSE(target.IsEmpty());
  TestWorklist::Local target_local(&target);
  SomeObject* popped = nullptr;
  EXPECT_TRUE(target_local.Pop(&popped));
  EXPECT_EQ(&dummy, popped);
  // Exactly one entry was transferred; nothing else remains.
  EXPECT_FALSE(target_local.Pop(&popped));
  EXPECT_TRUE(target.IsEmpty());
}
} // namespace base
} // namespace heap
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment