Commit 54ef0d87 authored by Nico Hartmann and committed by V8 LUCI CQ

Revert "[heap] Rework Worklist base type"

This reverts commit a19316d9.

Reason for revert: https://ci.chromium.org/ui/p/v8/builders/ci/V8%20Linux64%20UBSan/22670/overview

Original change's description:
> [heap] Rework Worklist base type
>
> Worklist uses a singly-linked list of segments to hold entries.
> Segment size was based on a compile-time constant but already stored
> in the segment itself.
>
> Rework the segments to query `malloc_usable_size()` on allocation and
> adjust the capacity properly. For PartitionAlloc, it turns out that
> there's ~20% more capacity available for the 64-element segments.
>
> This slows down the actual allocation of segments, with the upside of
> improving utilization and requiring 20% fewer segments.
>
> Change-Id: Ib8595c3fb9fb75b02e4022f6c525bb59a2df7ab7
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3826047
> Commit-Queue: Anton Bikineev <bikineev@chromium.org>
> Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
> Reviewed-by: Anton Bikineev <bikineev@chromium.org>
> Cr-Commit-Position: refs/heads/main@{#82432}

Change-Id: I14994e11ff5ffaba70b93d977d40dd2f6e9e5d35
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3829474
Owners-Override: Nico Hartmann <nicohartmann@chromium.org>
Commit-Queue: Rubber Stamper <rubber-stamper@appspot.gserviceaccount.com>
Auto-Submit: Nico Hartmann <nicohartmann@chromium.org>
Bot-Commit: Rubber Stamper <rubber-stamper@appspot.gserviceaccount.com>
Cr-Commit-Position: refs/heads/main@{#82438}
parent 0cd0e4bb
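For context, the capacity adjustment described above (and removed by this revert) boils down to the following minimal sketch. It assumes a glibc-style malloc_usable_size(); Entry and kHeaderSize are illustrative stand-ins for the real Worklist<...>::Segment layout, and the actual code routes the query through v8::base::Malloc::GetUsableSize() as shown in the diff below.

#include <cstddef>
#include <cstdlib>
#include <malloc.h>  // malloc_usable_size() (glibc)

struct Entry { void* payload; };

// Stand-in for the Segment header (next pointer + SegmentBase fields).
constexpr std::size_t kHeaderSize = 2 * sizeof(void*);

constexpr std::size_t MallocSizeForCapacity(std::size_t num_entries) {
  return kHeaderSize + sizeof(Entry) * num_entries;
}

constexpr std::size_t CapacityForMallocSize(std::size_t malloc_size) {
  return (malloc_size - kHeaderSize) / sizeof(Entry);
}

int main() {
  constexpr std::size_t kMinSegmentSize = 64;
  void* memory = std::malloc(MallocSizeForCapacity(kMinSegmentSize));
  // The allocator may hand back more usable bytes than requested; the
  // reverted change turned that surplus into extra entry slots.
  const std::size_t capacity =
      CapacityForMallocSize(malloc_usable_size(memory));
  std::free(memory);
  return capacity >= kMinSegmentSize ? 0 : 1;
}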
@@ -52,9 +52,6 @@
#if V8_OS_DARWIN
#include <mach/mach.h>
#include <malloc/malloc.h>
#else
#include <malloc.h>
#endif
#if V8_OS_LINUX
@@ -1265,14 +1262,5 @@ Stack::StackSlot Stack::GetCurrentStackPosition() {
#undef MAP_ANONYMOUS
#undef MADV_FREE
// static
size_t Malloc::GetUsableSize(void* ptr) {
#if defined(V8_OS_DARWIN)
return malloc_size(ptr);
#else // defined(V8_OS_DARWIN)
return malloc_usable_size(ptr);
#endif // !defined(V8_OS_DARWIN)
}
} // namespace base
} // namespace v8
@@ -20,7 +20,6 @@
// This has to come after windows.h.
#include <VersionHelpers.h>
#include <dbghelp.h> // For SymLoadModule64 and al.
#include <malloc.h> // For _msize()
#include <mmsystem.h> // For timeGetTime().
#include <tlhelp32.h> // For Module32First and al.
@@ -1766,8 +1765,5 @@ Stack::StackSlot Stack::GetCurrentStackPosition() {
#endif
}
// static
size_t Malloc::GetUsableSize(void* ptr) { return _msize(ptr); }
} // namespace base
} // namespace v8
@@ -661,12 +661,6 @@ class V8_BASE_EXPORT Stack {
}
};
class V8_BASE_EXPORT Malloc final {
public:
// Returns the usable size in bytes for a `ptr` allocated using `malloc()`.
static size_t GetUsableSize(void* ptr);
};
} // namespace base
} // namespace v8
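For readability, here is a consolidated sketch of the Malloc::GetUsableSize() helper removed above. In the tree the declaration lives in platform.h and the definitions in platform-posix.cc and platform-win32.cc, and it is a static member of v8::base::Malloc rather than the free function shown here; V8_OS_DARWIN and V8_OS_WIN are V8's platform macros.

#include <cstddef>
#if defined(V8_OS_DARWIN)
#include <malloc/malloc.h>  // malloc_size()
#else
#include <malloc.h>  // malloc_usable_size() / _msize()
#endif

namespace v8 {
namespace base {

// Returns the usable size in bytes for a `ptr` allocated using `malloc()`.
inline size_t GetUsableSize(void* ptr) {
#if defined(V8_OS_DARWIN)
  return malloc_size(ptr);
#elif defined(V8_OS_WIN)
  return _msize(ptr);
#else
  return malloc_usable_size(ptr);
#endif
}

}  // namespace base
}  // namespace v8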
@@ -4,12 +4,16 @@
#include "src/heap/base/worklist.h"
namespace heap::base::internal {
namespace heap {
namespace base {
namespace internal {
// static
SegmentBase* SegmentBase::GetSentinelSegmentAddress() {
static SegmentBase sentinel_segment(0);
return &sentinel_segment;
static SegmentBase kSentinelSegment(0);
return &kSentinelSegment;
}
} // namespace heap::base::internal
} // namespace internal
} // namespace base
} // namespace heap
@@ -11,19 +11,19 @@
#include "src/base/atomic-utils.h"
#include "src/base/logging.h"
#include "src/base/platform/mutex.h"
#include "src/base/platform/platform.h"
#include "testing/gtest/include/gtest/gtest_prod.h" // nogncheck
namespace heap::base {
namespace internal {
namespace heap {
namespace base {
namespace internal {
class V8_EXPORT_PRIVATE SegmentBase {
public:
static SegmentBase* GetSentinelSegmentAddress();
explicit constexpr SegmentBase(uint16_t capacity) : capacity_(capacity) {}
explicit SegmentBase(uint16_t capacity) : capacity_(capacity) {}
size_t Size() const { return index_; }
size_t Capacity() const { return capacity_; }
bool IsEmpty() const { return index_ == 0; }
bool IsFull() const { return index_ == capacity_; }
void Clear() { index_ = 0; }
@@ -34,69 +34,56 @@ class V8_EXPORT_PRIVATE SegmentBase {
};
} // namespace internal
// A global worklist based on segments which allows for a thread-local
// producer/consumer pattern with global work stealing.
//
// - Entries in the worklist are of type `EntryType`.
// - Segments have a capacity of at least `MinSegmentSize` but possibly more.
//
// All methods on the worklist itself only consider the list of segments.
// Unpublished work in local views is not visible.
template <typename EntryType, uint16_t MinSegmentSize>
class Worklist final {
// A global marking worklist that is similar to the existing Worklist
// but does not reserve space and keep track of the local segments.
// Eventually this will replace Worklist after all its current uses
// are migrated.
template <typename EntryType, uint16_t SegmentSize>
class Worklist {
public:
// A thread-local view on the worklist. Any work that is not published from
// the local view is not visible to the global worklist.
class Local;
static const int kSegmentSize = SegmentSize;
class Segment;
static constexpr int kMinSegmentSizeForTesting = MinSegmentSize;
class Local;
Worklist() = default;
~Worklist() { CHECK(IsEmpty()); }
Worklist(const Worklist&) = delete;
Worklist& operator=(const Worklist&) = delete;
void Push(Segment* segment);
bool Pop(Segment** segment);
// Returns true if the global worklist is empty and false otherwise.
// Returns true if the list of segments is empty.
bool IsEmpty() const;
// Returns the number of segments in the global worklist.
// Returns the number of segments in the list.
size_t Size() const;
// Moves the segments from `other` into this worklist.
void Merge(Worklist<EntryType, MinSegmentSize>* other);
// Moves the segments of the given marking worklist into this
// marking worklist.
void Merge(Worklist<EntryType, SegmentSize>* other);
// Swaps the segments with `other`.
void Swap(Worklist<EntryType, MinSegmentSize>* other);
// Swaps the segments with the given marking worklist.
void Swap(Worklist<EntryType, SegmentSize>* other);
// Removes all segments from the worklist.
// These functions are not thread-safe. They should be called only
// if all local marking worklists that use the current worklist have
// been published and are empty.
void Clear();
// Invokes `callback` on each item. Callback is of type
// `bool(EntryType&, EntryType*)`; it should write the (possibly updated)
// entry through its second argument and return true if the entry should be
// kept, or return false if the entry should be removed.
template <typename Callback>
void Update(Callback callback);
// Invokes `callback` on each item. Callback is of type `void(EntryType&)`.
template <typename Callback>
void Iterate(Callback callback) const;
void Iterate(Callback callback);
private:
void set_top(Segment* segment) {
v8::base::AsAtomicPtr(&top_)->store(segment, std::memory_order_relaxed);
}
void Push(Segment* segment);
bool Pop(Segment** segment);
mutable v8::base::Mutex lock_;
v8::base::Mutex lock_;
Segment* top_ = nullptr;
std::atomic<size_t> size_{0};
};
template <typename EntryType, uint16_t MinSegmentSize>
void Worklist<EntryType, MinSegmentSize>::Push(Segment* segment) {
template <typename EntryType, uint16_t SegmentSize>
void Worklist<EntryType, SegmentSize>::Push(Segment* segment) {
DCHECK(!segment->IsEmpty());
v8::base::MutexGuard guard(&lock_);
segment->set_next(top_);
@@ -104,8 +91,8 @@ void Worklist<EntryType, MinSegmentSize>::Push(Segment* segment) {
size_.fetch_add(1, std::memory_order_relaxed);
}
template <typename EntryType, uint16_t MinSegmentSize>
bool Worklist<EntryType, MinSegmentSize>::Pop(Segment** segment) {
template <typename EntryType, uint16_t SegmentSize>
bool Worklist<EntryType, SegmentSize>::Pop(Segment** segment) {
v8::base::MutexGuard guard(&lock_);
if (top_ == nullptr) return false;
DCHECK_LT(0U, size_);
@@ -115,36 +102,36 @@ bool Worklist<EntryType, MinSegmentSize>::Pop(Segment** segment) {
return true;
}
template <typename EntryType, uint16_t MinSegmentSize>
bool Worklist<EntryType, MinSegmentSize>::IsEmpty() const {
template <typename EntryType, uint16_t SegmentSize>
bool Worklist<EntryType, SegmentSize>::IsEmpty() const {
return v8::base::AsAtomicPtr(&top_)->load(std::memory_order_relaxed) ==
nullptr;
}
template <typename EntryType, uint16_t MinSegmentSize>
size_t Worklist<EntryType, MinSegmentSize>::Size() const {
template <typename EntryType, uint16_t SegmentSize>
size_t Worklist<EntryType, SegmentSize>::Size() const {
// It is safe to read |size_| without a lock since this variable is
// atomic, keeping in mind that threads may not immediately see the new
// value when it is updated.
return size_.load(std::memory_order_relaxed);
}
template <typename EntryType, uint16_t MinSegmentSize>
void Worklist<EntryType, MinSegmentSize>::Clear() {
template <typename EntryType, uint16_t SegmentSize>
void Worklist<EntryType, SegmentSize>::Clear() {
v8::base::MutexGuard guard(&lock_);
size_.store(0, std::memory_order_relaxed);
Segment* current = top_;
while (current != nullptr) {
Segment* tmp = current;
current = current->next();
Segment::Delete(tmp);
delete tmp;
}
set_top(nullptr);
}
template <typename EntryType, uint16_t MinSegmentSize>
template <typename EntryType, uint16_t SegmentSize>
template <typename Callback>
void Worklist<EntryType, MinSegmentSize>::Update(Callback callback) {
void Worklist<EntryType, SegmentSize>::Update(Callback callback) {
v8::base::MutexGuard guard(&lock_);
Segment* prev = nullptr;
Segment* current = top_;
@@ -161,7 +148,7 @@ void Worklist<EntryType, MinSegmentSize>::Update(Callback callback) {
}
Segment* tmp = current;
current = current->next();
Segment::Delete(tmp);
delete tmp;
} else {
prev = current;
current = current->next();
@@ -170,18 +157,18 @@ void Worklist<EntryType, MinSegmentSize>::Update(Callback callback) {
size_.fetch_sub(num_deleted, std::memory_order_relaxed);
}
template <typename EntryType, uint16_t MinSegmentSize>
template <typename EntryType, uint16_t SegmentSize>
template <typename Callback>
void Worklist<EntryType, MinSegmentSize>::Iterate(Callback callback) const {
void Worklist<EntryType, SegmentSize>::Iterate(Callback callback) {
v8::base::MutexGuard guard(&lock_);
for (Segment* current = top_; current != nullptr; current = current->next()) {
current->Iterate(callback);
}
}
template <typename EntryType, uint16_t MinSegmentSize>
void Worklist<EntryType, MinSegmentSize>::Merge(
Worklist<EntryType, MinSegmentSize>* other) {
template <typename EntryType, uint16_t SegmentSize>
void Worklist<EntryType, SegmentSize>::Merge(
Worklist<EntryType, SegmentSize>* other) {
Segment* top = nullptr;
size_t other_size = 0;
{
@@ -206,9 +193,9 @@ void Worklist<EntryType, MinSegmentSize>::Merge(
}
}
template <typename EntryType, uint16_t MinSegmentSize>
void Worklist<EntryType, MinSegmentSize>::Swap(
Worklist<EntryType, MinSegmentSize>* other) {
template <typename EntryType, uint16_t SegmentSize>
void Worklist<EntryType, SegmentSize>::Swap(
Worklist<EntryType, SegmentSize>* other) {
v8::base::MutexGuard guard1(&lock_);
v8::base::MutexGuard guard2(&other->lock_);
Segment* top = top_;
@@ -219,20 +206,13 @@ void Worklist<EntryType, MinSegmentSize>::Swap(
size_.store(other_size, std::memory_order_relaxed);
}
template <typename EntryType, uint16_t MinSegmentSize>
class Worklist<EntryType, MinSegmentSize>::Segment final
: public internal::SegmentBase {
template <typename EntryType, uint16_t SegmentSize>
class Worklist<EntryType, SegmentSize>::Segment : public internal::SegmentBase {
public:
static Segment* Create(uint16_t min_segment_size) {
void* memory = malloc(MallocSizeForCapacity(min_segment_size));
return new (memory)
Segment(CapacityForMallocSize(v8::base::Malloc::GetUsableSize(memory)));
}
static void Delete(Segment* segment) { free(segment); }
static const uint16_t kSize = SegmentSize;
V8_INLINE void Push(EntryType entry);
V8_INLINE void Pop(EntryType* entry);
void Push(EntryType entry);
void Pop(EntryType* entry);
template <typename Callback>
void Update(Callback callback);
@@ -243,94 +223,89 @@ class Worklist<EntryType, MinSegmentSize>::Segment final
void set_next(Segment* segment) { next_ = segment; }
private:
static constexpr size_t MallocSizeForCapacity(size_t num_entries) {
return sizeof(Segment) + sizeof(EntryType) * num_entries;
}
static constexpr size_t CapacityForMallocSize(size_t malloc_size) {
return (malloc_size - sizeof(Segment)) / sizeof(EntryType);
}
constexpr explicit Segment(size_t capacity)
: internal::SegmentBase(capacity) {}
EntryType& entry(size_t index) {
return reinterpret_cast<EntryType*>(this + 1)[index];
}
const EntryType& entry(size_t index) const {
return reinterpret_cast<const EntryType*>(this + 1)[index];
}
Segment() : internal::SegmentBase(kSize) {}
Segment* next_ = nullptr;
EntryType entries_[kSize];
friend class Worklist<EntryType, SegmentSize>::Local;
FRIEND_TEST(WorkListTest, SegmentCreate);
FRIEND_TEST(WorkListTest, SegmentPush);
FRIEND_TEST(WorkListTest, SegmentPushPop);
FRIEND_TEST(WorkListTest, SegmentIsEmpty);
FRIEND_TEST(WorkListTest, SegmentIsFull);
FRIEND_TEST(WorkListTest, SegmentClear);
FRIEND_TEST(WorkListTest, SegmentUpdateFalse);
FRIEND_TEST(WorkListTest, SegmentUpdate);
};
template <typename EntryType, uint16_t MinSegmentSize>
void Worklist<EntryType, MinSegmentSize>::Segment::Push(EntryType e) {
template <typename EntryType, uint16_t SegmentSize>
void Worklist<EntryType, SegmentSize>::Segment::Push(EntryType entry) {
DCHECK(!IsFull());
entry(index_++) = e;
entries_[index_++] = entry;
}
template <typename EntryType, uint16_t MinSegmentSize>
void Worklist<EntryType, MinSegmentSize>::Segment::Pop(EntryType* e) {
template <typename EntryType, uint16_t SegmentSize>
void Worklist<EntryType, SegmentSize>::Segment::Pop(EntryType* entry) {
DCHECK(!IsEmpty());
*e = entry(--index_);
*entry = entries_[--index_];
}
template <typename EntryType, uint16_t MinSegmentSize>
template <typename EntryType, uint16_t SegmentSize>
template <typename Callback>
void Worklist<EntryType, MinSegmentSize>::Segment::Update(Callback callback) {
void Worklist<EntryType, SegmentSize>::Segment::Update(Callback callback) {
size_t new_index = 0;
for (size_t i = 0; i < index_; i++) {
if (callback(entry(i), &entry(new_index))) {
if (callback(entries_[i], &entries_[new_index])) {
new_index++;
}
}
index_ = new_index;
}
template <typename EntryType, uint16_t MinSegmentSize>
template <typename EntryType, uint16_t SegmentSize>
template <typename Callback>
void Worklist<EntryType, MinSegmentSize>::Segment::Iterate(
void Worklist<EntryType, SegmentSize>::Segment::Iterate(
Callback callback) const {
for (size_t i = 0; i < index_; i++) {
callback(entry(i));
callback(entries_[i]);
}
}
// A thread-local view on a given worklist.
template <typename EntryType, uint16_t MinSegmentSize>
class Worklist<EntryType, MinSegmentSize>::Local final {
// A thread-local view of the marking worklist.
template <typename EntryType, uint16_t SegmentSize>
class Worklist<EntryType, SegmentSize>::Local {
public:
using ItemType = EntryType;
// An empty local view does not have any segments and is not attached to a
// worklist. As such it will crash on any operation until it is initialized
// properly via move constructor.
Local() = default;
explicit Local(Worklist<EntryType, MinSegmentSize>* worklist);
explicit Local(Worklist<EntryType, SegmentSize>* worklist);
~Local();
Local(Local&&) V8_NOEXCEPT;
Local& operator=(Local&&) V8_NOEXCEPT;
// Having multiple copies of the same local view may be unsafe.
// Disable copying since having multiple copies of the same
// local marking worklist is unsafe.
Local(const Local&) = delete;
Local& operator=(const Local& other) = delete;
V8_INLINE void Push(EntryType entry);
V8_INLINE bool Pop(EntryType* entry);
void Push(EntryType entry);
bool Pop(EntryType* entry);
bool IsLocalAndGlobalEmpty() const;
bool IsLocalEmpty() const;
bool IsGlobalEmpty() const;
size_t PushSegmentSize() const { return push_segment_->Size(); }
void Publish();
void Merge(Worklist<EntryType, MinSegmentSize>::Local* other);
void Merge(Worklist<EntryType, SegmentSize>::Local* other);
bool IsEmpty() const;
void Clear();
size_t PushSegmentSize() const { return push_segment_->Size(); }
private:
void PublishPushSegment();
void PublishPopSegment();
@@ -338,11 +313,11 @@ class Worklist<EntryType, MinSegmentSize>::Local final {
Segment* NewSegment() const {
// Bottleneck for filtering in crash dumps.
return Segment::Create(MinSegmentSize);
return new Segment();
}
void DeleteSegment(internal::SegmentBase* segment) const {
if (segment == internal::SegmentBase::GetSentinelSegmentAddress()) return;
Segment::Delete(static_cast<Segment*>(segment));
delete static_cast<Segment*>(segment);
}
inline Segment* push_segment() {
@@ -365,29 +340,29 @@ class Worklist<EntryType, MinSegmentSize>::Local final {
return static_cast<const Segment*>(pop_segment_);
}
Worklist<EntryType, MinSegmentSize>* worklist_ = nullptr;
Worklist<EntryType, SegmentSize>* worklist_ = nullptr;
internal::SegmentBase* push_segment_ = nullptr;
internal::SegmentBase* pop_segment_ = nullptr;
};
template <typename EntryType, uint16_t MinSegmentSize>
Worklist<EntryType, MinSegmentSize>::Local::Local(
Worklist<EntryType, MinSegmentSize>* worklist)
template <typename EntryType, uint16_t SegmentSize>
Worklist<EntryType, SegmentSize>::Local::Local(
Worklist<EntryType, SegmentSize>* worklist)
: worklist_(worklist),
push_segment_(internal::SegmentBase::GetSentinelSegmentAddress()),
pop_segment_(internal::SegmentBase::GetSentinelSegmentAddress()) {}
template <typename EntryType, uint16_t MinSegmentSize>
Worklist<EntryType, MinSegmentSize>::Local::~Local() {
template <typename EntryType, uint16_t SegmentSize>
Worklist<EntryType, SegmentSize>::Local::~Local() {
CHECK_IMPLIES(push_segment_, push_segment_->IsEmpty());
CHECK_IMPLIES(pop_segment_, pop_segment_->IsEmpty());
DeleteSegment(push_segment_);
DeleteSegment(pop_segment_);
}
template <typename EntryType, uint16_t MinSegmentSize>
Worklist<EntryType, MinSegmentSize>::Local::Local(
Worklist<EntryType, MinSegmentSize>::Local&& other) V8_NOEXCEPT {
template <typename EntryType, uint16_t SegmentSize>
Worklist<EntryType, SegmentSize>::Local::Local(
Worklist<EntryType, SegmentSize>::Local&& other) V8_NOEXCEPT {
worklist_ = other.worklist_;
push_segment_ = other.push_segment_;
pop_segment_ = other.pop_segment_;
@@ -396,10 +371,10 @@ Worklist<EntryType, MinSegmentSize>::Local::Local(
other.pop_segment_ = nullptr;
}
template <typename EntryType, uint16_t MinSegmentSize>
typename Worklist<EntryType, MinSegmentSize>::Local&
Worklist<EntryType, MinSegmentSize>::Local::operator=(
Worklist<EntryType, MinSegmentSize>::Local&& other) V8_NOEXCEPT {
template <typename EntryType, uint16_t SegmentSize>
typename Worklist<EntryType, SegmentSize>::Local&
Worklist<EntryType, SegmentSize>::Local::operator=(
Worklist<EntryType, SegmentSize>::Local&& other) V8_NOEXCEPT {
if (this != &other) {
DCHECK_NULL(worklist_);
DCHECK_NULL(push_segment_);
@@ -414,16 +389,16 @@ Worklist<EntryType, MinSegmentSize>::Local::operator=(
return *this;
}
template <typename EntryType, uint16_t MinSegmentSize>
void Worklist<EntryType, MinSegmentSize>::Local::Push(EntryType entry) {
template <typename EntryType, uint16_t SegmentSize>
void Worklist<EntryType, SegmentSize>::Local::Push(EntryType entry) {
if (V8_UNLIKELY(push_segment_->IsFull())) {
PublishPushSegment();
}
push_segment()->Push(entry);
}
template <typename EntryType, uint16_t MinSegmentSize>
bool Worklist<EntryType, MinSegmentSize>::Local::Pop(EntryType* entry) {
template <typename EntryType, uint16_t SegmentSize>
bool Worklist<EntryType, SegmentSize>::Local::Pop(EntryType* entry) {
if (pop_segment_->IsEmpty()) {
if (!push_segment_->IsEmpty()) {
std::swap(push_segment_, pop_segment_);
@@ -435,50 +410,50 @@ bool Worklist<EntryType, MinSegmentSize>::Local::Pop(EntryType* entry) {
return true;
}
template <typename EntryType, uint16_t MinSegmentSize>
bool Worklist<EntryType, MinSegmentSize>::Local::IsLocalAndGlobalEmpty() const {
template <typename EntryType, uint16_t SegmentSize>
bool Worklist<EntryType, SegmentSize>::Local::IsLocalAndGlobalEmpty() const {
return IsLocalEmpty() && IsGlobalEmpty();
}
template <typename EntryType, uint16_t MinSegmentSize>
bool Worklist<EntryType, MinSegmentSize>::Local::IsLocalEmpty() const {
template <typename EntryType, uint16_t SegmentSize>
bool Worklist<EntryType, SegmentSize>::Local::IsLocalEmpty() const {
return push_segment_->IsEmpty() && pop_segment_->IsEmpty();
}
template <typename EntryType, uint16_t MinSegmentSize>
bool Worklist<EntryType, MinSegmentSize>::Local::IsGlobalEmpty() const {
template <typename EntryType, uint16_t SegmentSize>
bool Worklist<EntryType, SegmentSize>::Local::IsGlobalEmpty() const {
return worklist_->IsEmpty();
}
template <typename EntryType, uint16_t MinSegmentSize>
void Worklist<EntryType, MinSegmentSize>::Local::Publish() {
template <typename EntryType, uint16_t SegmentSize>
void Worklist<EntryType, SegmentSize>::Local::Publish() {
if (!push_segment_->IsEmpty()) PublishPushSegment();
if (!pop_segment_->IsEmpty()) PublishPopSegment();
}
template <typename EntryType, uint16_t MinSegmentSize>
void Worklist<EntryType, MinSegmentSize>::Local::Merge(
Worklist<EntryType, MinSegmentSize>::Local* other) {
template <typename EntryType, uint16_t SegmentSize>
void Worklist<EntryType, SegmentSize>::Local::Merge(
Worklist<EntryType, SegmentSize>::Local* other) {
other->Publish();
worklist_->Merge(other->worklist_);
}
template <typename EntryType, uint16_t MinSegmentSize>
void Worklist<EntryType, MinSegmentSize>::Local::PublishPushSegment() {
template <typename EntryType, uint16_t SegmentSize>
void Worklist<EntryType, SegmentSize>::Local::PublishPushSegment() {
if (push_segment_ != internal::SegmentBase::GetSentinelSegmentAddress())
worklist_->Push(push_segment());
push_segment_ = NewSegment();
}
template <typename EntryType, uint16_t MinSegmentSize>
void Worklist<EntryType, MinSegmentSize>::Local::PublishPopSegment() {
template <typename EntryType, uint16_t SegmentSize>
void Worklist<EntryType, SegmentSize>::Local::PublishPopSegment() {
if (pop_segment_ != internal::SegmentBase::GetSentinelSegmentAddress())
worklist_->Push(pop_segment());
pop_segment_ = NewSegment();
}
template <typename EntryType, uint16_t MinSegmentSize>
bool Worklist<EntryType, MinSegmentSize>::Local::StealPopSegment() {
template <typename EntryType, uint16_t SegmentSize>
bool Worklist<EntryType, SegmentSize>::Local::StealPopSegment() {
if (worklist_->IsEmpty()) return false;
Segment* new_segment = nullptr;
if (worklist_->Pop(&new_segment)) {
@@ -489,12 +464,18 @@ bool Worklist<EntryType, MinSegmentSize>::Local::StealPopSegment() {
return false;
}
template <typename EntryType, uint16_t MinSegmentSize>
void Worklist<EntryType, MinSegmentSize>::Local::Clear() {
template <typename EntryType, uint16_t SegmentSize>
bool Worklist<EntryType, SegmentSize>::Local::IsEmpty() const {
return push_segment_->IsEmpty() && pop_segment_->IsEmpty();
}
template <typename EntryType, uint16_t SegmentSize>
void Worklist<EntryType, SegmentSize>::Local::Clear() {
push_segment_->Clear();
pop_segment_->Clear();
}
} // namespace heap::base
} // namespace base
} // namespace heap
#endif // V8_HEAP_BASE_WORKLIST_H_
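To make the restored API above concrete, here is a hedged usage sketch of the thread-local producer/consumer pattern; Task, TaskWorklist and the 64-entry segment size are illustrative and not taken from actual V8 call sites.

#include "src/heap/base/worklist.h"

struct Task {
  int id;
};
using TaskWorklist = heap::base::Worklist<Task*, 64>;

void ProduceAndConsume(TaskWorklist* worklist, Task* task) {
  TaskWorklist::Local producer(worklist);
  // Entries stay in the thread-local push segment until Publish() is called
  // (a full push segment is also published automatically on the next Push()).
  producer.Push(task);
  producer.Publish();

  // A second local view can now pop (steal) the published segment.
  TaskWorklist::Local consumer(worklist);
  Task* popped = nullptr;
  while (consumer.Pop(&popped)) {
    // ... process `popped` ...
  }
}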
@@ -704,7 +704,7 @@ void Scavenger::Process(JobDelegate* delegate) {
scavenge_visitor.Visit(object_and_size.first);
done = false;
if (delegate && ((++objects % kInterruptThreshold) == 0)) {
if (!copied_list_local_.IsLocalEmpty()) {
if (!copied_list_local_.IsEmpty()) {
delegate->NotifyConcurrencyIncrease();
}
}
@@ -20,10 +20,10 @@ namespace heap {
void PublishSegment(MarkingWorklist* worklist, HeapObject object) {
MarkingWorklist::Local local(worklist);
for (size_t i = 0; i < MarkingWorklist::kMinSegmentSizeForTesting; i++) {
for (size_t i = 0; i <= MarkingWorklist::kSegmentSize; i++) {
local.Push(object);
}
local.Publish();
CHECK(local.Pop(&object));
}
TEST(ConcurrentMarking) {
@@ -11,90 +11,82 @@ namespace base {
class SomeObject {};
constexpr size_t kMinSegmentSize = 64;
using TestWorklist = Worklist<SomeObject*, kMinSegmentSize>;
using Segment = TestWorklist::Segment;
auto CreateTemporarySegment(size_t min_segment_size) {
return std::unique_ptr<Segment, void (*)(Segment*)>(
Segment::Create(min_segment_size),
[](Segment* s) { Segment::Delete(s); });
}
using TestWorklist = Worklist<SomeObject*, 64>;
TEST(WorkListTest, SegmentCreate) {
auto segment = CreateTemporarySegment(kMinSegmentSize);
EXPECT_TRUE(segment->IsEmpty());
EXPECT_EQ(0u, segment->Size());
EXPECT_FALSE(segment->IsFull());
TestWorklist::Segment segment;
EXPECT_TRUE(segment.IsEmpty());
EXPECT_EQ(0u, segment.Size());
EXPECT_FALSE(segment.IsFull());
}
TEST(WorkListTest, SegmentPush) {
auto segment = CreateTemporarySegment(kMinSegmentSize);
EXPECT_EQ(0u, segment->Size());
segment->Push(nullptr);
EXPECT_EQ(1u, segment->Size());
TestWorklist::Segment segment;
EXPECT_EQ(0u, segment.Size());
segment.Push(nullptr);
EXPECT_EQ(1u, segment.Size());
}
TEST(WorkListTest, SegmentPushPop) {
auto segment = CreateTemporarySegment(kMinSegmentSize);
segment->Push(nullptr);
EXPECT_EQ(1u, segment->Size());
TestWorklist::Segment segment;
segment.Push(nullptr);
EXPECT_EQ(1u, segment.Size());
SomeObject dummy;
SomeObject* object = &dummy;
segment->Pop(&object);
EXPECT_EQ(0u, segment->Size());
segment.Pop(&object);
EXPECT_EQ(0u, segment.Size());
EXPECT_EQ(nullptr, object);
}
TEST(WorkListTest, SegmentIsEmpty) {
auto segment = CreateTemporarySegment(kMinSegmentSize);
EXPECT_TRUE(segment->IsEmpty());
segment->Push(nullptr);
EXPECT_FALSE(segment->IsEmpty());
TestWorklist::Segment segment;
EXPECT_TRUE(segment.IsEmpty());
segment.Push(nullptr);
EXPECT_FALSE(segment.IsEmpty());
}
TEST(WorkListTest, SegmentIsFull) {
auto segment = CreateTemporarySegment(kMinSegmentSize);
EXPECT_FALSE(segment->IsFull());
for (size_t i = 0; i < segment->Capacity(); i++) {
segment->Push(nullptr);
TestWorklist::Segment segment;
EXPECT_FALSE(segment.IsFull());
for (size_t i = 0; i < TestWorklist::Segment::kSize; i++) {
segment.Push(nullptr);
}
EXPECT_TRUE(segment->IsFull());
EXPECT_TRUE(segment.IsFull());
}
TEST(WorkListTest, SegmentClear) {
auto segment = CreateTemporarySegment(kMinSegmentSize);
segment->Push(nullptr);
EXPECT_FALSE(segment->IsEmpty());
segment->Clear();
EXPECT_TRUE(segment->IsEmpty());
for (size_t i = 0; i < segment->Capacity(); i++) {
segment->Push(nullptr);
TestWorklist::Segment segment;
segment.Push(nullptr);
EXPECT_FALSE(segment.IsEmpty());
segment.Clear();
EXPECT_TRUE(segment.IsEmpty());
for (size_t i = 0; i < TestWorklist::Segment::kSize; i++) {
segment.Push(nullptr);
}
}
TEST(WorkListTest, SegmentUpdateFalse) {
auto segment = CreateTemporarySegment(kMinSegmentSize);
TestWorklist::Segment segment;
SomeObject* object;
object = reinterpret_cast<SomeObject*>(&object);
segment->Push(object);
segment->Update([](SomeObject* object, SomeObject** out) { return false; });
EXPECT_TRUE(segment->IsEmpty());
segment.Push(object);
segment.Update([](SomeObject* object, SomeObject** out) { return false; });
EXPECT_TRUE(segment.IsEmpty());
}
TEST(WorkListTest, SegmentUpdate) {
auto segment = CreateTemporarySegment(kMinSegmentSize);
TestWorklist::Segment segment;
SomeObject* objectA;
objectA = reinterpret_cast<SomeObject*>(&objectA);
SomeObject* objectB;
objectB = reinterpret_cast<SomeObject*>(&objectB);
segment->Push(objectA);
segment->Update([objectB](SomeObject* object, SomeObject** out) {
segment.Push(objectA);
segment.Update([objectB](SomeObject* object, SomeObject** out) {
*out = objectB;
return true;
});
SomeObject* object;
segment->Pop(&object);
segment.Pop(&object);
EXPECT_EQ(object, objectB);
}
@@ -139,22 +131,22 @@ TEST(WorkListTest, LocalClear) {
SomeObject* object;
object = reinterpret_cast<SomeObject*>(&object);
// Check push segment:
EXPECT_TRUE(worklist_local.IsLocalEmpty());
EXPECT_TRUE(worklist_local.IsEmpty());
worklist_local.Push(object);
EXPECT_FALSE(worklist_local.IsLocalEmpty());
EXPECT_FALSE(worklist_local.IsEmpty());
worklist_local.Clear();
EXPECT_TRUE(worklist_local.IsLocalEmpty());
EXPECT_TRUE(worklist_local.IsEmpty());
// Check pop segment:
worklist_local.Push(object);
worklist_local.Push(object);
EXPECT_FALSE(worklist_local.IsLocalEmpty());
EXPECT_FALSE(worklist_local.IsEmpty());
worklist_local.Publish();
EXPECT_TRUE(worklist_local.IsLocalEmpty());
EXPECT_TRUE(worklist_local.IsEmpty());
SomeObject* retrieved;
worklist_local.Pop(&retrieved);
EXPECT_FALSE(worklist_local.IsLocalEmpty());
EXPECT_FALSE(worklist_local.IsEmpty());
worklist_local.Clear();
EXPECT_TRUE(worklist_local.IsLocalEmpty());
EXPECT_TRUE(worklist_local.IsEmpty());
}
TEST(WorkListTest, GlobalUpdateNull) {
......@@ -162,7 +154,7 @@ TEST(WorkListTest, GlobalUpdateNull) {
TestWorklist::Local worklist_local(&worklist);
SomeObject* object;
object = reinterpret_cast<SomeObject*>(&object);
for (size_t i = 0; i < TestWorklist::kMinSegmentSizeForTesting; i++) {
for (size_t i = 0; i < TestWorklist::kSegmentSize; i++) {
worklist_local.Push(object);
}
worklist_local.Push(object);
@@ -181,10 +173,10 @@ TEST(WorkListTest, GlobalUpdate) {
objectB = reinterpret_cast<SomeObject*>(&objectB);
SomeObject* objectC = nullptr;
objectC = reinterpret_cast<SomeObject*>(&objectC);
for (size_t i = 0; i < TestWorklist::kMinSegmentSizeForTesting; i++) {
for (size_t i = 0; i < TestWorklist::kSegmentSize; i++) {
worklist_local.Push(objectA);
}
for (size_t i = 0; i < TestWorklist::kMinSegmentSizeForTesting; i++) {
for (size_t i = 0; i < TestWorklist::kSegmentSize; i++) {
worklist_local.Push(objectB);
}
worklist_local.Push(objectA);
@@ -196,7 +188,7 @@ TEST(WorkListTest, GlobalUpdate) {
}
return false;
});
for (size_t i = 0; i < TestWorklist::kMinSegmentSizeForTesting; i++) {
for (size_t i = 0; i < TestWorklist::kSegmentSize; i++) {
SomeObject* object;
EXPECT_TRUE(worklist_local.Pop(&object));
EXPECT_EQ(object, objectC);
@@ -249,14 +241,17 @@ TEST(WorkListTest, SingleSegmentSteal) {
TestWorklist::Local worklist_local1(&worklist);
TestWorklist::Local worklist_local2(&worklist);
SomeObject dummy;
for (size_t i = 0; i < TestWorklist::kMinSegmentSizeForTesting; i++) {
for (size_t i = 0; i < TestWorklist::kSegmentSize; i++) {
worklist_local1.Push(&dummy);
}
worklist_local1.Publish();
SomeObject* retrieved = nullptr;
// One more push/pop to publish the full segment.
worklist_local1.Push(nullptr);
EXPECT_TRUE(worklist_local1.Pop(&retrieved));
EXPECT_EQ(nullptr, retrieved);
EXPECT_EQ(1U, worklist.Size());
// Stealing.
SomeObject* retrieved = nullptr;
for (size_t i = 0; i < TestWorklist::kMinSegmentSizeForTesting; i++) {
for (size_t i = 0; i < TestWorklist::kSegmentSize; i++) {
EXPECT_TRUE(worklist_local2.Pop(&retrieved));
EXPECT_EQ(&dummy, retrieved);
EXPECT_FALSE(worklist_local1.Pop(&retrieved));
@@ -272,17 +267,20 @@ TEST(WorkListTest, MultipleSegmentsStolen) {
TestWorklist::Local worklist_local3(&worklist);
SomeObject dummy1;
SomeObject dummy2;
for (size_t i = 0; i < TestWorklist::kMinSegmentSizeForTesting; i++) {
for (size_t i = 0; i < TestWorklist::kSegmentSize; i++) {
worklist_local1.Push(&dummy1);
}
worklist_local1.Publish();
for (size_t i = 0; i < TestWorklist::kMinSegmentSizeForTesting; i++) {
for (size_t i = 0; i < TestWorklist::kSegmentSize; i++) {
worklist_local1.Push(&dummy2);
}
worklist_local1.Publish();
SomeObject* retrieved = nullptr;
SomeObject dummy3;
// One more push/pop to publish the full segment.
worklist_local1.Push(&dummy3);
EXPECT_TRUE(worklist_local1.Pop(&retrieved));
EXPECT_EQ(&dummy3, retrieved);
EXPECT_EQ(2U, worklist.Size());
// Stealing.
SomeObject* retrieved = nullptr;
EXPECT_TRUE(worklist_local2.Pop(&retrieved));
SomeObject* const expect_bag2 = retrieved;
EXPECT_TRUE(worklist_local3.Pop(&retrieved));
@@ -291,12 +289,12 @@ TEST(WorkListTest, MultipleSegmentsStolen) {
EXPECT_NE(expect_bag2, expect_bag3);
EXPECT_TRUE(expect_bag2 == &dummy1 || expect_bag2 == &dummy2);
EXPECT_TRUE(expect_bag3 == &dummy1 || expect_bag3 == &dummy2);
for (size_t i = 1; i < TestWorklist::kMinSegmentSizeForTesting; i++) {
for (size_t i = 1; i < TestWorklist::kSegmentSize; i++) {
EXPECT_TRUE(worklist_local2.Pop(&retrieved));
EXPECT_EQ(expect_bag2, retrieved);
EXPECT_FALSE(worklist_local1.Pop(&retrieved));
}
for (size_t i = 1; i < TestWorklist::kMinSegmentSizeForTesting; i++) {
for (size_t i = 1; i < TestWorklist::kSegmentSize; i++) {
EXPECT_TRUE(worklist_local3.Pop(&retrieved));
EXPECT_EQ(expect_bag3, retrieved);
EXPECT_FALSE(worklist_local1.Pop(&retrieved));
@@ -308,11 +306,15 @@ TEST(WorkListTest, MergeGlobalPool) {
TestWorklist worklist1;
TestWorklist::Local worklist_local1(&worklist1);
SomeObject dummy;
for (size_t i = 0; i < TestWorklist::kMinSegmentSizeForTesting; i++) {
for (size_t i = 0; i < TestWorklist::kSegmentSize; i++) {
worklist_local1.Push(&dummy);
}
SomeObject* retrieved = nullptr;
// One more push/pop to publish the full segment.
worklist_local1.Publish();
worklist_local1.Push(nullptr);
EXPECT_TRUE(worklist_local1.Pop(&retrieved));
EXPECT_EQ(nullptr, retrieved);
EXPECT_EQ(1U, worklist1.Size());
// Merging global pool into a new Worklist.
TestWorklist worklist2;
TestWorklist::Local worklist_local2(&worklist2);
@@ -320,8 +322,7 @@ TEST(WorkListTest, MergeGlobalPool) {
worklist2.Merge(&worklist1);
EXPECT_EQ(1U, worklist2.Size());
EXPECT_FALSE(worklist2.IsEmpty());
SomeObject* retrieved = nullptr;
for (size_t i = 0; i < TestWorklist::kMinSegmentSizeForTesting; i++) {
for (size_t i = 0; i < TestWorklist::kSegmentSize; i++) {
EXPECT_TRUE(worklist_local2.Pop(&retrieved));
EXPECT_EQ(&dummy, retrieved);
EXPECT_FALSE(worklist_local1.Pop(&retrieved));