Commit 1a47c1e9 authored by Michael Lippautz, committed by V8 LUCI CQ

cppgc: Cleanup around FreeList:Add

Bug: v8:12295
Change-Id: Ibf18c936215e892edd5009dc59560988453e1203
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3218063
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#77408}
parent 5ff309de
...@@ -25,6 +25,9 @@ uint32_t BucketIndexForSize(uint32_t size) { ...@@ -25,6 +25,9 @@ uint32_t BucketIndexForSize(uint32_t size) {
class FreeList::Entry : public HeapObjectHeader { class FreeList::Entry : public HeapObjectHeader {
public: public:
static Entry& CreateAt(void* memory, size_t size) { static Entry& CreateAt(void* memory, size_t size) {
// Make sure the freelist header is writable. SET_MEMORY_ACCESSIBLE is not
// needed as we write the whole payload of Entry.
ASAN_UNPOISON_MEMORY_REGION(memory, sizeof(Entry));
return *new (memory) Entry(size); return *new (memory) Entry(size);
} }
...@@ -64,7 +67,9 @@ FreeList& FreeList::operator=(FreeList&& other) V8_NOEXCEPT { ...@@ -64,7 +67,9 @@ FreeList& FreeList::operator=(FreeList&& other) V8_NOEXCEPT {
return *this; return *this;
} }
Address FreeList::Add(FreeList::Block block) { void FreeList::Add(FreeList::Block block) { AddReturningUnusedBounds(block); }
std::pair<Address, Address> FreeList::AddReturningUnusedBounds(Block block) {
const size_t size = block.size; const size_t size = block.size;
DCHECK_GT(kPageSize, size); DCHECK_GT(kPageSize, size);
DCHECK_LE(sizeof(HeapObjectHeader), size); DCHECK_LE(sizeof(HeapObjectHeader), size);
...@@ -79,12 +84,11 @@ Address FreeList::Add(FreeList::Block block) { ...@@ -79,12 +84,11 @@ Address FreeList::Add(FreeList::Block block) {
USE(filler); USE(filler);
DCHECK_EQ(reinterpret_cast<Address>(block.address) + size, DCHECK_EQ(reinterpret_cast<Address>(block.address) + size,
filler.ObjectEnd()); filler.ObjectEnd());
return reinterpret_cast<Address>(block.address) + size; DCHECK_EQ(reinterpret_cast<Address>(&filler + 1), filler.ObjectEnd());
return {reinterpret_cast<Address>(&filler + 1),
reinterpret_cast<Address>(&filler + 1)};
} }
// Make sure the freelist header is writable. SET_MEMORY_ACCESSIBLE is not
// needed as we write the whole payload of Entry.
ASAN_UNPOISON_MEMORY_REGION(block.address, sizeof(Entry));
Entry& entry = Entry::CreateAt(block.address, size); Entry& entry = Entry::CreateAt(block.address, size);
const size_t index = BucketIndexForSize(static_cast<uint32_t>(size)); const size_t index = BucketIndexForSize(static_cast<uint32_t>(size));
entry.Link(&free_list_heads_[index]); entry.Link(&free_list_heads_[index]);
...@@ -92,7 +96,9 @@ Address FreeList::Add(FreeList::Block block) { ...@@ -92,7 +96,9 @@ Address FreeList::Add(FreeList::Block block) {
if (!entry.Next()) { if (!entry.Next()) {
free_list_tails_[index] = &entry; free_list_tails_[index] = &entry;
} }
return reinterpret_cast<Address>(block.address) + sizeof(Entry); DCHECK_EQ(entry.ObjectEnd(), reinterpret_cast<Address>(&entry) + size);
return {reinterpret_cast<Address>(&entry + 1),
reinterpret_cast<Address>(&entry) + size};
} }
void FreeList::Append(FreeList&& other) { void FreeList::Append(FreeList&& other) {
......
...@@ -18,12 +18,7 @@ namespace internal { ...@@ -18,12 +18,7 @@ namespace internal {
class Filler : public HeapObjectHeader { class Filler : public HeapObjectHeader {
public: public:
static Filler& CreateAt(void* memory, size_t size) { inline static Filler& CreateAt(void* memory, size_t size);
// The memory area only needs to be unpoisoned when running with ASAN. Zapped
// values (DEBUG) or uninitialized values (MSAN) are overwritten below.
ASAN_UNPOISON_MEMORY_REGION(memory, sizeof(Filler));
return *new (memory) Filler(size);
}
protected: protected:
explicit Filler(size_t size) : HeapObjectHeader(size, kFreeListGCInfoIndex) {} explicit Filler(size_t size) : HeapObjectHeader(size, kFreeListGCInfoIndex) {}
...@@ -47,10 +42,12 @@ class V8_EXPORT_PRIVATE FreeList { ...@@ -47,10 +42,12 @@ class V8_EXPORT_PRIVATE FreeList {
// Allocates entries which are at least of the provided size. // Allocates entries which are at least of the provided size.
Block Allocate(size_t); Block Allocate(size_t);
// Adds block to the freelist. The minimal block size is two words. // Adds block to the freelist. The minimal block size is a word. Regular
// Returns the start of the free list payload that will not be accessed by // entries have two words and unusable filler entries have a single word.
// the free list itself. void Add(Block);
Address Add(Block); // Same as `Add()` but also returns the bounds of memory that is not required
// for free list management.
std::pair<Address, Address> AddReturningUnusedBounds(Block);
// Append other freelist into this. // Append other freelist into this.
void Append(FreeList&&); void Append(FreeList&&);
...@@ -75,6 +72,14 @@ class V8_EXPORT_PRIVATE FreeList { ...@@ -75,6 +72,14 @@ class V8_EXPORT_PRIVATE FreeList {
size_t biggest_free_list_index_ = 0; size_t biggest_free_list_index_ = 0;
}; };
// static
Filler& Filler::CreateAt(void* memory, size_t size) {
// The memory area only needs to be unpoisoned when running with ASAN. Zapped
// values (DEBUG) or uninitialized values (MSAN) are overwritten below.
ASAN_UNPOISON_MEMORY_REGION(memory, sizeof(Filler));
return *new (memory) Filler(size);
}
} // namespace internal } // namespace internal
} // namespace cppgc } // namespace cppgc
......
...@@ -77,11 +77,12 @@ class DiscardingFreeHandler : public FreeHandlerBase { ...@@ -77,11 +77,12 @@ class DiscardingFreeHandler : public FreeHandlerBase {
: page_allocator_(page_allocator), free_list_(free_list), page_(page) {} : page_allocator_(page_allocator), free_list_(free_list), page_(page) {}
void Free(FreeList::Block block) { void Free(FreeList::Block block) {
const auto unused_range = free_list_.AddReturningUnusedBounds(block);
const uintptr_t aligned_begin_unused = const uintptr_t aligned_begin_unused =
RoundUp(reinterpret_cast<uintptr_t>(free_list_.Add(block)), RoundUp(reinterpret_cast<uintptr_t>(unused_range.first),
page_allocator_.CommitPageSize()); page_allocator_.CommitPageSize());
const uintptr_t aligned_end_unused = const uintptr_t aligned_end_unused =
RoundDown(reinterpret_cast<uintptr_t>(block.address) + block.size, RoundDown(reinterpret_cast<uintptr_t>(unused_range.second),
page_allocator_.CommitPageSize()); page_allocator_.CommitPageSize());
if (aligned_begin_unused < aligned_end_unused) { if (aligned_begin_unused < aligned_end_unused) {
const size_t discarded_size = aligned_end_unused - aligned_begin_unused; const size_t discarded_size = aligned_end_unused - aligned_begin_unused;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment