Commit af1bffd3 authored by Hannes Payer, committed by Commit Bot

[heap] Move FreeListCategory memory out of the page header.

Bug: chromium:774108
Change-Id: I5345fed261862b0e20356ec4579b16cdf0ea58a6
Reviewed-on: https://chromium-review.googlesource.com/899148
Commit-Queue: Hannes Payer <hpayer@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#51606}
parent 15bf3ae5
......@@ -138,12 +138,6 @@ bool NewSpace::FromSpaceContainsSlow(Address a) {
bool NewSpace::ToSpaceContains(Object* o) { return to_space_.Contains(o); }
bool NewSpace::FromSpaceContains(Object* o) { return from_space_.Contains(o); }
// Sets up each in-place free list category of this chunk with the
// FreeListCategoryType that matches its slot index.
void MemoryChunk::InitializeFreeListCategories() {
  for (int type = kFirstCategory; type < kNumberOfCategories; type++) {
    categories_[type].Initialize(static_cast<FreeListCategoryType>(type));
  }
}
bool PagedSpace::Contains(Address addr) {
if (heap_->lo_space()->FindPage(addr)) return false;
return MemoryChunk::FromAnyPointerAddress(heap(), addr)->owner() == this;
......@@ -158,6 +152,7 @@ void PagedSpace::UnlinkFreeListCategories(Page* page) {
DCHECK_EQ(this, page->owner());
page->ForAllFreeListCategories([this](FreeListCategory* category) {
DCHECK_EQ(free_list(), category->owner());
category->set_free_list(nullptr);
free_list()->RemoveCategory(category);
});
}
......@@ -165,7 +160,8 @@ void PagedSpace::UnlinkFreeListCategories(Page* page) {
size_t PagedSpace::RelinkFreeListCategories(Page* page) {
DCHECK_EQ(this, page->owner());
size_t added = 0;
page->ForAllFreeListCategories([&added](FreeListCategory* category) {
page->ForAllFreeListCategories([this, &added](FreeListCategory* category) {
category->set_free_list(&free_list_);
added += category->available();
category->Relink();
});
......@@ -257,23 +253,14 @@ MemoryChunk* MemoryChunkIterator::next() {
UNREACHABLE();
}
// A FreeListCategory is embedded in the header of the page that owns it,
// so the page can be recovered from the category's own address.
Page* FreeListCategory::page() const {
  Address self =
      reinterpret_cast<Address>(const_cast<FreeListCategory*>(this));
  return Page::FromAddress(self);
}
// Returns the page backing the first category of |type|, or nullptr when
// that category list is currently empty.
Page* FreeList::GetPageForCategoryType(FreeListCategoryType type) {
  FreeListCategory* category = top(type);
  if (category == nullptr) return nullptr;
  return category->page();
}
// Derives the owning free list indirectly: this category's address maps to
// its page, the page's owner is a PagedSpace, and that space holds the list.
FreeList* FreeListCategory::owner() {
  Page* page = Page::FromAddress(reinterpret_cast<Address>(this));
  PagedSpace* space = reinterpret_cast<PagedSpace*>(page->owner());
  return space->free_list();
}
// The owning free list is now cached directly on the category (free_list_),
// replacing the old address-arithmetic lookup through the page header.
FreeList* FreeListCategory::owner() { return free_list_; }
bool FreeListCategory::is_linked() {
// NOTE(review): this is a diff view — the two return statements below are the
// removed (old) and added (new) lines of the same hunk; the second one is the
// version that survives. The new form drops the owner()->top(type_) probe,
// presumably because a category at the top of its list also has a non-null
// link after this change — TODO confirm against the FreeList linking code.
return prev_ != nullptr || next_ != nullptr || owner()->top(type_) == this;
return prev_ != nullptr || next_ != nullptr;
}
AllocationResult LocalAllocationBuffer::AllocateRawAligned(
......
......@@ -626,7 +626,10 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
chunk->set_next_chunk(nullptr);
chunk->set_prev_chunk(nullptr);
chunk->local_tracker_ = nullptr;
chunk->InitializeFreeListCategories();
for (int i = kFirstCategory; i < kNumberOfCategories; i++) {
chunk->categories_[i] = nullptr;
}
heap->incremental_marking()->non_atomic_marking_state()->ClearLiveness(chunk);
......@@ -649,6 +652,7 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
if (reservation != nullptr) {
chunk->reservation_.TakeControl(reservation);
}
return chunk;
}
......@@ -658,6 +662,8 @@ Page* PagedSpace::InitializePage(MemoryChunk* chunk, Executability executable) {
// Make sure that categories are initialized before freeing the area.
page->ResetAllocatedBytes();
heap()->incremental_marking()->SetOldSpacePageFlags(page);
page->AllocateFreeListCategories();
page->InitializeFreeListCategories();
page->InitializationMemoryFence();
return page;
}
......@@ -705,6 +711,28 @@ LargePage* LargePage::Initialize(Heap* heap, MemoryChunk* chunk,
return page;
}
void Page::AllocateFreeListCategories() {
for (int i = kFirstCategory; i < kNumberOfCategories; i++) {
categories_[i] = new FreeListCategory(
reinterpret_cast<PagedSpace*>(owner())->free_list(), this);
}
}
// Resets every already-allocated category to represent its own type slot.
void Page::InitializeFreeListCategories() {
  for (int type = kFirstCategory; type < kNumberOfCategories; type++) {
    categories_[type]->Initialize(static_cast<FreeListCategoryType>(type));
  }
}
void Page::ReleaseFreeListCategories() {
for (int i = kFirstCategory; i < kNumberOfCategories; i++) {
if (categories_[i] != nullptr) {
delete categories_[i];
categories_[i] = nullptr;
}
}
}
Page* Page::ConvertNewToOld(Page* old_page) {
DCHECK(!old_page->is_anchor());
DCHECK(old_page->InNewSpace());
......@@ -722,6 +750,10 @@ size_t MemoryChunk::CommittedPhysicalMemory() {
return high_water_mark_.Value();
}
// Every owning space except the large-object space consists of regular
// pages, so anything not in LO_SPACE counts as paged.
bool MemoryChunk::IsPagedSpace() const {
  return LO_SPACE != owner()->identity();
}
void MemoryChunk::InsertAfter(MemoryChunk* other) {
MemoryChunk* other_next = other->next_chunk();
......@@ -875,7 +907,6 @@ MemoryChunk* MemoryAllocator::AllocateChunk(size_t reserve_area_size,
executable, owner, &reservation);
if (chunk->executable()) RegisterExecutableMemoryChunk(chunk);
return chunk;
}
......@@ -1249,6 +1280,11 @@ void MemoryChunk::ReleaseAllocatedMemory() {
ReleaseInvalidatedSlots();
if (local_tracker_ != nullptr) ReleaseLocalTracker();
if (young_generation_bitmap_ != nullptr) ReleaseYoungGenerationBitmap();
if (IsPagedSpace()) {
Page* page = static_cast<Page*>(this);
page->ReleaseFreeListCategories();
}
}
static SlotSet* AllocateAndInitializeSlotSet(size_t size, Address page_start) {
......@@ -2680,7 +2716,6 @@ void FreeListCategory::Reset() {
FreeSpace* FreeListCategory::PickNodeFromList(size_t* node_size) {
DCHECK(page()->CanAllocate());
FreeSpace* node = top();
if (node == nullptr) return nullptr;
set_top(node->next());
......@@ -2692,7 +2727,6 @@ FreeSpace* FreeListCategory::PickNodeFromList(size_t* node_size) {
FreeSpace* FreeListCategory::TryPickNodeFromList(size_t minimum_size,
size_t* node_size) {
DCHECK(page()->CanAllocate());
FreeSpace* node = PickNodeFromList(node_size);
if ((node != nullptr) && (*node_size < minimum_size)) {
Free(node->address(), *node_size, kLinkCategory);
......@@ -2705,7 +2739,6 @@ FreeSpace* FreeListCategory::TryPickNodeFromList(size_t minimum_size,
FreeSpace* FreeListCategory::SearchForNodeInList(size_t minimum_size,
size_t* node_size) {
DCHECK(page()->CanAllocate());
FreeSpace* prev_non_evac_node = nullptr;
for (FreeSpace* cur_node = top(); cur_node != nullptr;
cur_node = cur_node->next()) {
......@@ -2730,7 +2763,7 @@ FreeSpace* FreeListCategory::SearchForNodeInList(size_t minimum_size,
void FreeListCategory::Free(Address start, size_t size_in_bytes,
FreeMode mode) {
CHECK(page()->CanAllocate());
DCHECK(page()->CanAllocate());
FreeSpace* free_space = FreeSpace::cast(HeapObject::FromAddress(start));
free_space->set_next(top());
set_top(free_space);
......
......@@ -150,15 +150,10 @@ enum RememberedSetType {
// A free list category maintains a linked list of free memory blocks.
class FreeListCategory {
public:
static const int kSize = kIntSize + // FreeListCategoryType type_
kIntSize + // padding for type_
kSizetSize + // size_t available_
kPointerSize + // FreeSpace* top_
kPointerSize + // FreeListCategory* prev_
kPointerSize; // FreeListCategory* next_
FreeListCategory()
: type_(kInvalidCategory),
FreeListCategory(FreeList* free_list, Page* page)
: free_list_(free_list),
page_(page),
type_(kInvalidCategory),
available_(0),
top_(nullptr),
prev_(nullptr),
......@@ -198,11 +193,13 @@ class FreeListCategory {
FreeSpace* SearchForNodeInList(size_t minimum_size, size_t* node_size);
inline FreeList* owner();
inline Page* page() const;
inline Page* page() const { return page_; }
inline bool is_linked();
bool is_empty() { return top() == nullptr; }
size_t available() const { return available_; }
void set_free_list(FreeList* free_list) { free_list_ = free_list; }
#ifdef DEBUG
size_t SumFreeList();
int FreeListLength();
......@@ -220,6 +217,12 @@ class FreeListCategory {
FreeListCategory* next() { return next_; }
void set_next(FreeListCategory* next) { next_ = next; }
// This FreeListCategory is owned by the given free_list_.
FreeList* free_list_;
// This FreeListCategory holds free list entries of the given page_.
Page* const page_;
// |type_|: The type of this free list category.
FreeListCategoryType type_;
......@@ -235,6 +238,8 @@ class FreeListCategory {
friend class FreeList;
friend class PagedSpace;
DISALLOW_IMPLICIT_CONSTRUCTORS(FreeListCategory);
};
// MemoryChunk represents a memory region owned by a specific space.
......@@ -372,7 +377,7 @@ class MemoryChunk {
+ kSizetSize // size_t wasted_memory_
+ kPointerSize // AtomicValue next_chunk_
+ kPointerSize // AtomicValue prev_chunk_
+ FreeListCategory::kSize * kNumberOfCategories
+ kPointerSize * kNumberOfCategories
// FreeListCategory categories_[kNumberOfCategories]
+ kPointerSize // LocalArrayBufferTracker* local_tracker_
+ kIntptrSize // intptr_t young_generation_live_byte_count_
......@@ -612,6 +617,8 @@ class MemoryChunk {
void set_owner(Space* space) { owner_.SetValue(space); }
bool IsPagedSpace() const;
void InsertAfter(MemoryChunk* other);
void Unlink();
......@@ -622,8 +629,6 @@ class MemoryChunk {
void SetReadAndExecutable();
void SetReadAndWritable();
inline void InitializeFreeListCategories();
protected:
static MemoryChunk* Initialize(Heap* heap, Address base, size_t size,
Address area_start, Address area_end,
......@@ -701,7 +706,7 @@ class MemoryChunk {
// prev_chunk_ holds a pointer of type MemoryChunk
base::AtomicValue<MemoryChunk*> prev_chunk_;
FreeListCategory categories_[kNumberOfCategories];
FreeListCategory* categories_[kNumberOfCategories];
LocalArrayBufferTracker* local_tracker_;
......@@ -790,7 +795,7 @@ class Page : public MemoryChunk {
template <typename Callback>
inline void ForAllFreeListCategories(Callback callback) {
for (int i = kFirstCategory; i < kNumberOfCategories; i++) {
callback(&categories_[i]);
callback(categories_[i]);
}
}
......@@ -822,7 +827,7 @@ class Page : public MemoryChunk {
}
FreeListCategory* free_list_category(FreeListCategoryType type) {
return &categories_[type];
return categories_[type];
}
bool is_anchor() { return IsFlagSet(Page::ANCHOR); }
......@@ -847,6 +852,10 @@ class Page : public MemoryChunk {
V8_EXPORT_PRIVATE void CreateBlackArea(Address start, Address end);
void DestroyBlackArea(Address start, Address end);
void InitializeFreeListCategories();
void AllocateFreeListCategories();
void ReleaseFreeListCategories();
#ifdef DEBUG
void Print();
#endif // DEBUG
......@@ -1179,8 +1188,7 @@ class V8_EXPORT_PRIVATE MemoryAllocator {
}
void AddMemoryChunkSafe(MemoryChunk* chunk) {
if ((chunk->size() == Page::kPageSize) &&
(chunk->executable() != EXECUTABLE)) {
if (chunk->IsPagedSpace() && chunk->executable() != EXECUTABLE) {
AddMemoryChunkSafe<kRegular>(chunk);
} else {
AddMemoryChunkSafe<kNonRegular>(chunk);
......
......@@ -80,12 +80,10 @@ class TestCodeRangeScope {
DISALLOW_COPY_AND_ASSIGN(TestCodeRangeScope);
};
static void VerifyMemoryChunk(Isolate* isolate,
Heap* heap,
CodeRange* code_range,
size_t reserve_area_size,
size_t commit_area_size,
Executability executable) {
static void VerifyMemoryChunk(Isolate* isolate, Heap* heap,
CodeRange* code_range, size_t reserve_area_size,
size_t commit_area_size, Executability executable,
Space* space) {
MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
CHECK(memory_allocator->SetUp(heap->MaxReserved(), 0));
{
......@@ -99,7 +97,7 @@ static void VerifyMemoryChunk(Isolate* isolate,
(executable == EXECUTABLE) ? MemoryAllocator::CodePageGuardSize() : 0;
MemoryChunk* memory_chunk = memory_allocator->AllocateChunk(
reserve_area_size, commit_area_size, executable, nullptr);
reserve_area_size, commit_area_size, executable, space);
size_t alignment = code_range != nullptr && code_range->valid()
? MemoryChunk::kAlignment
: CommitPageSize();
......@@ -178,36 +176,22 @@ TEST(MemoryChunk) {
const size_t code_range_size = 32 * MB;
if (!code_range->SetUp(code_range_size)) return;
VerifyMemoryChunk(isolate,
heap,
code_range,
reserve_area_size,
initial_commit_area_size,
EXECUTABLE);
VerifyMemoryChunk(isolate,
heap,
code_range,
reserve_area_size,
initial_commit_area_size,
NOT_EXECUTABLE);
VerifyMemoryChunk(isolate, heap, code_range, reserve_area_size,
initial_commit_area_size, EXECUTABLE, heap->code_space());
VerifyMemoryChunk(isolate, heap, code_range, reserve_area_size,
initial_commit_area_size, NOT_EXECUTABLE,
heap->old_space());
delete code_range;
// Without a valid CodeRange, i.e., omitting SetUp.
code_range = new CodeRange(isolate);
VerifyMemoryChunk(isolate,
heap,
code_range,
reserve_area_size,
initial_commit_area_size,
EXECUTABLE);
VerifyMemoryChunk(isolate,
heap,
code_range,
reserve_area_size,
initial_commit_area_size,
NOT_EXECUTABLE);
VerifyMemoryChunk(isolate, heap, code_range, reserve_area_size,
initial_commit_area_size, EXECUTABLE, heap->code_space());
VerifyMemoryChunk(isolate, heap, code_range, reserve_area_size,
initial_commit_area_size, NOT_EXECUTABLE,
heap->old_space());
delete code_range;
}
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment