Commit 2c7e4f21 authored by Dan Elphick, committed by Commit Bot

[heap] Make most Space data members private

Makes all but one data member private instead of protected and replaces
all Space::heap_ accesses with Space::heap().

Also moves Executability down from Space into PagedSpace and removes all
references in SemiSpace since it's always initialized with
NOT_EXECUTABLE.

Bug: v8:7754
Change-Id: Ic03ce35a5f970b3c1e25b32da53e4c9717b2ee1e
Reviewed-on: https://chromium-review.googlesource.com/1055510
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Dan Elphick <delphick@chromium.org>
Cr-Commit-Position: refs/heads/master@{#53146}
parent a55117d7
......@@ -139,7 +139,7 @@ bool NewSpace::ToSpaceContains(Object* o) { return to_space_.Contains(o); }
// Returns true when |o| lies in the from-semispace of the new space.
bool NewSpace::FromSpaceContains(Object* o) { return from_space_.Contains(o); }
// Returns true when |addr| belongs to this paged space. Addresses on
// large-object-space pages are rejected first; otherwise ownership of the
// containing MemoryChunk is compared against |this|.
bool PagedSpace::Contains(Address addr) {
// NOTE(review): the next two lines are the before/after pair from the diff —
// direct heap_ access is replaced by the heap() accessor (same behavior).
if (heap_->lo_space()->FindPage(addr)) return false;
if (heap()->lo_space()->FindPage(addr)) return false;
return MemoryChunk::FromAnyPointerAddress(heap(), addr)->owner() == this;
}
......
......@@ -1464,7 +1464,7 @@ intptr_t Space::GetNextInlineAllocationStepSize() {
// PagedSpace constructor. The two initializer-list lines below are the diff's
// before/after pair: |executable| is no longer forwarded to the Space base
// class and is instead stored in PagedSpace's own executable_ member.
PagedSpace::PagedSpace(Heap* heap, AllocationSpace space,
Executability executable)
: SpaceWithLinearArea(heap, space, executable), anchor_(this) {
: SpaceWithLinearArea(heap, space), executable_(executable), anchor_(this) {
area_size_ = MemoryAllocator::PageAreaSize(space);
accounting_stats_.Clear();
}
......@@ -1794,7 +1794,7 @@ void PagedSpace::FreeLinearAllocationArea() {
// The code page of the linear allocation area needs to be unprotected
// because we are going to write a filler into that memory area below.
if (identity() == CODE_SPACE) {
heap_->UnprotectAndRegisterMemoryChunk(
heap()->UnprotectAndRegisterMemoryChunk(
MemoryChunk::FromAddress(current_top));
}
Free(current_top, current_limit - current_top,
......@@ -1828,7 +1828,7 @@ void PagedSpace::ReleasePage(Page* page) {
// Switches every page of the code space to read+execute permissions.
// Only valid on CODE_SPACE (see the DCHECK).
void PagedSpace::SetReadAndExecutable() {
DCHECK(identity() == CODE_SPACE);
for (Page* page : *this) {
// Diff before/after pair: heap_ access replaced by heap() accessor.
CHECK(heap_->memory_allocator()->IsMemoryChunkExecutable(page));
CHECK(heap()->memory_allocator()->IsMemoryChunkExecutable(page));
page->SetReadAndExecutable();
}
}
......@@ -1836,7 +1836,7 @@ void PagedSpace::SetReadAndExecutable() {
// Switches every page of the code space to read+write permissions (e.g. so
// the collector can patch code). Only valid on CODE_SPACE.
void PagedSpace::SetReadAndWritable() {
DCHECK(identity() == CODE_SPACE);
for (Page* page : *this) {
// Diff before/after pair: heap_ access replaced by heap() accessor.
CHECK(heap_->memory_allocator()->IsMemoryChunkExecutable(page));
CHECK(heap()->memory_allocator()->IsMemoryChunkExecutable(page));
page->SetReadAndWritable();
}
}
......@@ -1890,7 +1890,7 @@ bool PagedSpace::RefillLinearAllocationAreaFromFreeList(size_t size_in_bytes) {
DCHECK_LE(size_in_bytes, limit - start);
if (limit != end) {
if (identity() == CODE_SPACE) {
heap_->UnprotectAndRegisterMemoryChunk(page);
heap()->UnprotectAndRegisterMemoryChunk(page);
}
Free(limit, end - limit, SpaceAccountingMode::kSpaceAccounted);
}
......@@ -2124,7 +2124,7 @@ bool SemiSpace::EnsureCurrentCapacity() {
actual_pages++;
current_page =
heap()->memory_allocator()->AllocatePage<MemoryAllocator::kPooled>(
Page::kAllocatableMemory, this, executable());
Page::kAllocatableMemory, this, NOT_EXECUTABLE);
if (current_page == nullptr) return false;
DCHECK_NOT_NULL(current_page);
current_page->InsertAfter(anchor());
......@@ -2440,7 +2440,7 @@ bool SemiSpace::Commit() {
for (int pages_added = 0; pages_added < num_pages; pages_added++) {
Page* new_page =
heap()->memory_allocator()->AllocatePage<MemoryAllocator::kPooled>(
Page::kAllocatableMemory, this, executable());
Page::kAllocatableMemory, this, NOT_EXECUTABLE);
if (new_page == nullptr) {
RewindPages(current, pages_added);
return false;
......@@ -2499,7 +2499,7 @@ bool SemiSpace::GrowTo(size_t new_capacity) {
for (int pages_added = 0; pages_added < delta_pages; pages_added++) {
Page* new_page =
heap()->memory_allocator()->AllocatePage<MemoryAllocator::kPooled>(
Page::kAllocatableMemory, this, executable());
Page::kAllocatableMemory, this, NOT_EXECUTABLE);
if (new_page == nullptr) {
RewindPages(last_page, pages_added);
return false;
......@@ -3272,7 +3272,7 @@ HeapObject* LargeObjectIterator::Next() {
// LargeObjectSpace
LargeObjectSpace::LargeObjectSpace(Heap* heap, AllocationSpace id)
: Space(heap, id, NOT_EXECUTABLE), // Managed on a per-allocation basis
: Space(heap, id), // Managed on a per-allocation basis
first_page_(nullptr),
size_(0),
page_count_(0),
......
......@@ -902,11 +902,10 @@ class LargePage : public MemoryChunk {
// Space is the abstract superclass for all allocation spaces.
class Space : public Malloced {
public:
// Space base-class constructor. The two signature lines below are the diff's
// before/after pair: the Executability parameter (and the executable_
// initializer further down) are removed from Space by this commit.
Space(Heap* heap, AllocationSpace id, Executability executable)
Space(Heap* heap, AllocationSpace id)
: allocation_observers_paused_(false),
heap_(heap),
id_(id),
executable_(executable),
committed_(0),
max_committed_(0) {}
......@@ -914,9 +913,6 @@ class Space : public Malloced {
// Returns the Heap this space belongs to.
Heap* heap() const { return heap_; }
// Does the space need executable memory?
// NOTE(review): removed line in this diff — executable() moves to PagedSpace.
Executability executable() { return executable_; }
// Identity used in error reporting.
// Returns the AllocationSpace enum tag for this space.
AllocationSpace identity() { return id_; }
......@@ -989,12 +985,11 @@ class Space : public Malloced {
}
std::vector<AllocationObserver*> allocation_observers_;
bool allocation_observers_paused_;
protected:
private:
bool allocation_observers_paused_;
Heap* heap_;
AllocationSpace id_;
Executability executable_;
// Keeps track of committed memory in a space.
size_t committed_;
......@@ -1976,8 +1971,8 @@ class LocalAllocationBuffer {
class SpaceWithLinearArea : public Space {
public:
// SpaceWithLinearArea constructor. The paired lines below are the diff's
// before/after: the Executability parameter is dropped along with Space's.
// The linear allocation area starts out empty (null top/limit).
SpaceWithLinearArea(Heap* heap, AllocationSpace id, Executability executable)
: Space(heap, id, executable), top_on_previous_step_(0) {
SpaceWithLinearArea(Heap* heap, AllocationSpace id)
: Space(heap, id), top_on_previous_step_(0) {
allocation_info_.Reset(kNullAddress, kNullAddress);
}
......@@ -2057,6 +2052,9 @@ class V8_EXPORT_PRIVATE PagedSpace
inline bool Contains(Object* o);
bool ContainsSlow(Address addr);
// Does the space need executable memory?
// NOTE(review): line added by this diff — executable() now lives on
// PagedSpace instead of the Space base class.
Executability executable() { return executable_; }
// During boot the free_space_map is created, and afterwards we may need
// to write it into the free list nodes that were already created.
void RepairFreeListsAfterDeserialization();
......@@ -2123,7 +2121,7 @@ class V8_EXPORT_PRIVATE PagedSpace
size_t Free(Address start, size_t size_in_bytes, SpaceAccountingMode mode) {
if (size_in_bytes == 0) return 0;
heap_->CreateFillerObjectAt(start, static_cast<int>(size_in_bytes),
heap()->CreateFillerObjectAt(start, static_cast<int>(size_in_bytes),
ClearRecordedSlots::kNo);
if (mode == SpaceAccountingMode::kSpaceAccounted) {
return AccountedFree(start, size_in_bytes);
......@@ -2318,6 +2316,8 @@ class V8_EXPORT_PRIVATE PagedSpace
V8_WARN_UNUSED_RESULT bool RawSlowRefillLinearAllocationArea(
int size_in_bytes);
Executability executable_;
size_t area_size_;
// Accounting information for this space.
......@@ -2354,7 +2354,7 @@ class SemiSpace : public Space {
static void Swap(SemiSpace* from, SemiSpace* to);
SemiSpace(Heap* heap, SemiSpaceId semispace)
: Space(heap, NEW_SPACE, NOT_EXECUTABLE),
: Space(heap, NEW_SPACE),
current_capacity_(0),
maximum_capacity_(0),
minimum_capacity_(0),
......@@ -2547,7 +2547,7 @@ class NewSpace : public SpaceWithLinearArea {
typedef PageIterator iterator;
// NewSpace constructor; sets up the paired to/from semispaces. The two
// initializer-list lines below are the diff's before/after pair: the
// NOT_EXECUTABLE argument is dropped now that Space has no Executability.
explicit NewSpace(Heap* heap)
: SpaceWithLinearArea(heap, NEW_SPACE, NOT_EXECUTABLE),
: SpaceWithLinearArea(heap, NEW_SPACE),
to_space_(heap, kToSpace),
from_space_(heap, kFromSpace),
reservation_() {}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment