Commit 8211602c authored by Omer Katz, committed by V8 LUCI CQ

[heap] Replace non-null pointers in Space with references

Bug: v8:12612
Change-Id: I4d9de4446d343040ae29e25d23a09cf4c740bde0
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3743448
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Auto-Submit: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Toon Verwaest <verwaest@chromium.org>
Cr-Commit-Position: refs/heads/main@{#81874}
parent 62b4d3c1
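The whole diff below applies one mechanical pattern: members and parameters that were guaranteed-non-null pointers (checked with DCHECK_NOT_NULL, passed with '&', accessed with '->') become references. As a rough illustration only, using simplified stand-in types rather than the actual V8 classes, the before/after shape is:

    #include <cassert>

    struct AllocationCounter {};

    // Before: a pointer member whose non-null invariant is checked at runtime.
    class SpaceBefore {
     public:
      explicit SpaceBefore(AllocationCounter* counter) : counter_(counter) {
        assert(counter_ != nullptr);  // stand-in for V8's DCHECK_NOT_NULL
      }

     private:
      AllocationCounter* const counter_;
    };

    // After: a reference member, non-null by construction. Call sites drop the
    // '&' and member accesses switch from '->' to '.', which is most of this diff.
    class SpaceAfter {
     public:
      explicit SpaceAfter(AllocationCounter& counter) : counter_(counter) {}

     private:
      AllocationCounter& counter_;
    };

    int main() {
      AllocationCounter counter;
      SpaceBefore before(&counter);  // caller passes an address
      SpaceAfter after(counter);     // caller passes the object itself
      (void)before;
      (void)after;
    }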
@@ -4083,8 +4083,8 @@ bool Isolate::Init(SnapshotData* startup_snapshot_data,
DCHECK(!heap_.HasBeenSetUp());
heap_.SetUp(main_thread_local_heap());
ReadOnlyHeap::SetUp(this, read_only_snapshot_data, can_rehash);
-  heap_.SetUpSpaces(&isolate_data_.new_allocation_info_,
-                    &isolate_data_.old_allocation_info_);
+  heap_.SetUpSpaces(isolate_data_.new_allocation_info_,
+                    isolate_data_.old_allocation_info_);
if (OwnsStringTables()) {
string_table_ = std::make_shared<StringTable>(this);
......
@@ -5793,8 +5793,8 @@ class StressConcurrentAllocationObserver : public AllocationObserver {
Heap* heap_;
};
-void Heap::SetUpSpaces(LinearAllocationArea* new_allocation_info,
-                       LinearAllocationArea* old_allocation_info) {
+void Heap::SetUpSpaces(LinearAllocationArea& new_allocation_info,
+                       LinearAllocationArea& old_allocation_info) {
  // Ensure SetUpFromReadOnlySpace has been run.
DCHECK_NOT_NULL(read_only_space_);
const bool has_young_gen = !FLAG_single_generation && !IsShared();
......
@@ -831,8 +831,8 @@ class Heap {
void ReplaceReadOnlySpace(SharedReadOnlySpace* shared_ro_space);
// Sets up the heap memory without creating any objects.
-  void SetUpSpaces(LinearAllocationArea* new_allocation_info,
-                   LinearAllocationArea* old_allocation_info);
+  void SetUpSpaces(LinearAllocationArea& new_allocation_info,
+                   LinearAllocationArea& old_allocation_info);
// Prepares the heap, setting up for deserialization.
void InitializeMainThreadLocalHeap(LocalHeap* main_thread_local_heap);
......
@@ -95,7 +95,7 @@ HeapObject LargeObjectSpaceObjectIterator::Next() {
// OldLargeObjectSpace
LargeObjectSpace::LargeObjectSpace(Heap* heap, AllocationSpace id)
-    : Space(heap, id, new NoFreeList(), &allocation_counter_),
+    : Space(heap, id, new NoFreeList(), allocation_counter_),
size_(0),
page_count_(0),
objects_size_(0),
......
@@ -68,7 +68,7 @@ V8_INLINE bool SemiSpaceNewSpace::EnsureAllocation(
AdvanceAllocationObservers();
-  Address old_top = allocation_info_->top();
+  Address old_top = allocation_info_.top();
Address high = to_space_.page_high();
int filler_size = Heap::GetFillToAlign(old_top, alignment);
int aligned_size_in_bytes = size_in_bytes + filler_size;
@@ -82,7 +82,7 @@ V8_INLINE bool SemiSpaceNewSpace::EnsureAllocation(
return false;
}
-  old_top = allocation_info_->top();
+  old_top = allocation_info_.top();
high = to_space_.page_high();
filler_size = Heap::GetFillToAlign(old_top, alignment);
aligned_size_in_bytes = size_in_bytes + filler_size;
@@ -94,7 +94,7 @@ V8_INLINE bool SemiSpaceNewSpace::EnsureAllocation(
DCHECK(old_top + aligned_size_in_bytes <= high);
UpdateInlineAllocationLimit(aligned_size_in_bytes);
-  DCHECK_EQ(allocation_info_->start(), allocation_info_->top());
+  DCHECK_EQ(allocation_info_.start(), allocation_info_.top());
DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);
return true;
}
......
@@ -444,9 +444,9 @@ void SemiSpaceObjectIterator::Initialize(Address start, Address end) {
// -----------------------------------------------------------------------------
// NewSpace implementation
-NewSpace::NewSpace(Heap* heap, LinearAllocationArea* allocation_info)
+NewSpace::NewSpace(Heap* heap, LinearAllocationArea& allocation_info)
    : SpaceWithLinearArea(heap, NEW_SPACE, new NoFreeList(),
-                          &allocation_counter_, allocation_info,
+                          allocation_counter_, allocation_info,
linear_area_original_data_) {}
void NewSpace::ResetParkedAllocationBuffers() {
@@ -454,9 +454,8 @@ void NewSpace::ResetParkedAllocationBuffers() {
}
void NewSpace::MaybeFreeUnusedLab(LinearAllocationArea info) {
-  if (allocation_info_->MergeIfAdjacent(info)) {
-    linear_area_original_data_.set_original_top_release(
-        allocation_info_->top());
+  if (allocation_info_.MergeIfAdjacent(info)) {
+    linear_area_original_data_.set_original_top_release(allocation_info_.top());
}
#if DEBUG
@@ -471,10 +470,10 @@ void NewSpace::VerifyTop() const {
// Ensure that original_top_ always >= LAB start. The delta between start_
// and top_ is still to be processed by allocation observers.
DCHECK_GE(linear_area_original_data_.get_original_top_acquire(),
-            allocation_info_->start());
+            allocation_info_.start());
  // Ensure that limit() is <= original_limit_.
-  DCHECK_LE(allocation_info_->limit(),
+  DCHECK_LE(allocation_info_.limit(),
linear_area_original_data_.get_original_limit_relaxed());
}
#endif // DEBUG
@@ -573,7 +572,7 @@ void NewSpace::PromotePageToOldSpace(Page* page) {
SemiSpaceNewSpace::SemiSpaceNewSpace(Heap* heap,
size_t initial_semispace_capacity,
size_t max_semispace_capacity,
-                                     LinearAllocationArea* allocation_info)
+                                     LinearAllocationArea& allocation_info)
: NewSpace(heap, allocation_info),
to_space_(heap, kToSpace),
from_space_(heap, kFromSpace) {
@@ -591,7 +590,7 @@ SemiSpaceNewSpace::SemiSpaceNewSpace(Heap* heap,
SemiSpaceNewSpace::~SemiSpaceNewSpace() {
// Tears down the space. Heap memory was not allocated by the space, so it
// is not deallocated here.
-  allocation_info_->Reset(kNullAddress, kNullAddress);
+  allocation_info_.Reset(kNullAddress, kNullAddress);
to_space_.TearDown();
from_space_.TearDown();
@@ -631,7 +630,7 @@ void SemiSpaceNewSpace::Shrink() {
size_t SemiSpaceNewSpace::CommittedPhysicalMemory() const {
if (!base::OS::HasLazyCommits()) return CommittedMemory();
-  BasicMemoryChunk::UpdateHighWaterMark(allocation_info_->top());
+  BasicMemoryChunk::UpdateHighWaterMark(allocation_info_.top());
size_t size = to_space_.CommittedPhysicalMemory();
if (from_space_.IsCommitted()) {
size += from_space_.CommittedPhysicalMemory();
@@ -649,8 +648,8 @@ void SemiSpaceNewSpace::UpdateLinearAllocationArea(Address known_top) {
AdvanceAllocationObservers();
Address new_top = known_top == 0 ? to_space_.page_low() : known_top;
-  BasicMemoryChunk::UpdateHighWaterMark(allocation_info_->top());
-  allocation_info_->Reset(new_top, to_space_.page_high());
+  BasicMemoryChunk::UpdateHighWaterMark(allocation_info_.top());
+  allocation_info_.Reset(new_top, to_space_.page_high());
// The order of the following two stores is important.
// See the corresponding loads in ConcurrentMarking::Run.
{
@@ -682,7 +681,7 @@ void SemiSpaceNewSpace::UpdateInlineAllocationLimit(size_t min_size) {
Address new_limit = ComputeLimit(top(), to_space_.page_high(), min_size);
DCHECK_LE(top(), new_limit);
DCHECK_LE(new_limit, to_space_.page_high());
-  allocation_info_->SetLimit(new_limit);
+  allocation_info_.SetLimit(new_limit);
DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);
#if DEBUG
@@ -691,7 +690,7 @@ void SemiSpaceNewSpace::UpdateInlineAllocationLimit(size_t min_size) {
}
bool SemiSpaceNewSpace::AddFreshPage() {
-  Address top = allocation_info_->top();
+  Address top = allocation_info_.top();
DCHECK(!OldSpace::IsAtPageStart(top));
// Clear remainder of current page.
@@ -888,8 +887,8 @@ void SemiSpaceNewSpace::MakeLinearAllocationAreaIterable() {
PagedSpaceForNewSpace::PagedSpaceForNewSpace(
Heap* heap, size_t initial_capacity, size_t max_capacity,
-    AllocationCounter* allocation_counter,
-    LinearAllocationArea* allocation_info,
+    AllocationCounter& allocation_counter,
+    LinearAllocationArea& allocation_info,
LinearAreaOriginalData& linear_area_original_data)
: PagedSpaceBase(heap, NEW_SPACE, NOT_EXECUTABLE,
FreeList::CreateFreeList(), allocation_counter,
@@ -1015,15 +1014,15 @@ void PagedSpaceForNewSpace::Verify(Isolate* isolate,
PagedNewSpace::PagedNewSpace(Heap* heap, size_t initial_capacity,
size_t max_capacity,
-                           LinearAllocationArea* allocation_info)
+                           LinearAllocationArea& allocation_info)
    : NewSpace(heap, allocation_info),
-      paged_space_(heap, initial_capacity, max_capacity, &allocation_counter_,
+      paged_space_(heap, initial_capacity, max_capacity, allocation_counter_,
allocation_info_, linear_area_original_data_) {}
PagedNewSpace::~PagedNewSpace() {
// Tears down the space. Heap memory was not allocated by the space, so it
// is not deallocated here.
-  allocation_info_->Reset(kNullAddress, kNullAddress);
+  allocation_info_.Reset(kNullAddress, kNullAddress);
paged_space_.TearDown();
}
......
@@ -43,7 +43,7 @@ class SemiSpace final : public Space {
static void Swap(SemiSpace* from, SemiSpace* to);
SemiSpace(Heap* heap, SemiSpaceId semispace)
-      : Space(heap, NEW_SPACE, new NoFreeList(), &allocation_counter_),
+      : Space(heap, NEW_SPACE, new NoFreeList(), allocation_counter_),
current_capacity_(0),
target_capacity_(0),
maximum_capacity_(0),
@@ -242,7 +242,7 @@ class NewSpace : NON_EXPORTED_BASE(public SpaceWithLinearArea) {
using iterator = PageIterator;
using const_iterator = ConstPageIterator;
-  NewSpace(Heap* heap, LinearAllocationArea* allocation_info);
+  NewSpace(Heap* heap, LinearAllocationArea& allocation_info);
inline bool Contains(Object o) const;
inline bool Contains(HeapObject o) const;
@@ -355,7 +355,7 @@ class V8_EXPORT_PRIVATE SemiSpaceNewSpace final : public NewSpace {
SemiSpaceNewSpace(Heap* heap, size_t initial_semispace_capacity,
size_t max_semispace_capacity,
-                    LinearAllocationArea* allocation_info);
+                    LinearAllocationArea& allocation_info);
~SemiSpaceNewSpace() final;
@@ -553,8 +553,8 @@ class V8_EXPORT_PRIVATE PagedSpaceForNewSpace final : public PagedSpaceBase {
// from OS.
explicit PagedSpaceForNewSpace(
Heap* heap, size_t initial_capacity, size_t max_capacity,
-      AllocationCounter* allocation_counter,
-      LinearAllocationArea* allocation_info,
+      AllocationCounter& allocation_counter,
+      LinearAllocationArea& allocation_info,
LinearAreaOriginalData& linear_area_original_data);
void TearDown() { PagedSpaceBase::TearDown(); }
@@ -636,7 +636,7 @@ class V8_EXPORT_PRIVATE PagedNewSpace final : public NewSpace {
}
PagedNewSpace(Heap* heap, size_t initial_capacity, size_t max_capacity,
-                LinearAllocationArea* allocation_info);
+                LinearAllocationArea& allocation_info);
~PagedNewSpace() final;
@@ -789,9 +789,9 @@ class V8_EXPORT_PRIVATE PagedNewSpace final : public NewSpace {
// For contiguous spaces, top should be in the space (or at the end) and limit
// should be the end of the space.
#define DCHECK_SEMISPACE_ALLOCATION_INFO(info, space) \
-  SLOW_DCHECK((space).page_low() <= (info)->top() &&   \
-              (info)->top() <= (space).page_high() &&  \
-              (info)->limit() <= (space).page_high())
+  SLOW_DCHECK((space).page_low() <= (info).top() &&   \
+              (info).top() <= (space).page_high() &&  \
+              (info).limit() <= (space).page_high())
} // namespace internal
} // namespace v8
......
@@ -79,9 +79,8 @@ size_t PagedSpaceBase::RelinkFreeListCategories(Page* page) {
}
bool PagedSpaceBase::TryFreeLast(Address object_address, int object_size) {
-  if (allocation_info_->top() != kNullAddress) {
-    return allocation_info_->DecrementTopIfAdjacent(object_address,
-                                                    object_size);
+  if (allocation_info_.top() != kNullAddress) {
+    return allocation_info_.DecrementTopIfAdjacent(object_address, object_size);
}
return false;
}
@@ -105,7 +104,7 @@ V8_INLINE bool PagedSpaceBase::EnsureAllocation(int size_in_bytes,
if (out_max_aligned_size) {
*out_max_aligned_size = size_in_bytes;
}
-  if (allocation_info_->top() + size_in_bytes <= allocation_info_->limit()) {
+  if (allocation_info_.top() + size_in_bytes <= allocation_info_.limit()) {
return true;
}
return RefillLabMain(size_in_bytes, origin);
......
@@ -116,8 +116,8 @@ Page* PagedSpaceBase::InitializePage(MemoryChunk* chunk) {
PagedSpaceBase::PagedSpaceBase(
Heap* heap, AllocationSpace space, Executability executable,
-    FreeList* free_list, AllocationCounter* allocation_counter,
-    LinearAllocationArea* allocation_info_,
+    FreeList* free_list, AllocationCounter& allocation_counter,
+    LinearAllocationArea& allocation_info_,
LinearAreaOriginalData& linear_area_original_data,
CompactionSpaceKind compaction_space_kind)
: SpaceWithLinearArea(heap, space, free_list, allocation_counter,
@@ -237,7 +237,7 @@ size_t PagedSpaceBase::CommittedPhysicalMemory() const {
CodePageHeaderModificationScope rwx_write_scope(
"Updating high water mark for Code pages requires write access to "
"the Code page headers");
-  BasicMemoryChunk::UpdateHighWaterMark(allocation_info_->top());
+  BasicMemoryChunk::UpdateHighWaterMark(allocation_info_.top());
return committed_physical_memory();
}
@@ -338,8 +338,8 @@ void PagedSpaceBase::RemovePage(Page* page) {
void PagedSpaceBase::SetTopAndLimit(Address top, Address limit) {
DCHECK(top == limit ||
Page::FromAddress(top) == Page::FromAddress(limit - 1));
-  BasicMemoryChunk::UpdateHighWaterMark(allocation_info_->top());
-  allocation_info_->Reset(top, limit);
+  BasicMemoryChunk::UpdateHighWaterMark(allocation_info_.top());
+  allocation_info_.Reset(top, limit);
base::Optional<base::SharedMutexGuard<base::kExclusive>> optional_guard;
if (!is_compaction_space()) optional_guard.emplace(linear_area_lock());
@@ -363,7 +363,7 @@ void PagedSpaceBase::ResetFreeList() {
void PagedSpaceBase::ShrinkImmortalImmovablePages() {
DCHECK(!heap()->deserialization_complete());
-  BasicMemoryChunk::UpdateHighWaterMark(allocation_info_->top());
+  BasicMemoryChunk::UpdateHighWaterMark(allocation_info_.top());
FreeLinearAllocationArea();
ResetFreeList();
for (Page* page : *this) {
@@ -538,7 +538,7 @@ void PagedSpaceBase::ReleasePage(Page* page) {
free_list_->EvictFreeListItems(page);
-  if (Page::FromAllocationAreaAddress(allocation_info_->top()) == page) {
+  if (Page::FromAllocationAreaAddress(allocation_info_.top()) == page) {
SetTopAndLimit(kNullAddress, kNullAddress);
}
@@ -616,7 +616,7 @@ bool PagedSpaceBase::TryAllocationFromFreeListMain(size_t size_in_bytes,
Page* page = Page::FromHeapObject(new_node);
IncreaseAllocatedBytes(new_node_size, page);
-  DCHECK_EQ(allocation_info_->start(), allocation_info_->top());
+  DCHECK_EQ(allocation_info_.start(), allocation_info_.top());
Address start = new_node.address();
Address end = new_node.address() + new_node_size;
Address limit = ComputeLimit(start, end, size_in_bytes);
@@ -763,7 +763,7 @@ void PagedSpaceBase::Print() {}
#ifdef VERIFY_HEAP
void PagedSpaceBase::Verify(Isolate* isolate, ObjectVisitor* visitor) const {
bool allocation_pointer_found_in_space =
-      (allocation_info_->top() == allocation_info_->limit());
+      (allocation_info_.top() == allocation_info_.limit());
size_t external_space_bytes[kNumTypes];
size_t external_page_bytes[kNumTypes];
@@ -779,7 +779,7 @@ void PagedSpaceBase::Verify(Isolate* isolate, ObjectVisitor* visitor) const {
external_page_bytes[static_cast<ExternalBackingStoreType>(i)] = 0;
}
-    if (page == Page::FromAllocationAreaAddress(allocation_info_->top())) {
+    if (page == Page::FromAllocationAreaAddress(allocation_info_.top())) {
allocation_pointer_found_in_space = true;
}
CHECK(page->SweepingDone());
@@ -915,7 +915,7 @@ void PagedSpaceBase::VerifyCountersBeforeConcurrentSweeping() const {
void PagedSpaceBase::UpdateInlineAllocationLimit(size_t min_size) {
// Ensure there are no unaccounted allocations.
-  DCHECK_EQ(allocation_info_->start(), allocation_info_->top());
+  DCHECK_EQ(allocation_info_.start(), allocation_info_.top());
Address new_limit = ComputeLimit(top(), limit(), min_size);
DCHECK_LE(top(), new_limit);
......
@@ -94,8 +94,8 @@ class V8_EXPORT_PRIVATE PagedSpaceBase
// Creates a space with an id.
PagedSpaceBase(
Heap* heap, AllocationSpace id, Executability executable,
-      FreeList* free_list, AllocationCounter* allocation_counter,
-      LinearAllocationArea* allocation_info,
+      FreeList* free_list, AllocationCounter& allocation_counter,
+      LinearAllocationArea& allocation_info,
LinearAreaOriginalData& linear_area_original_data,
CompactionSpaceKind compaction_space_kind = CompactionSpaceKind::kNone);
@@ -432,9 +432,9 @@ class V8_EXPORT_PRIVATE PagedSpace : public PagedSpaceBase {
// Creates a space with an id.
PagedSpace(
Heap* heap, AllocationSpace id, Executability executable,
-      FreeList* free_list, LinearAllocationArea* allocation_info,
+      FreeList* free_list, LinearAllocationArea& allocation_info,
      CompactionSpaceKind compaction_space_kind = CompactionSpaceKind::kNone)
-      : PagedSpaceBase(heap, id, executable, free_list, &allocation_counter_,
+      : PagedSpaceBase(heap, id, executable, free_list, allocation_counter_,
allocation_info, linear_area_original_data_,
compaction_space_kind) {}
@@ -451,7 +451,7 @@ class V8_EXPORT_PRIVATE CompactionSpace final : public PagedSpace {
CompactionSpace(Heap* heap, AllocationSpace id, Executability executable,
CompactionSpaceKind compaction_space_kind)
: PagedSpace(heap, id, executable, FreeList::CreateFreeList(),
-                   &allocation_info_, compaction_space_kind) {
+                   allocation_info_, compaction_space_kind) {
DCHECK(is_compaction_space());
}
@@ -511,7 +511,7 @@ class OldSpace final : public PagedSpace {
public:
// Creates an old space object. The constructor does not allocate pages
// from OS.
-  explicit OldSpace(Heap* heap, LinearAllocationArea* allocation_info)
+  explicit OldSpace(Heap* heap, LinearAllocationArea& allocation_info)
: PagedSpace(heap, OLD_SPACE, NOT_EXECUTABLE, FreeList::CreateFreeList(),
allocation_info) {}
@@ -536,7 +536,7 @@ class CodeSpace final : public PagedSpace {
// from OS.
explicit CodeSpace(Heap* heap)
: PagedSpace(heap, CODE_SPACE, EXECUTABLE, FreeList::CreateFreeList(),
-                   &paged_allocation_info_) {}
+                   paged_allocation_info_) {}
private:
LinearAllocationArea paged_allocation_info_;
@@ -550,7 +550,7 @@ class MapSpace final : public PagedSpace {
// Creates a map space object.
explicit MapSpace(Heap* heap)
: PagedSpace(heap, MAP_SPACE, NOT_EXECUTABLE, FreeList::CreateFreeList(),
-                   &paged_allocation_info_) {}
+                   paged_allocation_info_) {}
int RoundSizeDownToObjectAlignment(int size) const override {
if (base::bits::IsPowerOfTwo(Map::kSize)) {
......
@@ -214,11 +214,11 @@ MemoryChunk* MemoryChunkIterator::Next() {
AllocationResult SpaceWithLinearArea::AllocateFastUnaligned(
int size_in_bytes, AllocationOrigin origin) {
-  if (!allocation_info_->CanIncrementTop(size_in_bytes)) {
+  if (!allocation_info_.CanIncrementTop(size_in_bytes)) {
return AllocationResult::Failure();
}
HeapObject obj =
-      HeapObject::FromAddress(allocation_info_->IncrementTop(size_in_bytes));
+      HeapObject::FromAddress(allocation_info_.IncrementTop(size_in_bytes));
MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj.address(), size_in_bytes);
@@ -228,15 +228,15 @@ AllocationResult SpaceWithLinearArea::AllocateFastUnaligned(
AllocationResult SpaceWithLinearArea::AllocateFastAligned(
int size_in_bytes, int* result_aligned_size_in_bytes,
AllocationAlignment alignment, AllocationOrigin origin) {
-  Address top = allocation_info_->top();
+  Address top = allocation_info_.top();
int filler_size = Heap::GetFillToAlign(top, alignment);
int aligned_size_in_bytes = size_in_bytes + filler_size;
-  if (!allocation_info_->CanIncrementTop(aligned_size_in_bytes)) {
+  if (!allocation_info_.CanIncrementTop(aligned_size_in_bytes)) {
return AllocationResult::Failure();
}
HeapObject obj = HeapObject::FromAddress(
-      allocation_info_->IncrementTop(aligned_size_in_bytes));
+      allocation_info_.IncrementTop(aligned_size_in_bytes));
if (result_aligned_size_in_bytes)
*result_aligned_size_in_bytes = aligned_size_in_bytes;
@@ -276,7 +276,7 @@ AllocationResult SpaceWithLinearArea::AllocateRawUnaligned(
}
DCHECK_EQ(max_aligned_size, size_in_bytes);
-  DCHECK_LE(allocation_info_->start(), allocation_info_->top());
+  DCHECK_LE(allocation_info_.start(), allocation_info_.top());
AllocationResult result = AllocateFastUnaligned(size_in_bytes, origin);
DCHECK(!result.IsFailure());
@@ -300,7 +300,7 @@ AllocationResult SpaceWithLinearArea::AllocateRawAligned(
}
DCHECK_GE(max_aligned_size, size_in_bytes);
-  DCHECK_LE(allocation_info_->start(), allocation_info_->top());
+  DCHECK_LE(allocation_info_.start(), allocation_info_.top());
int aligned_size_in_bytes;
......
@@ -224,16 +224,16 @@ void Page::DestroyBlackAreaBackground(Address start, Address end) {
// PagedSpace implementation
void Space::AddAllocationObserver(AllocationObserver* observer) {
-  allocation_counter_->AddAllocationObserver(observer);
+  allocation_counter_.AddAllocationObserver(observer);
}
void Space::RemoveAllocationObserver(AllocationObserver* observer) {
-  allocation_counter_->RemoveAllocationObserver(observer);
+  allocation_counter_.RemoveAllocationObserver(observer);
}
-void Space::PauseAllocationObservers() { allocation_counter_->Pause(); }
+void Space::PauseAllocationObservers() { allocation_counter_.Pause(); }
-void Space::ResumeAllocationObservers() { allocation_counter_->Resume(); }
+void Space::ResumeAllocationObservers() { allocation_counter_.Resume(); }
Address SpaceWithLinearArea::ComputeLimit(Address start, Address end,
size_t min_size) const {
@@ -244,13 +244,13 @@ Address SpaceWithLinearArea::ComputeLimit(Address start, Address end,
return start + min_size;
}
-  if (SupportsAllocationObserver() && allocation_counter_->IsActive()) {
+  if (SupportsAllocationObserver() && allocation_counter_.IsActive()) {
// Ensure there are no unaccounted allocations.
-    DCHECK_EQ(allocation_info_->start(), allocation_info_->top());
+    DCHECK_EQ(allocation_info_.start(), allocation_info_.top());
    // Generated code may allocate inline from the linear allocation area.
    // To make sure we can observe these allocations, we use a lower limit.
-    size_t step = allocation_counter_->NextBytes();
+    size_t step = allocation_counter_.NextBytes();
DCHECK_NE(step, 0);
size_t rounded_step =
RoundSizeDownToObjectAlignment(static_cast<int>(step - 1));
@@ -334,7 +334,7 @@ LocalAllocationBuffer& LocalAllocationBuffer::operator=(
}
void SpaceWithLinearArea::AddAllocationObserver(AllocationObserver* observer) {
-  if (!allocation_counter_->IsStepInProgress()) {
+  if (!allocation_counter_.IsStepInProgress()) {
AdvanceAllocationObservers();
Space::AddAllocationObserver(observer);
UpdateInlineAllocationLimit(0);
@@ -345,7 +345,7 @@ void SpaceWithLinearArea::AddAllocationObserver(AllocationObserver* observer) {
void SpaceWithLinearArea::RemoveAllocationObserver(
AllocationObserver* observer) {
-  if (!allocation_counter_->IsStepInProgress()) {
+  if (!allocation_counter_.IsStepInProgress()) {
AdvanceAllocationObservers();
Space::RemoveAllocationObserver(observer);
UpdateInlineAllocationLimit(0);
@@ -366,16 +366,16 @@ void SpaceWithLinearArea::ResumeAllocationObservers() {
}
void SpaceWithLinearArea::AdvanceAllocationObservers() {
-  if (allocation_info_->top() &&
-      allocation_info_->start() != allocation_info_->top()) {
-    allocation_counter_->AdvanceAllocationObservers(allocation_info_->top() -
-                                                    allocation_info_->start());
+  if (allocation_info_.top() &&
+      allocation_info_.start() != allocation_info_.top()) {
+    allocation_counter_.AdvanceAllocationObservers(allocation_info_.top() -
+                                                   allocation_info_.start());
MarkLabStartInitialized();
}
}
void SpaceWithLinearArea::MarkLabStartInitialized() {
-  allocation_info_->ResetStart();
+  allocation_info_.ResetStart();
if (identity() == NEW_SPACE) {
heap()->new_space()->MoveOriginalTopForward();
@@ -401,16 +401,16 @@ void SpaceWithLinearArea::InvokeAllocationObservers(
DCHECK(size_in_bytes == aligned_size_in_bytes ||
aligned_size_in_bytes == allocation_size);
-  if (!SupportsAllocationObserver() || !allocation_counter_->IsActive()) return;
+  if (!SupportsAllocationObserver() || !allocation_counter_.IsActive()) return;
-  if (allocation_size >= allocation_counter_->NextBytes()) {
+  if (allocation_size >= allocation_counter_.NextBytes()) {
// Only the first object in a LAB should reach the next step.
-    DCHECK_EQ(soon_object, allocation_info_->start() + aligned_size_in_bytes -
-                               size_in_bytes);
+    DCHECK_EQ(soon_object,
+              allocation_info_.start() + aligned_size_in_bytes - size_in_bytes);
// Right now the LAB only contains that one object.
-    DCHECK_EQ(allocation_info_->top() + allocation_size - aligned_size_in_bytes,
-              allocation_info_->limit());
+    DCHECK_EQ(allocation_info_.top() + allocation_size - aligned_size_in_bytes,
+              allocation_info_.limit());
// Ensure that there is a valid object
if (identity() == CODE_SPACE) {
@@ -423,29 +423,29 @@ void SpaceWithLinearArea::InvokeAllocationObservers(
#if DEBUG
// Ensure that allocation_info_ isn't modified during one of the
// AllocationObserver::Step methods.
-  LinearAllocationArea saved_allocation_info = *allocation_info_;
+  LinearAllocationArea saved_allocation_info = allocation_info_;
#endif
// Run AllocationObserver::Step through the AllocationCounter.
-  allocation_counter_->InvokeAllocationObservers(soon_object, size_in_bytes,
-                                                 allocation_size);
+  allocation_counter_.InvokeAllocationObservers(soon_object, size_in_bytes,
+                                                allocation_size);
// Ensure that start/top/limit didn't change.
-  DCHECK_EQ(saved_allocation_info.start(), allocation_info_->start());
-  DCHECK_EQ(saved_allocation_info.top(), allocation_info_->top());
-  DCHECK_EQ(saved_allocation_info.limit(), allocation_info_->limit());
+  DCHECK_EQ(saved_allocation_info.start(), allocation_info_.start());
+  DCHECK_EQ(saved_allocation_info.top(), allocation_info_.top());
+  DCHECK_EQ(saved_allocation_info.limit(), allocation_info_.limit());
}
-  DCHECK_IMPLIES(allocation_counter_->IsActive(),
-                 (allocation_info_->limit() - allocation_info_->start()) <
-                     allocation_counter_->NextBytes());
+  DCHECK_IMPLIES(allocation_counter_.IsActive(),
+                 (allocation_info_.limit() - allocation_info_.start()) <
+                     allocation_counter_.NextBytes());
}
#if DEBUG
void SpaceWithLinearArea::VerifyTop() const {
// Ensure validity of LAB: start <= top <= limit
-  DCHECK_LE(allocation_info_->start(), allocation_info_->top());
-  DCHECK_LE(allocation_info_->top(), allocation_info_->limit());
+  DCHECK_LE(allocation_info_.start(), allocation_info_.top());
+  DCHECK_LE(allocation_info_.top(), allocation_info_.limit());
}
#endif // DEBUG
......
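One subtlety in the hunk above: 'LinearAllocationArea saved_allocation_info = allocation_info_;' copy-initializes from the reference and therefore copies the referent, exactly as the old '*allocation_info_' did through the pointer. A generic illustration (stand-in type, not V8 code):

    struct LinearAllocationArea {
      int top = 0;
    };

    void Demo(LinearAllocationArea& allocation_info) {
      // Copy-initialization from a reference copies the referenced object,
      // just as 'saved = *allocation_info_' copied through the old pointer.
      LinearAllocationArea saved = allocation_info;
      saved.top = 42;  // mutates only the copy; the original LAB is untouched
    }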
@@ -113,11 +113,10 @@ class SemiSpace;
class V8_EXPORT_PRIVATE Space : public BaseSpace {
public:
Space(Heap* heap, AllocationSpace id, FreeList* free_list,
-        AllocationCounter* allocation_counter)
+        AllocationCounter& allocation_counter)
: BaseSpace(heap, id),
free_list_(std::unique_ptr<FreeList>(free_list)),
allocation_counter_(allocation_counter) {
-    DCHECK_NOT_NULL(allocation_counter_);
external_backing_store_bytes_ =
new std::atomic<size_t>[ExternalBackingStoreType::kNumTypes];
external_backing_store_bytes_[ExternalBackingStoreType::kArrayBuffer] = 0;
@@ -204,7 +203,7 @@ class V8_EXPORT_PRIVATE Space : public BaseSpace {
std::unique_ptr<FreeList> free_list_;
-  AllocationCounter* const allocation_counter_;
+  AllocationCounter& allocation_counter_;
};
static_assert(sizeof(std::atomic<intptr_t>) == kSystemPointerSize);
@@ -511,8 +510,8 @@ class LinearAreaOriginalData {
class SpaceWithLinearArea : public Space {
public:
SpaceWithLinearArea(Heap* heap, AllocationSpace id, FreeList* free_list,
-                      AllocationCounter* allocation_counter,
-                      LinearAllocationArea* allocation_info,
+                      AllocationCounter& allocation_counter,
+                      LinearAllocationArea& allocation_info,
LinearAreaOriginalData& linear_area_original_data)
: Space(heap, id, free_list, allocation_counter),
allocation_info_(allocation_info),
@@ -521,17 +520,17 @@ class SpaceWithLinearArea : public Space {
virtual bool SupportsAllocationObserver() const = 0;
// Returns the allocation pointer in this space.
-  Address top() const { return allocation_info_->top(); }
-  Address limit() const { return allocation_info_->limit(); }
+  Address top() const { return allocation_info_.top(); }
+  Address limit() const { return allocation_info_.limit(); }
// The allocation top address.
Address* allocation_top_address() const {
-    return allocation_info_->top_address();
+    return allocation_info_.top_address();
}
// The allocation limit address.
Address* allocation_limit_address() const {
-    return allocation_info_->limit_address();
+    return allocation_info_.limit_address();
}
// Methods needed for allocation observers.
@@ -633,7 +632,7 @@ class SpaceWithLinearArea : public Space {
V8_EXPORT_PRIVATE virtual void VerifyTop() const;
#endif // DEBUG
-  LinearAllocationArea* const allocation_info_;
+  LinearAllocationArea& allocation_info_;
LinearAreaOriginalData& linear_area_original_data_;
bool use_lab_ = true;
......
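Worth noting for the new reference members above ('AllocationCounter& allocation_counter_;', 'LinearAllocationArea& allocation_info_;'): as a general C++ consequence, shown here with a generic sketch rather than V8 code, a reference member must be bound in the constructor's initializer list, can never be reseated afterwards, and implicitly deletes the class's copy-assignment operator.

    struct LinearAllocationArea {};

    class SpaceSketch {
     public:
      // The reference must be bound here; a SpaceSketch cannot be constructed
      // first and have its allocation area attached later.
      explicit SpaceSketch(LinearAllocationArea& info) : allocation_info_(info) {}

     private:
      // Cannot be reseated after construction; the implicit copy-assignment
      // operator is deleted because of this member.
      LinearAllocationArea& allocation_info_;
    };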
@@ -219,7 +219,7 @@ TEST(MemoryAllocator) {
LinearAllocationArea allocation_info;
int total_pages = 0;
-  OldSpace faked_space(heap, &allocation_info);
+  OldSpace faked_space(heap, allocation_info);
CHECK(!faked_space.first_page());
CHECK(!faked_space.last_page());
Page* first_page = memory_allocator->AllocatePage(
@@ -303,7 +303,7 @@ TEST(SemiSpaceNewSpace) {
std::unique_ptr<SemiSpaceNewSpace> new_space =
std::make_unique<SemiSpaceNewSpace>(
heap, CcTest::heap()->InitialSemiSpaceSize(),
-          CcTest::heap()->InitialSemiSpaceSize(), &allocation_info);
+          CcTest::heap()->InitialSemiSpaceSize(), allocation_info);
CHECK(new_space->MaximumCapacity());
while (new_space->Available() >= kMaxRegularHeapObjectSize) {
@@ -326,7 +326,7 @@ TEST(PagedNewSpace) {
std::unique_ptr<PagedNewSpace> new_space = std::make_unique<PagedNewSpace>(
heap, CcTest::heap()->InitialSemiSpaceSize(),
-      CcTest::heap()->InitialSemiSpaceSize(), &allocation_info);
+      CcTest::heap()->InitialSemiSpaceSize(), allocation_info);
CHECK(new_space->MaximumCapacity());
AllocationResult allocation_result;
@@ -346,7 +346,7 @@ TEST(OldSpace) {
TestMemoryAllocatorScope test_allocator_scope(isolate, heap->MaxReserved());
LinearAllocationArea allocation_info;
-  OldSpace* s = new OldSpace(heap, &allocation_info);
+  OldSpace* s = new OldSpace(heap, allocation_info);
CHECK_NOT_NULL(s);
while (s->Available() > 0) {
@@ -857,7 +857,7 @@ TEST(NoMemoryForNewPage) {
TestMemoryAllocatorScope test_allocator_scope(isolate, 0, &failing_allocator);
MemoryAllocator* memory_allocator = test_allocator_scope.allocator();
LinearAllocationArea allocation_info;
-  OldSpace faked_space(heap, &allocation_info);
+  OldSpace faked_space(heap, allocation_info);
Page* page = memory_allocator->AllocatePage(
MemoryAllocator::AllocationMode::kRegular,
static_cast<PagedSpace*>(&faked_space), NOT_EXECUTABLE);
......