Commit 8403d5f1 authored by Dominik Inführ, committed by Commit Bot

[heap] Move allocation_observers_paused_ into AllocationCounter

Bug: v8:10315
Change-Id: Ie36035db0a1a2fa32bfec17eca7cf3ed0c91ca29
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2315991
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#69026}
parent 9458d625
@@ -16,6 +16,8 @@ class AllocationObserver;
 
 class AllocationCounter {
  public:
+  AllocationCounter() : paused_(false) {}
+
   auto begin() { return allocation_observers_.begin(); }
   auto end() { return allocation_observers_.end(); }
 
@@ -25,8 +27,23 @@ class AllocationCounter {
   bool HasAllocationObservers() { return !allocation_observers_.empty(); }
 
   size_t NumberAllocationObservers() { return allocation_observers_.size(); }
 
+  bool IsActive() { return !IsPaused() && HasAllocationObservers(); }
+
+  void Pause() {
+    DCHECK(!paused_);
+    paused_ = true;
+  }
+
+  void Resume() {
+    DCHECK(paused_);
+    paused_ = false;
+  }
+
  private:
+  bool IsPaused() { return paused_; }
+
   std::vector<AllocationObserver*> allocation_observers_;
+  bool paused_;
 };
 
 // -----------------------------------------------------------------------------
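For reference, the counter interface introduced above is self-contained enough to exercise on its own. A minimal sketch, assuming a plain assert() stand-in for V8's DCHECK macro and an empty AllocationObserver stub in place of the real class:

// Standalone sketch of the new AllocationCounter pause state.
// DCHECK and AllocationObserver are stand-ins, not V8's real definitions.
#include <cassert>
#include <vector>

#define DCHECK(condition) assert(condition)

class AllocationObserver {};  // stub for illustration only

class AllocationCounter {
 public:
  AllocationCounter() : paused_(false) {}

  bool HasAllocationObservers() { return !allocation_observers_.empty(); }

  // Observers only fire while the counter is not paused.
  bool IsActive() { return !IsPaused() && HasAllocationObservers(); }

  // Pause/Resume must be balanced and are not reentrant.
  void Pause() {
    DCHECK(!paused_);
    paused_ = true;
  }

  void Resume() {
    DCHECK(paused_);
    paused_ = false;
  }

 private:
  bool IsPaused() { return paused_; }

  std::vector<AllocationObserver*> allocation_observers_;
  bool paused_;
};

int main() {
  AllocationCounter counter;
  DCHECK(!counter.IsActive());  // no observers registered, so never active
  counter.Pause();
  DCHECK(!counter.IsActive());  // paused: observers must not fire
  counter.Resume();             // balances the Pause() above
  return 0;
}

Note that pausing only suppresses IsActive(); registered observers stay in allocation_observers_ and become active again after Resume().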
@@ -466,7 +466,7 @@ bool NewSpace::Rebalance() {
 
 void NewSpace::UpdateLinearAllocationArea() {
   // Make sure there is no unaccounted allocations.
-  DCHECK(!AllocationObserversActive() || top_on_previous_step_ == top());
+  DCHECK(!allocation_counter_.IsActive() || top_on_previous_step_ == top());
 
   Address new_top = to_space_.page_low();
   BasicMemoryChunk::UpdateHighWaterMark(allocation_info_.top());
@@ -257,15 +257,13 @@ void Space::RemoveAllocationObserver(AllocationObserver* observer) {
   StartNextInlineAllocationStep();
 }
 
-void Space::PauseAllocationObservers() { allocation_observers_paused_ = true; }
+void Space::PauseAllocationObservers() { allocation_counter_.Pause(); }
 
-void Space::ResumeAllocationObservers() {
-  allocation_observers_paused_ = false;
-}
+void Space::ResumeAllocationObservers() { allocation_counter_.Resume(); }
 
 void Space::AllocationStep(int bytes_since_last, Address soon_object,
                            int size) {
-  if (!AllocationObserversActive()) {
+  if (!allocation_counter_.IsActive()) {
     return;
   }
 
@@ -279,7 +277,7 @@ void Space::AllocationStep(int bytes_since_last, Address soon_object,
 }
 
 void Space::AllocationStepAfterMerge(Address first_object_in_chunk, int size) {
-  if (!AllocationObserversActive()) {
+  if (!allocation_counter_.IsActive()) {
     return;
   }
 
@@ -308,7 +306,7 @@ Address SpaceWithLinearArea::ComputeLimit(Address start, Address end,
   if (heap()->inline_allocation_disabled()) {
     // Fit the requested area exactly.
     return start + min_size;
-  } else if (SupportsInlineAllocation() && AllocationObserversActive()) {
+  } else if (SupportsInlineAllocation() && allocation_counter_.IsActive()) {
     // Generated code may allocate inline from the linear allocation area for.
     // To make sure we can observe these allocations, we use a lower limit.
     size_t step = GetNextInlineAllocationStepSize();
 
@@ -385,7 +383,7 @@ void SpaceWithLinearArea::StartNextInlineAllocationStep() {
     return;
   }
 
-  if (AllocationObserversActive()) {
+  if (allocation_counter_.IsActive()) {
     top_on_previous_step_ = top();
     UpdateInlineAllocationLimit(0);
   } else {
 
@@ -396,7 +394,7 @@ void SpaceWithLinearArea::StartNextInlineAllocationStep() {
 void SpaceWithLinearArea::AddAllocationObserver(AllocationObserver* observer) {
   InlineAllocationStep(top(), top(), kNullAddress, 0);
   Space::AddAllocationObserver(observer);
-  DCHECK_IMPLIES(top_on_previous_step_, AllocationObserversActive());
+  DCHECK_IMPLIES(top_on_previous_step_, allocation_counter_.IsActive());
 }
 
 void SpaceWithLinearArea::RemoveAllocationObserver(
 
@@ -406,7 +404,7 @@ void SpaceWithLinearArea::RemoveAllocationObserver(
                                                  : top();
   InlineAllocationStep(top(), top_for_next_step, kNullAddress, 0);
   Space::RemoveAllocationObserver(observer);
-  DCHECK_IMPLIES(top_on_previous_step_, AllocationObserversActive());
+  DCHECK_IMPLIES(top_on_previous_step_, allocation_counter_.IsActive());
 }
 
 void SpaceWithLinearArea::PauseAllocationObservers() {
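The DCHECKs in Pause() and Resume() require every pause to be matched by exactly one resume, so Space::PauseAllocationObservers() and Space::ResumeAllocationObservers() above must be called in balanced, non-nested pairs. A hypothetical RAII wrapper (illustration only, not part of this CL or the V8 API) makes that contract explicit when combined with the AllocationCounter sketch from earlier:

// Hypothetical scope guard: guarantees exactly one Resume() per Pause(),
// even on early return, which is why the strict DCHECK balancing holds.
class PauseAllocationCounterScope {
 public:
  explicit PauseAllocationCounterScope(AllocationCounter& counter)
      : counter_(counter) {
    counter_.Pause();
  }
  ~PauseAllocationCounterScope() { counter_.Resume(); }

  PauseAllocationCounterScope(const PauseAllocationCounterScope&) = delete;
  PauseAllocationCounterScope& operator=(const PauseAllocationCounterScope&) =
      delete;

 private:
  AllocationCounter& counter_;
};

In V8, pausing is typically driven from heap-level code for all spaces at once; the sketch only illustrates why the balancing assertion is safe.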
@@ -112,7 +112,6 @@ class V8_EXPORT_PRIVATE Space : public BaseSpace {
  public:
   Space(Heap* heap, AllocationSpace id, FreeList* free_list)
       : BaseSpace(heap, id),
-        allocation_observers_paused_(false),
         free_list_(std::unique_ptr<FreeList>(free_list)) {
     external_backing_store_bytes_ =
         new std::atomic<size_t>[ExternalBackingStoreType::kNumTypes];
 
@@ -193,10 +192,6 @@ class V8_EXPORT_PRIVATE Space : public BaseSpace {
  protected:
   intptr_t GetNextInlineAllocationStepSize();
 
-  bool AllocationObserversActive() {
-    return !allocation_observers_paused_ &&
-           allocation_counter_.HasAllocationObservers();
-  }
-
   AllocationCounter allocation_counter_;
 
@@ -206,8 +201,6 @@ class V8_EXPORT_PRIVATE Space : public BaseSpace {
   // Tracks off-heap memory used by this space.
   std::atomic<size_t>* external_backing_store_bytes_;
 
-  bool allocation_observers_paused_;
-
   std::unique_ptr<FreeList> free_list_;
 
   DISALLOW_COPY_AND_ASSIGN(Space);