Commit 203e5916 authored by Ali Ijaz Sheikh, committed by Commit Bot

[heap] make allocation_step_in_progress_ a heap property

Don't start new steps recursively if a step is already in progress.
Having this property on a space is not sufficient, as an allocation
is a global (heap-wide) event. Computing the next step size, for example,
is a property of all observers in existence rather than of the spaces in
existence.

In this case a failure was due to the fact that we attempted to compute
the next step size while a given observer was mid-way through its step
triggered from a different space. bytes_to_next_step_ was partially
updated at that point.

BUG=v8:7313

Change-Id: Iaf632fce2cfd5ed49b0f41a69c3694e505e17d81
Reviewed-on: https://chromium-review.googlesource.com/887174
Commit-Queue: Ali Ijaz Sheikh <ofrobots@google.com>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#50909}
parent 06d8f6fa
...@@ -177,6 +177,7 @@ Heap::Heap() ...@@ -177,6 +177,7 @@ Heap::Heap()
raw_allocations_hash_(0), raw_allocations_hash_(0),
stress_marking_observer_(nullptr), stress_marking_observer_(nullptr),
stress_scavenge_observer_(nullptr), stress_scavenge_observer_(nullptr),
allocation_step_in_progress_(false),
max_marking_limit_reached_(0.0), max_marking_limit_reached_(0.0),
ms_count_(0), ms_count_(0),
gc_count_(0), gc_count_(0),
......
...@@ -1574,6 +1574,11 @@ class Heap { ...@@ -1574,6 +1574,11 @@ class Heap {
void RemoveAllocationObserversFromAllSpaces( void RemoveAllocationObserversFromAllSpaces(
AllocationObserver* observer, AllocationObserver* new_space_observer); AllocationObserver* observer, AllocationObserver* new_space_observer);
bool allocation_step_in_progress() { return allocation_step_in_progress_; }
void set_allocation_step_in_progress(bool val) {
allocation_step_in_progress_ = val;
}
// =========================================================================== // ===========================================================================
// Retaining path tracking. ================================================== // Retaining path tracking. ==================================================
// =========================================================================== // ===========================================================================
...@@ -2403,6 +2408,8 @@ class Heap { ...@@ -2403,6 +2408,8 @@ class Heap {
// Observer that can cause early scavenge start. // Observer that can cause early scavenge start.
StressScavengeObserver* stress_scavenge_observer_; StressScavengeObserver* stress_scavenge_observer_;
bool allocation_step_in_progress_;
// The maximum percent of the marking limit reached without causing marking. // The maximum percent of the marking limit reached without causing marking.
// This is tracked when specifying --fuzzer-gc-analysis. // This is tracked when specifying --fuzzer-gc-analysis.
double max_marking_limit_reached_; double max_marking_limit_reached_;
......
...@@ -1356,13 +1356,13 @@ void Space::AllocationStep(int bytes_since_last, Address soon_object, ...@@ -1356,13 +1356,13 @@ void Space::AllocationStep(int bytes_since_last, Address soon_object,
return; return;
} }
DCHECK(!allocation_step_in_progress_); DCHECK(!heap()->allocation_step_in_progress());
allocation_step_in_progress_ = true; heap()->set_allocation_step_in_progress(true);
heap()->CreateFillerObjectAt(soon_object, size, ClearRecordedSlots::kNo); heap()->CreateFillerObjectAt(soon_object, size, ClearRecordedSlots::kNo);
for (AllocationObserver* observer : allocation_observers_) { for (AllocationObserver* observer : allocation_observers_) {
observer->AllocationStep(bytes_since_last, soon_object, size); observer->AllocationStep(bytes_since_last, soon_object, size);
} }
allocation_step_in_progress_ = false; heap()->set_allocation_step_in_progress(false);
} }
intptr_t Space::GetNextInlineAllocationStepSize() { intptr_t Space::GetNextInlineAllocationStepSize() {
...@@ -2187,7 +2187,7 @@ bool NewSpace::EnsureAllocation(int size_in_bytes, ...@@ -2187,7 +2187,7 @@ bool NewSpace::EnsureAllocation(int size_in_bytes,
} }
void SpaceWithLinearArea::StartNextInlineAllocationStep() { void SpaceWithLinearArea::StartNextInlineAllocationStep() {
if (allocation_step_in_progress_) { if (heap()->allocation_step_in_progress()) {
// If we are mid-way through an existing step, don't start a new one. // If we are mid-way through an existing step, don't start a new one.
return; return;
} }
...@@ -2233,7 +2233,7 @@ void SpaceWithLinearArea::InlineAllocationStep(Address top, ...@@ -2233,7 +2233,7 @@ void SpaceWithLinearArea::InlineAllocationStep(Address top,
Address top_for_next_step, Address top_for_next_step,
Address soon_object, Address soon_object,
size_t size) { size_t size) {
if (allocation_step_in_progress_) { if (heap()->allocation_step_in_progress()) {
// Avoid starting a new step if we are mid-way through an existing one. // Avoid starting a new step if we are mid-way through an existing one.
return; return;
} }
......
...@@ -893,7 +893,6 @@ class Space : public Malloced { ...@@ -893,7 +893,6 @@ class Space : public Malloced {
public: public:
Space(Heap* heap, AllocationSpace id, Executability executable) Space(Heap* heap, AllocationSpace id, Executability executable)
: allocation_observers_paused_(false), : allocation_observers_paused_(false),
allocation_step_in_progress_(false),
heap_(heap), heap_(heap),
id_(id), id_(id),
executable_(executable), executable_(executable),
...@@ -980,7 +979,6 @@ class Space : public Malloced { ...@@ -980,7 +979,6 @@ class Space : public Malloced {
std::vector<AllocationObserver*> allocation_observers_; std::vector<AllocationObserver*> allocation_observers_;
bool allocation_observers_paused_; bool allocation_observers_paused_;
bool allocation_step_in_progress_;
protected: protected:
Heap* heap_; Heap* heap_;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment