Commit cd81dd6d authored by ofrobots, committed by Commit bot

[heap] pause/resume inline allocation observers around scavenge

We should not count the bump-pointer allocations done during scavenge, since they
merely copy surviving objects rather than allocate new ones. The inline allocation
observers were receiving unnecessary notifications for them.

R=hpayer@chromium.org, ulan@chromium.org
BUG=

Review URL: https://codereview.chromium.org/1465633002

Cr-Commit-Position: refs/heads/master@{#32153}
parent b7ab5ba4
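For orientation, here is a condensed sketch of the control flow this patch establishes in Heap::Scavenge(). The body is heavily abbreviated and the elided work is only hinted at in comments; treat it as a reading aid, not the literal V8 code shown in the hunks below.

  // Sketch only: Heap::Scavenge() after this patch, abbreviated.
  void Heap::Scavenge() {
    // Scavenge copies surviving objects with bump-pointer allocation in
    // to-space; those writes move existing objects rather than allocate new
    // ones, so observers must not count them.
    new_space()->PauseInlineAllocationObservers();

    // ... evacuate live objects from from-space into to-space / old space ...

    new_space_.set_age_mark(new_space_.top());

    // Re-enable step accounting; the next real allocation starts a fresh step.
    new_space()->ResumeInlineAllocationObservers();
  }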
@@ -1583,6 +1583,10 @@ void Heap::Scavenge() {
   // trigger one during scavenge: scavenges allocation should always succeed.
   AlwaysAllocateScope scope(isolate());
 
+  // Bump-pointer allocations done during scavenge are not real allocations.
+  // Pause the inline allocation steps.
+  new_space()->PauseInlineAllocationObservers();
+
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) VerifyNonPointerSpacePointers(this);
 #endif
@@ -1712,9 +1716,7 @@ void Heap::Scavenge() {
   // Set age mark.
   new_space_.set_age_mark(new_space_.top());
 
-  // We start a new step without accounting the objects copied into to space
-  // as those are not allocations.
-  new_space_.StartNextInlineAllocationStep();
+  new_space()->ResumeInlineAllocationObservers();
 
   array_buffer_tracker()->FreeDead(true);
......
@@ -1509,7 +1509,8 @@ void NewSpace::UpdateInlineAllocationLimit(int size_in_bytes) {
     Address high = to_space_.page_high();
     Address new_top = allocation_info_.top() + size_in_bytes;
     allocation_info_.set_limit(Min(new_top, high));
-  } else if (top_on_previous_step_ == 0) {
+  } else if (inline_allocation_observers_paused_ ||
+             top_on_previous_step_ == 0) {
     // Normal limit is the end of the current page.
     allocation_info_.set_limit(to_space_.page_high());
   } else {
@@ -1602,6 +1603,7 @@ bool NewSpace::EnsureAllocation(int size_in_bytes,
 void NewSpace::StartNextInlineAllocationStep() {
+  DCHECK(!inline_allocation_observers_paused_);
   top_on_previous_step_ =
       inline_allocation_observers_.length() ? allocation_info_.top() : 0;
   UpdateInlineAllocationLimit(0);
@@ -1636,6 +1638,22 @@ void NewSpace::RemoveInlineAllocationObserver(
 }
 
+void NewSpace::PauseInlineAllocationObservers() {
+  // Do a step to account for memory allocated so far.
+  InlineAllocationStep(top(), top());
+  inline_allocation_observers_paused_ = true;
+  top_on_previous_step_ = 0;
+  UpdateInlineAllocationLimit(0);
+}
+
+void NewSpace::ResumeInlineAllocationObservers() {
+  DCHECK(top_on_previous_step_ == 0);
+  inline_allocation_observers_paused_ = false;
+  StartNextInlineAllocationStep();
+}
+
 void NewSpace::InlineAllocationStep(Address top, Address new_top) {
   if (top_on_previous_step_) {
     int bytes_allocated = static_cast<int>(top - top_on_previous_step_);
......
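The contract implied by the changes above: PauseInlineAllocationObservers() first performs an InlineAllocationStep so that bytes allocated since the last notification are not lost, then stops step accounting (and, through the new paused check in UpdateInlineAllocationLimit(), lets the allocation limit snap back to the end of the page); ResumeInlineAllocationObservers() starts a fresh step. The standalone toy model below illustrates the same observable behavior. It is plain C++ with made-up names, not V8 code, and it tracks bytes explicitly instead of via top-pointer deltas, which is why it needs no flush step at pause time; the 96-byte step size is chosen only to mirror the 48 + 48 arithmetic checked in the test hunk at the end of this CL.

  // toy_observers.cc -- standalone model of pause/resume semantics (not V8 code).
  #include <cstdio>
  #include <vector>

  class ToyObserver {
   public:
    explicit ToyObserver(long step_size) : step_size_(step_size) {}
    void Step() { ++count_; }              // invoked roughly every step_size_ bytes
    long step_size() const { return step_size_; }
    long count() const { return count_; }
   private:
    long step_size_;
    long count_ = 0;
  };

  class ToySpace {
   public:
    void AddObserver(ToyObserver* observer) { entries_.push_back({observer, 0}); }
    void Pause() { paused_ = true; }       // copies during scavenge become invisible
    void Resume() { paused_ = false; }     // later allocations are counted again
    void Allocate(long bytes) {
      if (paused_) return;                 // ignore "allocations" made while paused
      for (Entry& e : entries_) {
        e.pending += bytes;                // bytes pending before a pause survive it
        if (e.pending >= e.observer->step_size()) {
          e.observer->Step();
          e.pending = 0;                   // start the next step from zero
        }
      }
    }
   private:
    struct Entry { ToyObserver* observer; long pending; };
    std::vector<Entry> entries_;
    bool paused_ = false;
  };

  int main() {
    ToySpace space;
    ToyObserver observer(96);              // notify about every 96 bytes
    space.AddObserver(&observer);

    space.Allocate(48);                    // 48 pending, below the 96-byte step
    space.Pause();
    space.Allocate(384);                   // ignored: no notification while paused
    space.Resume();
    space.Allocate(48);                    // 48 + 48 >= 96 -> exactly one notification
    std::printf("count = %ld\n", observer.count());  // prints: count = 1
    return 0;
  }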
@@ -2559,7 +2559,8 @@ class NewSpace : public Space {
         to_space_(heap, kToSpace),
         from_space_(heap, kFromSpace),
         reservation_(),
-        top_on_previous_step_(0) {}
+        top_on_previous_step_(0),
+        inline_allocation_observers_paused_(false) {}
 
   // Sets up the new space using the given chunk.
   bool SetUp(int reserved_semispace_size_, int max_semi_space_size);
@@ -2732,7 +2733,6 @@ class NewSpace : public Space {
   void ResetAllocationInfo();
 
   void UpdateInlineAllocationLimit(int size_in_bytes);
-  void StartNextInlineAllocationStep();
 
   // Allows observation of inline allocation. The observer->Step() method gets
   // called after every step_size bytes have been allocated (approximately).
@@ -2743,6 +2743,9 @@ class NewSpace : public Space {
   // Removes a previously installed observer.
   void RemoveInlineAllocationObserver(InlineAllocationObserver* observer);
 
+  void PauseInlineAllocationObservers();
+  void ResumeInlineAllocationObservers();
+
   void DisableInlineAllocationSteps() {
     top_on_previous_step_ = 0;
     UpdateInlineAllocationLimit(0);
@@ -2846,8 +2849,8 @@ class NewSpace : public Space {
   // than the actual limit and and increasing it in steps to guarantee that the
   // observers are notified periodically.
   List<InlineAllocationObserver*> inline_allocation_observers_;
   Address top_on_previous_step_;
+  bool inline_allocation_observers_paused_;
 
   HistogramInfo* allocated_histogram_;
   HistogramInfo* promoted_histogram_;
@@ -2862,6 +2865,7 @@ class NewSpace : public Space {
   // different when we cross a page boundary or reset the space.
   void InlineAllocationStep(Address top, Address new_top);
   intptr_t GetNextInlineAllocationStepSize();
+  void StartNextInlineAllocationStep();
 
   friend class SemiSpaceIterator;
 };
......
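For readers who have not seen the observer API that the header changes above extend, a hedged usage sketch follows. AddInlineAllocationObserver and the exact Step() signature do not appear in these hunks, so the constructor and callback shape below are assumptions inferred from the header comment and from the Remove method, not quotations from the diff.

  // Hedged sketch of a NewSpace inline allocation observer; signatures assumed.
  class CountingObserver : public InlineAllocationObserver {
   public:
    explicit CountingObserver(intptr_t step_size)
        : InlineAllocationObserver(step_size) {}
    // Called after roughly step_size bytes of real new-space allocation,
    // but not for the copies made while observers are paused during scavenge.
    void Step(int /* bytes_allocated */) override { ++count_; }
    int count() const { return count_; }
   private:
    int count_ = 0;
  };

  // Presumed registration pattern (Add... is the assumed counterpart of the
  // RemoveInlineAllocationObserver declared above):
  //   CountingObserver observer(128);
  //   new_space->AddInlineAllocationObserver(&observer);
  //   ... allocate ...
  //   new_space->RemoveInlineAllocationObserver(&observer);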
@@ -872,10 +872,24 @@ UNINITIALIZED_TEST(InlineAllocationObserver) {
     CHECK_EQ(observer1.count(), 20);  // no more notifications.
     CHECK_EQ(observer2.count(), 3);   // this one is still active.
 
+    // Ensure that Pause/ResumeInlineAllocationObservers work correctly.
+    AllocateUnaligned(new_space, 48);
+    CHECK_EQ(observer2.count(), 3);
+    new_space->PauseInlineAllocationObservers();
+    CHECK_EQ(observer2.count(), 3);
+    AllocateUnaligned(new_space, 384);
+    CHECK_EQ(observer2.count(), 3);
+    new_space->ResumeInlineAllocationObservers();
+    CHECK_EQ(observer2.count(), 3);
+    // Coupled with the 48 bytes allocated before the pause, another 48 bytes
+    // allocated here should trigger a notification.
+    AllocateUnaligned(new_space, 48);
+    CHECK_EQ(observer2.count(), 4);
+
     new_space->RemoveInlineAllocationObserver(&observer2);
     AllocateUnaligned(new_space, 384);
     CHECK_EQ(observer1.count(), 20);
-    CHECK_EQ(observer2.count(), 3);
+    CHECK_EQ(observer2.count(), 4);
   }
   isolate->Dispose();
 }
......