Commit 63317662 authored by Ali Ijaz Sheikh, committed by Commit Bot

[heap] Add missing steps for Add/Remove observers (reland^2)

(Previously landed as #48513 / e770879e)
(Previously landed as #48606 / d0e3fb4f)

CQ_INCLUDE_TRYBOTS=master.tryserver.chromium.linux:linux_optional_gpu_tests_rel;master.tryserver.chromium.mac:mac_optional_gpu_tests_rel;master.tryserver.chromium.win:win_optional_gpu_tests_rel;master.tryserver.chromium.android:android_optional_gpu_tests_rel

Bug: 
Change-Id: I302fb52fb47b6c9d59dd2b28eb9923c37881c15b
Reviewed-on: https://chromium-review.googlesource.com/794857
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Ali Ijaz Sheikh <ofrobots@google.com>
Cr-Commit-Position: refs/heads/master@{#49763}
parent 6f026886
...@@ -6567,6 +6567,17 @@ void Heap::CreateObjectStats() { ...@@ -6567,6 +6567,17 @@ void Heap::CreateObjectStats() {
} }
} }
// Notifies the observer that |bytes_allocated| more bytes were allocated.
// When the accumulated total crosses the current step size, fires Step()
// with the exact overshoot included and re-arms the counter with a fresh
// step size from GetNextStepSize().
void AllocationObserver::AllocationStep(int bytes_allocated,
                                        Address soon_object, size_t size) {
  DCHECK_GE(bytes_allocated, 0);
  bytes_to_next_step_ -= bytes_allocated;
  if (bytes_to_next_step_ > 0) return;
  // bytes_to_next_step_ is <= 0 here, so the subtraction adds the overshoot
  // past the trigger point to the reported step length.
  Step(static_cast<int>(step_size_ - bytes_to_next_step_), soon_object, size);
  step_size_ = GetNextStepSize();
  bytes_to_next_step_ = step_size_;
}
namespace { namespace {
Map* GcSafeMapOfCodeSpaceObject(HeapObject* object) { Map* GcSafeMapOfCodeSpaceObject(HeapObject* object) {
......
...@@ -2800,15 +2800,7 @@ class AllocationObserver { ...@@ -2800,15 +2800,7 @@ class AllocationObserver {
// Called each time the observed space does an allocation step. This may be // Called each time the observed space does an allocation step. This may be
// more frequently than the step_size we are monitoring (e.g. when there are // more frequently than the step_size we are monitoring (e.g. when there are
// multiple observers, or when page or space boundary is encountered.) // multiple observers, or when page or space boundary is encountered.)
void AllocationStep(int bytes_allocated, Address soon_object, size_t size) { void AllocationStep(int bytes_allocated, Address soon_object, size_t size);
bytes_to_next_step_ -= bytes_allocated;
if (bytes_to_next_step_ <= 0) {
Step(static_cast<int>(step_size_ - bytes_to_next_step_), soon_object,
size);
step_size_ = GetNextStepSize();
bytes_to_next_step_ = step_size_;
}
}
protected: protected:
intptr_t step_size() const { return step_size_; } intptr_t step_size() const { return step_size_; }
......
...@@ -103,7 +103,7 @@ void ScavengeJob::ScheduleIdleTaskIfNeeded(Heap* heap, int bytes_allocated) { ...@@ -103,7 +103,7 @@ void ScavengeJob::ScheduleIdleTaskIfNeeded(Heap* heap, int bytes_allocated) {
void ScavengeJob::ScheduleIdleTask(Heap* heap) { void ScavengeJob::ScheduleIdleTask(Heap* heap) {
if (!idle_task_pending_) { if (!idle_task_pending_ && heap->use_tasks()) {
v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(heap->isolate()); v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(heap->isolate());
if (V8::GetCurrentPlatform()->IdleTasksEnabled(isolate)) { if (V8::GetCurrentPlatform()->IdleTasksEnabled(isolate)) {
idle_task_pending_ = true; idle_task_pending_ = true;
......
...@@ -462,6 +462,12 @@ AllocationResult NewSpace::AllocateRawUnaligned(int size_in_bytes) { ...@@ -462,6 +462,12 @@ AllocationResult NewSpace::AllocateRawUnaligned(int size_in_bytes) {
AllocationResult NewSpace::AllocateRaw(int size_in_bytes, AllocationResult NewSpace::AllocateRaw(int size_in_bytes,
AllocationAlignment alignment) { AllocationAlignment alignment) {
if (top() < top_on_previous_step_) {
// Generated code decreased the top() pointer to do folded allocations
DCHECK_EQ(Page::FromAddress(top()),
Page::FromAddress(top_on_previous_step_));
top_on_previous_step_ = top();
}
#ifdef V8_HOST_ARCH_32_BIT #ifdef V8_HOST_ARCH_32_BIT
return alignment == kDoubleAligned return alignment == kDoubleAligned
? AllocateRawAligned(size_in_bytes, kDoubleAligned) ? AllocateRawAligned(size_in_bytes, kDoubleAligned)
......
...@@ -1718,12 +1718,7 @@ void PagedSpace::EmptyAllocationInfo() { ...@@ -1718,12 +1718,7 @@ void PagedSpace::EmptyAllocationInfo() {
} }
} }
if (top_on_previous_step_) { InlineAllocationStep(current_top, nullptr, nullptr, 0);
DCHECK(current_top >= top_on_previous_step_);
AllocationStep(static_cast<int>(current_top - top_on_previous_step_),
nullptr, 0);
top_on_previous_step_ = 0;
}
SetTopAndLimit(nullptr, nullptr); SetTopAndLimit(nullptr, nullptr);
DCHECK_GE(current_limit, current_top); DCHECK_GE(current_limit, current_top);
Free(current_top, current_limit - current_top); Free(current_top, current_limit - current_top);
...@@ -2189,6 +2184,30 @@ void NewSpace::StartNextInlineAllocationStep() { ...@@ -2189,6 +2184,30 @@ void NewSpace::StartNextInlineAllocationStep() {
} }
} }
// TODO(ofrobots): refactor into SpaceWithLinearArea
// Registers |observer| on this space. A step is performed first so that
// bytes allocated before registration are accounted to the existing
// observers and the new observer starts from a clean baseline.
void NewSpace::AddAllocationObserver(AllocationObserver* observer) {
  InlineAllocationStep(top(), top(), nullptr, 0);
  Space::AddAllocationObserver(observer);
}
// TODO(ofrobots): refactor into SpaceWithLinearArea
// Registers |observer| on this space. A step is performed first so that
// bytes allocated before registration are accounted to the existing
// observers and the new observer starts from a clean baseline.
void PagedSpace::AddAllocationObserver(AllocationObserver* observer) {
  InlineAllocationStep(top(), top(), nullptr, 0);
  Space::AddAllocationObserver(observer);
}
// TODO(ofrobots): refactor into SpaceWithLinearArea
// Unregisters |observer| from this space. A step is performed first so the
// departing observer still sees the bytes allocated since the last step
// before it stops receiving notifications.
void NewSpace::RemoveAllocationObserver(AllocationObserver* observer) {
  InlineAllocationStep(top(), top(), nullptr, 0);
  Space::RemoveAllocationObserver(observer);
}
// TODO(ofrobots): refactor into SpaceWithLinearArea
// Unregisters |observer| from this space. A step is performed first so the
// departing observer still sees the bytes allocated since the last step
// before it stops receiving notifications.
void PagedSpace::RemoveAllocationObserver(AllocationObserver* observer) {
  InlineAllocationStep(top(), top(), nullptr, 0);
  Space::RemoveAllocationObserver(observer);
}
void NewSpace::PauseAllocationObservers() { void NewSpace::PauseAllocationObservers() {
// Do a step to account for memory allocated so far. // Do a step to account for memory allocated so far.
InlineAllocationStep(top(), top(), nullptr, 0); InlineAllocationStep(top(), top(), nullptr, 0);
...@@ -2199,10 +2218,7 @@ void NewSpace::PauseAllocationObservers() { ...@@ -2199,10 +2218,7 @@ void NewSpace::PauseAllocationObservers() {
void PagedSpace::PauseAllocationObservers() { void PagedSpace::PauseAllocationObservers() {
// Do a step to account for memory allocated so far. // Do a step to account for memory allocated so far.
if (top_on_previous_step_) { InlineAllocationStep(top(), nullptr, nullptr, 0);
int bytes_allocated = static_cast<int>(top() - top_on_previous_step_);
AllocationStep(bytes_allocated, nullptr, 0);
}
Space::PauseAllocationObservers(); Space::PauseAllocationObservers();
top_on_previous_step_ = 0; top_on_previous_step_ = 0;
} }
...@@ -2220,13 +2236,35 @@ void PagedSpace::ResumeAllocationObservers() { ...@@ -2220,13 +2236,35 @@ void PagedSpace::ResumeAllocationObservers() {
StartNextInlineAllocationStep(); StartNextInlineAllocationStep();
} }
// TODO(ofrobots): refactor into SpaceWithLinearArea
// Notifies allocation observers of the bytes allocated between
// |top_on_previous_step_| and |top|, then rebases the watermark to
// |new_top|. No-op when |top_on_previous_step_| is unset (no observers
// have been active). |soon_object|/|size| describe the upcoming
// allocation, if any.
void PagedSpace::InlineAllocationStep(Address top, Address new_top,
                                      Address soon_object, size_t size) {
  if (top_on_previous_step_) {
    if (top < top_on_previous_step_) {
      // Generated code decreased the top pointer to do folded allocations.
      DCHECK_NOT_NULL(top);
      DCHECK_EQ(Page::FromAllocationAreaAddress(top),
                Page::FromAllocationAreaAddress(top_on_previous_step_));
      top_on_previous_step_ = top;
    }
    int bytes_allocated = static_cast<int>(top - top_on_previous_step_);
    // Pass |size| through unchanged: AllocationStep takes a size_t, so the
    // former static_cast<int>(size) was a needless narrowing round-trip
    // (lossy for sizes above INT_MAX).
    AllocationStep(bytes_allocated, soon_object, size);
    top_on_previous_step_ = new_top;
  }
}
void NewSpace::InlineAllocationStep(Address top, Address new_top, void NewSpace::InlineAllocationStep(Address top, Address new_top,
Address soon_object, size_t size) { Address soon_object, size_t size) {
if (top_on_previous_step_) { if (top_on_previous_step_) {
int bytes_allocated = static_cast<int>(top - top_on_previous_step_); if (top < top_on_previous_step_) {
for (AllocationObserver* observer : allocation_observers_) { // Generated code decreased the top pointer to do folded allocations.
observer->AllocationStep(bytes_allocated, soon_object, size); DCHECK_NOT_NULL(top);
DCHECK_EQ(Page::FromAllocationAreaAddress(top),
Page::FromAllocationAreaAddress(top_on_previous_step_));
top_on_previous_step_ = top;
} }
int bytes_allocated = static_cast<int>(top - top_on_previous_step_);
AllocationStep(bytes_allocated, soon_object, static_cast<int>(size));
top_on_previous_step_ = new_top; top_on_previous_step_ = new_top;
} }
} }
......
...@@ -933,9 +933,11 @@ class Space : public Malloced { ...@@ -933,9 +933,11 @@ class Space : public Malloced {
// Identity used in error reporting. // Identity used in error reporting.
AllocationSpace identity() { return id_; } AllocationSpace identity() { return id_; }
void AddAllocationObserver(AllocationObserver* observer); V8_EXPORT_PRIVATE virtual void AddAllocationObserver(
AllocationObserver* observer);
void RemoveAllocationObserver(AllocationObserver* observer); V8_EXPORT_PRIVATE virtual void RemoveAllocationObserver(
AllocationObserver* observer);
V8_EXPORT_PRIVATE virtual void PauseAllocationObservers(); V8_EXPORT_PRIVATE virtual void PauseAllocationObservers();
...@@ -2108,9 +2110,14 @@ class V8_EXPORT_PRIVATE PagedSpace : NON_EXPORTED_BASE(public Space) { ...@@ -2108,9 +2110,14 @@ class V8_EXPORT_PRIVATE PagedSpace : NON_EXPORTED_BASE(public Space) {
void ResetFreeList() { free_list_.Reset(); } void ResetFreeList() { free_list_.Reset(); }
void AddAllocationObserver(AllocationObserver* observer) override;
void RemoveAllocationObserver(AllocationObserver* observer) override;
void PauseAllocationObservers() override; void PauseAllocationObservers() override;
void ResumeAllocationObservers() override; void ResumeAllocationObservers() override;
void InlineAllocationStep(Address top, Address new_top, Address soon_object,
size_t size);
// Empty space allocation info, returning unused area to free list. // Empty space allocation info, returning unused area to free list.
void EmptyAllocationInfo(); void EmptyAllocationInfo();
...@@ -2757,6 +2764,8 @@ class NewSpace : public Space { ...@@ -2757,6 +2764,8 @@ class NewSpace : public Space {
SemiSpace* active_space() { return &to_space_; } SemiSpace* active_space() { return &to_space_; }
void AddAllocationObserver(AllocationObserver* observer) override;
void RemoveAllocationObserver(AllocationObserver* observer) override;
void PauseAllocationObservers() override; void PauseAllocationObservers() override;
void ResumeAllocationObservers() override; void ResumeAllocationObservers() override;
......
...@@ -3138,3 +3138,28 @@ TEST(SamplingHeapProfilerPretenuredInlineAllocations) { ...@@ -3138,3 +3138,28 @@ TEST(SamplingHeapProfilerPretenuredInlineAllocations) {
CHECK_GE(count, 8000); CHECK_GE(count, 8000);
} }
// Checks that the sampling heap profiler attributes allocations to the
// "(EXTERNAL)" node when sampling with a large (512KB) interval.
TEST(SamplingHeapProfilerLargeInterval) {
  v8::HandleScope handle_scope(v8::Isolate::GetCurrent());
  LocalContext env;
  v8::HeapProfiler* profiler = env->GetIsolate()->GetHeapProfiler();

  // Suppress randomness to avoid flakiness in tests.
  v8::internal::FLAG_sampling_heap_profiler_suppress_randomness = true;

  profiler->StartSamplingHeapProfiler(512 * 1024);
  const int kAllocationCount = 8 * 1024;
  for (int i = 0; i < kAllocationCount; ++i) {
    CcTest::i_isolate()->factory()->NewFixedArray(1024);
  }

  std::unique_ptr<v8::AllocationProfile> profile(
      profiler->GetAllocationProfile());
  CHECK(profile);

  const char* node_names[] = {"(EXTERNAL)"};
  auto external_node = FindAllocationProfileNode(env->GetIsolate(), *profile,
                                                 ArrayVector(node_names));
  CHECK(external_node);

  profiler->StopSamplingHeapProfiler();
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment