Commit 7b704c4f authored by ofrobots, committed by Commit bot

[heap] inline allocation steps refactor

Expose the steps for incremental marking and idle scavenge more directly in
NewSpace. Adjust the NewSpace and Heap interfaces to allow callers to be
clearer about how they interact with inline allocation steps. This refactor
prepares the ground for more consumers of inline allocation steps (e.g. the
sampling heap profiler; a usage sketch follows below).

R=hpayer@chromium.org
BUG=

Review URL: https://codereview.chromium.org/1404523002

Cr-Commit-Position: refs/heads/master@{#31814}
parent ac75fe1a
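
As a minimal sketch of the consumer pattern this refactor enables (the sampling
heap profiler observer shown here is hypothetical and not part of this commit;
only InlineAllocationObserver and the Add/Remove calls are introduced below):

class SamplingProfilerObserver : public InlineAllocationObserver {
 public:
  explicit SamplingProfilerObserver(intptr_t sample_interval)
      : InlineAllocationObserver(sample_interval) {}
  virtual void Step(int bytes_allocated) {
    // Take one sample roughly every sample_interval bytes of new-space
    // allocation; bytes_allocated is the exact distance since the last step.
  }
};

// Registration and removal mirror what IdleScavengeObserver does in
// Heap::SetUp() and Heap::TearDown() below:
//   heap->new_space()->AddInlineAllocationObserver(&observer);
//   heap->new_space()->RemoveInlineAllocationObserver(&observer);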
......@@ -52,6 +52,19 @@ struct Heap::StrongRootsList {
StrongRootsList* next;
};
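// Forwards inline allocation steps in the new space to the heap's idle
// scavenge scheduling.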
class IdleScavengeObserver : public InlineAllocationObserver {
public:
IdleScavengeObserver(Heap& heap, intptr_t step_size)
: InlineAllocationObserver(step_size), heap_(heap) {}
virtual void Step(int bytes_allocated) {
heap_.ScheduleIdleScavengeIfNeeded(bytes_allocated);
}
private:
Heap& heap_;
};
Heap::Heap()
: amount_of_external_allocated_memory_(0),
......@@ -129,6 +142,7 @@ Heap::Heap()
memory_reducer_(nullptr),
object_stats_(nullptr),
scavenge_job_(nullptr),
idle_scavenge_observer_(nullptr),
full_codegen_bytes_generated_(0),
crankshaft_codegen_bytes_generated_(0),
new_space_allocation_counter_(0),
......@@ -1719,8 +1733,9 @@ void Heap::Scavenge() {
// Set age mark.
new_space_.set_age_mark(new_space_.top());
new_space_.LowerInlineAllocationLimit(
new_space_.inline_allocation_limit_step());
// We start a new step without accounting for the objects copied into
// to-space, as those are not allocations.
new_space_.UpdateInlineAllocationLimitStep();
array_buffer_tracker()->FreeDead(true);
......@@ -5022,17 +5037,6 @@ void Heap::DisableInlineAllocation() {
}
void Heap::LowerInlineAllocationLimit(intptr_t step) {
new_space()->LowerInlineAllocationLimit(step);
}
void Heap::ResetInlineAllocationLimit() {
new_space()->LowerInlineAllocationLimit(
ScavengeJob::kBytesAllocatedBeforeNextIdleTask);
}
V8_DECLARE_ONCE(initialize_gc_once);
static void InitializeGCOnce() {
......@@ -5141,7 +5145,9 @@ bool Heap::SetUp() {
mark_compact_collector()->SetUp();
ResetInlineAllocationLimit();
idle_scavenge_observer_ = new IdleScavengeObserver(
*this, ScavengeJob::kBytesAllocatedBeforeNextIdleTask);
new_space()->AddInlineAllocationObserver(idle_scavenge_observer_);
return true;
}
......@@ -5240,6 +5246,10 @@ void Heap::TearDown() {
PrintAlloctionsHash();
}
new_space()->RemoveInlineAllocationObserver(idle_scavenge_observer_);
delete idle_scavenge_observer_;
idle_scavenge_observer_ = nullptr;
delete scavenge_collector_;
scavenge_collector_ = nullptr;
......
......@@ -1895,13 +1895,6 @@ class Heap {
void SetOldGenerationAllocationLimit(intptr_t old_gen_size, double gc_speed,
double mutator_speed);
// ===========================================================================
// Inline allocation. ========================================================
// ===========================================================================
void LowerInlineAllocationLimit(intptr_t step);
void ResetInlineAllocationLimit();
// ===========================================================================
// Idle notification. ========================================================
// ===========================================================================
......@@ -2289,6 +2282,8 @@ class Heap {
ScavengeJob* scavenge_job_;
InlineAllocationObserver* idle_scavenge_observer_;
// These two counters are monotonically increasing and never reset.
size_t full_codegen_bytes_generated_;
size_t crankshaft_codegen_bytes_generated_;
......@@ -2359,6 +2354,7 @@ class Heap {
friend class GCCallbacksScope;
friend class GCTracer;
friend class HeapIterator;
friend class IdleScavengeObserver;
friend class IncrementalMarking;
friend class MarkCompactCollector;
friend class MarkCompactMarkingVisitor;
......
......@@ -17,7 +17,6 @@
namespace v8 {
namespace internal {
IncrementalMarking::StepActions IncrementalMarking::IdleStepActions() {
return StepActions(IncrementalMarking::NO_GC_VIA_STACK_GUARD,
IncrementalMarking::FORCE_MARKING,
......@@ -27,6 +26,7 @@ IncrementalMarking::StepActions IncrementalMarking::IdleStepActions() {
IncrementalMarking::IncrementalMarking(Heap* heap)
: heap_(heap),
observer_(*this, kAllocatedThreshold),
state_(STOPPED),
is_compacting_(false),
steps_count_(0),
......@@ -568,7 +568,8 @@ void IncrementalMarking::Start(const char* reason) {
state_ = SWEEPING;
}
heap_->LowerInlineAllocationLimit(kAllocatedThreshold);
heap_->new_space()->AddInlineAllocationObserver(&observer_);
incremental_marking_job()->Start(heap_);
}
......@@ -843,7 +844,8 @@ void IncrementalMarking::Stop() {
if (FLAG_trace_incremental_marking) {
PrintF("[IncrementalMarking] Stopping.\n");
}
heap_->ResetInlineAllocationLimit();
heap_->new_space()->RemoveInlineAllocationObserver(&observer_);
IncrementalMarking::set_should_hurry(false);
ResetStepCounters();
if (IsMarking()) {
......@@ -871,7 +873,8 @@ void IncrementalMarking::Finalize() {
Hurry();
state_ = STOPPED;
is_compacting_ = false;
heap_->ResetInlineAllocationLimit();
heap_->new_space()->RemoveInlineAllocationObserver(&observer_);
IncrementalMarking::set_should_hurry(false);
ResetStepCounters();
PatchIncrementalMarkingRecordWriteStubs(heap_,
......
......@@ -8,6 +8,7 @@
#include "src/cancelable-task.h"
#include "src/execution.h"
#include "src/heap/incremental-marking-job.h"
#include "src/heap/spaces.h"
#include "src/objects.h"
namespace v8 {
......@@ -214,6 +215,21 @@ class IncrementalMarking {
}
private:
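// Observer that drives marking progress: each inline allocation step in the
// new space triggers an incremental marking step.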
class Observer : public InlineAllocationObserver {
public:
Observer(IncrementalMarking& incremental_marking, intptr_t step_size)
: InlineAllocationObserver(step_size),
incremental_marking_(incremental_marking) {}
virtual void Step(int bytes_allocated) {
incremental_marking_.Step(bytes_allocated,
IncrementalMarking::GC_VIA_STACK_GUARD);
}
private:
IncrementalMarking& incremental_marking_;
};
int64_t SpaceLeftInOldSpace();
void SpeedUp();
......@@ -248,6 +264,8 @@ class IncrementalMarking {
Heap* heap_;
Observer observer_;
State state_;
bool is_compacting_;
......
......@@ -1509,7 +1509,7 @@ void NewSpace::UpdateInlineAllocationLimit(int size_in_bytes) {
Address high = to_space_.page_high();
Address new_top = allocation_info_.top() + size_in_bytes;
allocation_info_.set_limit(Min(new_top, high));
} else if (inline_allocation_limit_step_ == 0) {
} else if (top_on_previous_step_ == 0) {
// Normal limit is the end of the current page.
allocation_info_.set_limit(to_space_.page_high());
} else {
......@@ -1601,12 +1601,40 @@ bool NewSpace::EnsureAllocation(int size_in_bytes,
}
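// Use the smallest step size of all registered observers so that no observer
// is notified later than its own step_size allows; with no observers, inline
// allocation stepping is disabled entirely.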
void NewSpace::UpdateInlineAllocationLimitStep() {
intptr_t step = 0;
for (int i = 0; i < inline_allocation_observers_.length(); ++i) {
InlineAllocationObserver* observer = inline_allocation_observers_[i];
step = step ? Min(step, observer->step_size()) : observer->step_size();
}
inline_allocation_limit_step_ = step;
top_on_previous_step_ = step ? allocation_info_.top() : 0;
UpdateInlineAllocationLimit(0);
}
void NewSpace::AddInlineAllocationObserver(InlineAllocationObserver* observer) {
inline_allocation_observers_.Add(observer);
UpdateInlineAllocationLimitStep();
}
void NewSpace::RemoveInlineAllocationObserver(
InlineAllocationObserver* observer) {
bool removed = inline_allocation_observers_.RemoveElement(observer);
// Only used in assertion. Suppress unused variable warning.
static_cast<void>(removed);
DCHECK(removed);
UpdateInlineAllocationLimitStep();
}
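// Passes the number of bytes allocated since the previous step to every
// registered observer; each observer tracks its own progress toward its
// step_size internally.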
void NewSpace::InlineAllocationStep(Address top, Address new_top) {
if (top_on_previous_step_) {
int bytes_allocated = static_cast<int>(top - top_on_previous_step_);
heap()->ScheduleIdleScavengeIfNeeded(bytes_allocated);
heap()->incremental_marking()->Step(bytes_allocated,
IncrementalMarking::GC_VIA_STACK_GUARD);
for (int i = 0; i < inline_allocation_observers_.length(); ++i) {
inline_allocation_observers_[i]->InlineAllocationStep(bytes_allocated);
}
top_on_previous_step_ = new_top;
}
}
......
......@@ -2509,6 +2509,43 @@ class NewSpacePageIterator BASE_EMBEDDED {
NewSpacePage* last_page_;
};
// -----------------------------------------------------------------------------
// Allows observation of inline allocation in the new space.
class InlineAllocationObserver {
public:
explicit InlineAllocationObserver(intptr_t step_size)
: step_size_(step_size), bytes_to_next_step_(step_size) {
DCHECK(step_size >= kPointerSize && (step_size & kHeapObjectTagMask) == 0);
}
virtual ~InlineAllocationObserver() {}
private:
intptr_t step_size() const { return step_size_; }
// Pure virtual method, implemented by subclasses, that gets called when more
// than step_size bytes have been allocated.
virtual void Step(int bytes_allocated) = 0;
// Called each time the new space does an inline allocation step. This may
// happen more frequently than the step_size we are monitoring (e.g. when
// there are multiple observers, or when a page or space boundary is
// encountered). The Step method is only invoked once more than step_size
// bytes have been allocated since the previous Step call.
void InlineAllocationStep(int bytes_allocated) {
bytes_to_next_step_ -= bytes_allocated;
if (bytes_to_next_step_ <= 0) {
Step(static_cast<int>(step_size_ - bytes_to_next_step_));
bytes_to_next_step_ = step_size_;
}
}
intptr_t step_size_;
intptr_t bytes_to_next_step_;
friend class NewSpace;
DISALLOW_COPY_AND_ASSIGN(InlineAllocationObserver);
};
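
The stepping mechanics are easiest to see in a standalone model (an
illustrative sketch, not V8 code): the limit is kept one step_size past the
top recorded at the previous step, so the notification fires lazily, on the
first allocation that would cross the lowered limit. This is why the test at
the end of this commit sees no callback at exactly 128 bytes.

#include <cstdio>

class BumpSpaceModel {
 public:
  explicit BumpSpaceModel(long step_size)
      : step_size_(step_size), limit_(step_size) {}
  void Allocate(int bytes) {
    if (top_ + bytes > limit_) {
      // Slow path: report everything allocated since the previous step,
      // then move the lowered limit forward by one step.
      std::printf("Step(%ld)\n", top_ - top_on_previous_step_);
      top_on_previous_step_ = top_;
      limit_ = top_ + step_size_;
    }
    top_ += bytes;  // Bump-pointer allocation.
  }
 private:
  long step_size_;
  long top_ = 0;
  long top_on_previous_step_ = 0;
  long limit_;
};

int main() {
  BumpSpaceModel space(128);
  space.Allocate(64);  // fits below the lowered limit; no notification
  space.Allocate(64);  // lands exactly on the limit; still no notification
  space.Allocate(8);   // would cross the limit; prints Step(128)
  return 0;
}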
// -----------------------------------------------------------------------------
// The young generation space.
......@@ -2698,10 +2735,21 @@ class NewSpace : public Space {
void ResetAllocationInfo();
void UpdateInlineAllocationLimit(int size_in_bytes);
void LowerInlineAllocationLimit(intptr_t step) {
inline_allocation_limit_step_ = step;
void UpdateInlineAllocationLimitStep();
// Allows observation of inline allocation. The observer->Step() method gets
// called after every step_size bytes have been allocated (approximately).
// This works by lowering the allocation limit and raising it again after
// each step.
void AddInlineAllocationObserver(InlineAllocationObserver* observer);
// Removes a previously installed observer.
void RemoveInlineAllocationObserver(InlineAllocationObserver* observer);
void DisableInlineAllocationSteps() {
inline_allocation_limit_step_ = 0;
top_on_previous_step_ = 0;
UpdateInlineAllocationLimit(0);
top_on_previous_step_ = step ? allocation_info_.top() : 0;
}
// Get the extent of the inactive semispace (for use as a marking stack,
......@@ -2771,10 +2819,6 @@ class NewSpace : public Space {
bool IsFromSpaceCommitted() { return from_space_.is_committed(); }
inline intptr_t inline_allocation_limit_step() {
return inline_allocation_limit_step_;
}
SemiSpace* active_space() { return &to_space_; }
private:
......@@ -2800,11 +2844,13 @@ class NewSpace : public Space {
// mark-compact collection.
AllocationInfo allocation_info_;
// When incremental marking is active we will set allocation_info_.limit
// to be lower than actual limit and then will gradually increase it
// in steps to guarantee that we do incremental marking steps even
// when all allocation is performed from inlined generated code.
// When inline allocation stepping is active, either because of incremental
// marking or because of idle scavenge, we 'interrupt' inline allocation every
// once in a while. This is done by setting allocation_info_.limit to be lower
// than the actual limit and increasing it in steps to guarantee that the
// observers are notified periodically.
intptr_t inline_allocation_limit_step_;
List<InlineAllocationObserver*> inline_allocation_observers_;
Address top_on_previous_step_;
......@@ -2814,8 +2860,7 @@ class NewSpace : public Space {
bool EnsureAllocation(int size_in_bytes, AllocationAlignment alignment);
// If we are doing inline allocation in steps, this method performs the 'step'
// operation. Right now incremental marking is the only consumer of inline
// allocation steps. top is the memory address of the bump pointer at the last
// operation. top is the memory address of the bump pointer at the last
// inline allocation (i.e. it determines the number of bytes actually
// allocated since the last step.) new_top is the address of the bump pointer
// where the next byte is going to be allocated from. top and new_top may be
......
......@@ -525,11 +525,6 @@ static inline void ExpectUndefined(const char* code) {
}
static inline void DisableInlineAllocationSteps(v8::internal::NewSpace* space) {
space->LowerInlineAllocationLimit(0);
}
static inline void CheckDoubleEquals(double expected, double actual) {
const double kEpsilon = 1e-10;
CHECK_LE(expected, actual + kEpsilon);
......@@ -557,7 +552,7 @@ static inline void CreatePadding(i::Heap* heap, int padding_size,
*heap->old_space()->allocation_top_address());
CHECK(padding_size <= current_free_memory || current_free_memory == 0);
} else {
DisableInlineAllocationSteps(heap->new_space());
heap->new_space()->DisableInlineAllocationSteps();
int current_free_memory =
static_cast<int>(*heap->new_space()->allocation_limit_address() -
*heap->new_space()->allocation_top_address());
......@@ -587,7 +582,7 @@ static inline void CreatePadding(i::Heap* heap, int padding_size,
// Helper function that simulates a full new-space in the heap.
static inline bool FillUpOnePage(v8::internal::NewSpace* space) {
DisableInlineAllocationSteps(space);
space->DisableInlineAllocationSteps();
int space_remaining = static_cast<int>(*space->allocation_limit_address() -
*space->allocation_top_address());
if (space_remaining == 0) return false;
......@@ -599,7 +594,7 @@ static inline bool FillUpOnePage(v8::internal::NewSpace* space) {
// Helper function that fills up the new-space, leaving only extra_bytes free.
static inline void AllocateAllButNBytes(v8::internal::NewSpace* space,
int extra_bytes) {
DisableInlineAllocationSteps(space);
space->DisableInlineAllocationSteps();
int space_remaining = static_cast<int>(*space->allocation_limit_address() -
*space->allocation_top_address());
CHECK(space_remaining >= extra_bytes);
......
......@@ -792,3 +792,99 @@ UNINITIALIZED_TEST(NewSpaceGrowsToTargetCapacity) {
}
isolate->Dispose();
}
static HeapObject* AllocateUnaligned(NewSpace* space, int size) {
AllocationResult allocation = space->AllocateRawUnaligned(size);
CHECK(!allocation.IsRetry());
HeapObject* filler = NULL;
CHECK(allocation.To(&filler));
space->heap()->CreateFillerObjectAt(filler->address(), size);
return filler;
}
class Observer : public InlineAllocationObserver {
public:
explicit Observer(intptr_t step_size)
: InlineAllocationObserver(step_size), count_(0) {}
virtual void Step(int bytes_allocated) { count_++; }
int count() const { return count_; }
private:
int count_;
};
UNINITIALIZED_TEST(InlineAllocationObserver) {
v8::Isolate::CreateParams create_params;
create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
v8::Isolate* isolate = v8::Isolate::New(create_params);
{
v8::Isolate::Scope isolate_scope(isolate);
v8::HandleScope handle_scope(isolate);
v8::Context::New(isolate)->Enter();
Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
NewSpace* new_space = i_isolate->heap()->new_space();
Observer observer1(128);
new_space->AddInlineAllocationObserver(&observer1);
// The observer should not get notified if we have allocated less than
// 128 bytes.
AllocateUnaligned(new_space, 64);
CHECK_EQ(observer1.count(), 0);
// The observer should not get called even when we have allocated exactly
// 128 bytes.
AllocateUnaligned(new_space, 64);
CHECK_EQ(observer1.count(), 0);
// The next allocation gets the notification.
AllocateUnaligned(new_space, 8);
CHECK_EQ(observer1.count(), 1);
// Another >128 bytes should get another notification.
AllocateUnaligned(new_space, 136);
CHECK_EQ(observer1.count(), 2);
// Allocating a large object should get only one notification.
AllocateUnaligned(new_space, 1024);
CHECK_EQ(observer1.count(), 3);
// Allocating another 2048 bytes in small objects should get 12
// notifications.
for (int i = 0; i < 64; ++i) {
AllocateUnaligned(new_space, 32);
}
CHECK_EQ(observer1.count(), 15);
// Multiple observers should work.
Observer observer2(96);
new_space->AddInlineAllocationObserver(&observer2);
AllocateUnaligned(new_space, 2048);
CHECK_EQ(observer1.count(), 16);
CHECK_EQ(observer2.count(), 1);
AllocateUnaligned(new_space, 104);
CHECK_EQ(observer1.count(), 16);
CHECK_EQ(observer2.count(), 2);
// Callback should stop getting called after an observer is removed.
new_space->RemoveInlineAllocationObserver(&observer1);
AllocateUnaligned(new_space, 384);
CHECK_EQ(observer1.count(), 16); // no more notifications.
CHECK_EQ(observer2.count(), 3); // this one is still active.
new_space->RemoveInlineAllocationObserver(&observer2);
AllocateUnaligned(new_space, 384);
CHECK_EQ(observer1.count(), 16);
CHECK_EQ(observer2.count(), 3);
}
isolate->Dispose();
}