Commit 55422bdd authored by mlippautz, committed by Commit bot

[heap] Use HashMap as scratchpad backing store

We use a scratchpad to remember visited allocation sites for post-processing
(making tenure decisions). The previous implementation used a rooted FixedArray
of constant length (256) to remember all sites. Updating the scratchpad is a
bottleneck in any parallel/concurrent implementation of newspace evacuation.

The new implementation uses a HashMap with allocation sites as keys and
temporary counts as values. During evacuation we collect a local hashmap of
visited allocation sites. Upon merging a local hashmap back into the global one
we update potential forward pointers of compacted allocation sites. The
scavenger can enter its entries directly into the global hashmap. Note that the
actual memento found count is still kept on the AllocationSite, as it needs to
survive scavenges and full GCs.
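In outline, the flow looks like the following simplified stand-alone sketch.
Names and types are illustrative (std::unordered_map stands in for v8's
HashMap); the real logic lives in Heap::UpdateAllocationSite and
Heap::MergeAllocationSitePretenuringFeedback in the diff below.

    // Simplified model of the new feedback flow; not the actual v8 classes.
    #include <cstdint>
    #include <unordered_map>

    struct Site {
      Site* forwarding_address = nullptr;  // set if the site itself was moved
      int memento_found_count = 0;         // survives scavenges and full GCs
    };

    using Feedback = std::unordered_map<Site*, intptr_t>;

    // Each evacuation task counts visits in its own local map, lock-free.
    void RecordVisit(Feedback* local, Site* site) { (*local)[site]++; }

    // After evacuation, local counts are merged into the global map; keys
    // that were moved during compaction are resolved via forward pointers.
    void MergeIntoGlobal(Feedback* global, const Feedback& local) {
      for (const auto& entry : local) {
        Site* site = entry.first;
        if (site->forwarding_address != nullptr) site = site->forwarding_address;
        site->memento_found_count += static_cast<int>(entry.second);
        (*global)[site] += entry.second;
      }
    }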

BUG=chromium:524425
LOG=N
R=hpayer@chromium.org

Review URL: https://codereview.chromium.org/1535723002

Cr-Commit-Position: refs/heads/master@{#33233}
parent aacce200
@@ -507,21 +507,39 @@ AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
 }

-void Heap::UpdateAllocationSiteFeedback(HeapObject* object,
-                                        ScratchpadSlotMode mode) {
-  Heap* heap = object->GetHeap();
-  DCHECK(heap->InFromSpace(object));
+void Heap::UpdateAllocationSite(HeapObject* object,
+                                HashMap* pretenuring_feedback) {
+  DCHECK(InFromSpace(object));
   if (!FLAG_allocation_site_pretenuring ||
       !AllocationSite::CanTrack(object->map()->instance_type()))
     return;

-  AllocationMemento* memento = heap->FindAllocationMemento(object);
-  if (memento == NULL) return;
+  AllocationMemento* memento = FindAllocationMemento(object);
+  if (memento == nullptr) return;

-  if (memento->GetAllocationSite()->IncrementMementoFoundCount()) {
-    heap->AddAllocationSiteToScratchpad(memento->GetAllocationSite(), mode);
-  }
-}
+  AllocationSite* key = memento->GetAllocationSite();
+  DCHECK(!key->IsZombie());
+
+  if (pretenuring_feedback == global_pretenuring_feedback_) {
+    // For inserting in the global pretenuring storage we need to first
+    // increment the memento found count on the allocation site.
+    if (key->IncrementMementoFoundCount()) {
+      global_pretenuring_feedback_->LookupOrInsert(
+          key, static_cast<uint32_t>(bit_cast<uintptr_t>(key)));
+    }
+  } else {
+    // Any pretenuring storage other than the global one is used as a cache,
+    // whose counts are later on merged into the allocation site.
+    HashMap::Entry* e = pretenuring_feedback->LookupOrInsert(
+        key, static_cast<uint32_t>(bit_cast<uintptr_t>(key)));
+    DCHECK(e != nullptr);
+    (*bit_cast<intptr_t*>(&e->value))++;
+  }
+}
+
+void Heap::RemoveAllocationSitePretenuringFeedback(AllocationSite* site) {
+  global_pretenuring_feedback_->Remove(
+      site, static_cast<uint32_t>(bit_cast<uintptr_t>(site)));
+}
...
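Two idioms in the new code above are easy to miss: the hash passed to
LookupOrInsert is just the site's address truncated to 32 bits, and the visit
count is stored directly in the entry's void* value slot. A stand-alone
approximation, with hypothetical helper names:

    #include <cstdint>

    // Hash: reuse the pointer's address bits, mirroring
    // static_cast<uint32_t>(bit_cast<uintptr_t>(key)) above.
    static inline uint32_t PointerHash(const void* ptr) {
      return static_cast<uint32_t>(reinterpret_cast<uintptr_t>(ptr));
    }

    // Count-in-value-slot: treat the entry's void* payload as an integer
    // counter, mirroring (*bit_cast<intptr_t*>(&e->value))++ above.
    static inline void IncrementCount(void** value_slot) {
      intptr_t count = reinterpret_cast<intptr_t>(*value_slot);
      *value_slot = reinterpret_cast<void*>(count + 1);
    }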
This diff is collapsed.
@@ -176,7 +176,6 @@ namespace internal {
   V(SeededNumberDictionary, empty_slow_element_dictionary,                \
     EmptySlowElementDictionary)                                           \
   V(FixedArray, materialized_objects, MaterializedObjects)                \
-  V(FixedArray, allocation_sites_scratchpad, AllocationSitesScratchpad)   \
   V(FixedArray, microtask_queue, MicrotaskQueue)                          \
   V(TypeFeedbackVector, dummy_vector, DummyVector)                        \
   V(FixedArray, cleared_optimized_code_map, ClearedOptimizedCodeMap)      \
@@ -636,7 +635,7 @@ class Heap {
   // - or mutator code (CONCURRENT_TO_SWEEPER).
   enum InvocationMode { SEQUENTIAL_TO_SWEEPER, CONCURRENT_TO_SWEEPER };

-  enum ScratchpadSlotMode { IGNORE_SCRATCHPAD_SLOT, RECORD_SCRATCHPAD_SLOT };
+  enum PretenuringFeedbackInsertionMode { kCached, kGlobal };

   enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
@@ -762,12 +761,6 @@ class Heap {
   // Checks whether the space is valid.
   static bool IsValidAllocationSpace(AllocationSpace space);

-  // An object may have an AllocationSite associated with it through a trailing
-  // AllocationMemento. Its feedback should be updated when objects are found
-  // in the heap.
-  static inline void UpdateAllocationSiteFeedback(HeapObject* object,
-                                                  ScratchpadSlotMode mode);
-
   // Generated code can embed direct references to non-writable roots if
   // they are in new space.
   static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);
@@ -1544,6 +1537,27 @@ class Heap {
     return array_buffer_tracker_;
   }

+  // ===========================================================================
+  // Allocation site tracking. =================================================
+  // ===========================================================================
+
+  // Updates the AllocationSite of a given {object}. If the global pretenuring
+  // storage is passed as {pretenuring_feedback}, the memento found count on
+  // the corresponding allocation site is immediately updated and an entry
+  // in the hash map is created. Otherwise the entry (including the count
+  // value) is cached on the local pretenuring feedback.
+  inline void UpdateAllocationSite(HeapObject* object,
+                                   HashMap* pretenuring_feedback);
+
+  // Removes an entry from the global pretenuring storage.
+  inline void RemoveAllocationSitePretenuringFeedback(AllocationSite* site);
+
+  // Merges local pretenuring feedback into the global one. Note that this
+  // method needs to be called after evacuation, as allocation sites may be
+  // evacuated and this method resolves forward pointers accordingly.
+  void MergeAllocationSitePretenuringFeedback(
+      const HashMap& local_pretenuring_feedback);
+
   // =============================================================================

 #ifdef VERIFY_HEAP
@@ -1567,6 +1581,7 @@ class Heap {
 #endif

  private:
+  class PretenuringScope;
   class UnmapFreeMemoryTask;

   // External strings table is a place where all external strings are
@@ -1661,7 +1676,7 @@ class Heap {
   static const int kMaxMarkCompactsInIdleRound = 7;
   static const int kIdleScavengeThreshold = 5;

-  static const int kAllocationSiteScratchpadSize = 256;
+  static const int kInitialFeedbackCapacity = 256;

   Heap();
@@ -1703,12 +1718,6 @@ class Heap {
   void PreprocessStackTraces();

-  // Pretenuring decisions are made based on feedback collected during new
-  // space evacuation. Note that between feedback collection and calling this
-  // method object in old space must not move.
-  // Right now we only process pretenuring feedback in high promotion mode.
-  bool ProcessPretenuringFeedback();
-
   // Checks whether a global GC is necessary
   GarbageCollector SelectGarbageCollector(AllocationSpace space,
                                           const char** reason);
@@ -1788,16 +1797,6 @@ class Heap {
   // Flush the number to string cache.
   void FlushNumberStringCache();

-  // Sets used allocation sites entries to undefined.
-  void FlushAllocationSitesScratchpad();
-
-  // Initializes the allocation sites scratchpad with undefined values.
-  void InitializeAllocationSitesScratchpad();
-
-  // Adds an allocation site to the scratchpad if there is space left.
-  void AddAllocationSiteToScratchpad(AllocationSite* site,
-                                     ScratchpadSlotMode mode);
-
   // TODO(hpayer): Allocation site pretenuring may make this method obsolete.
   // Re-visit incremental marking heuristics.
   bool IsHighSurvivalRate() { return high_survival_rate_period_length_ > 0; }
@@ -1848,6 +1847,15 @@ class Heap {
   // memory reduction
   HistogramTimer* GCTypeTimer(GarbageCollector collector);

+  // ===========================================================================
+  // Pretenuring. ==============================================================
+  // ===========================================================================
+
+  // Pretenuring decisions are made based on feedback collected during new
+  // space evacuation. Note that between feedback collection and calling this
+  // method objects in old space must not move.
+  void ProcessPretenuringFeedback();
+
   // ===========================================================================
   // Actual GC. ================================================================
   // ===========================================================================
@@ -2143,6 +2151,8 @@ class Heap {
   MUST_USE_RESULT AllocationResult InternalizeString(String* str);

+  // ===========================================================================
+
   void set_force_oom(bool value) { force_oom_ = value; }

   // The amount of external memory registered through the API kept alive
@@ -2352,7 +2362,12 @@ class Heap {
   // deoptimization triggered by garbage collection.
   int gcs_since_last_deopt_;

-  int allocation_sites_scratchpad_length_;
+  // The feedback storage is used to store allocation sites (keys) and how
+  // often they have been visited (values) by finding a memento behind an
+  // object. The storage is only alive temporarily during a GC. The invariant
+  // is that all pointers in this map are already fixed, i.e., they do not
+  // point to forwarding pointers.
+  HashMap* global_pretenuring_feedback_;

   char trace_ring_buffer_[kTraceRingBufferSize];

   // If it's not full then the data is from 0 to ring_buffer_end_. If it's
...
@@ -1581,15 +1581,17 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final
   static const intptr_t kMaxLabObjectSize = 256;

   explicit EvacuateNewSpaceVisitor(Heap* heap,
-                                   SlotsBuffer** evacuation_slots_buffer)
+                                   SlotsBuffer** evacuation_slots_buffer,
+                                   HashMap* local_pretenuring_feedback)
       : EvacuateVisitorBase(heap, evacuation_slots_buffer),
         buffer_(LocalAllocationBuffer::InvalidBuffer()),
         space_to_allocate_(NEW_SPACE),
         promoted_size_(0),
-        semispace_copied_size_(0) {}
+        semispace_copied_size_(0),
+        local_pretenuring_feedback_(local_pretenuring_feedback) {}

   bool Visit(HeapObject* object) override {
-    Heap::UpdateAllocationSiteFeedback(object, Heap::RECORD_SCRATCHPAD_SLOT);
+    heap_->UpdateAllocationSite(object, local_pretenuring_feedback_);
     int size = object->Size();
     HeapObject* target_object = nullptr;
     if (heap_->ShouldBePromoted(object->address(), size) &&
@@ -1716,6 +1718,7 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final
   AllocationSpace space_to_allocate_;
   intptr_t promoted_size_;
   intptr_t semispace_copied_size_;
+  HashMap* local_pretenuring_feedback_;
 };
@@ -3056,7 +3059,7 @@ void MarkCompactCollector::VerifyIsSlotInLiveObject(Address slot,
 }

-void MarkCompactCollector::EvacuateNewSpace() {
+void MarkCompactCollector::EvacuateNewSpacePrologue() {
   // There are soft limits in the allocation code, designed to trigger a mark
   // sweep collection by failing allocations. But since we are already in
   // a mark-sweep allocation, there is no sense in trying to trigger one.
@@ -3073,14 +3076,26 @@ void MarkCompactCollector::EvacuateNewSpace() {
   new_space->Flip();
   new_space->ResetAllocationInfo();

+  newspace_evacuation_candidates_.Clear();
+  NewSpacePageIterator it(from_bottom, from_top);
+  while (it.has_next()) {
+    newspace_evacuation_candidates_.Add(it.next());
+  }
+}
+
+HashMap* MarkCompactCollector::EvacuateNewSpaceInParallel() {
+  HashMap* local_pretenuring_feedback = new HashMap(
+      HashMap::PointersMatch, kInitialLocalPretenuringFeedbackCapacity);
+  EvacuateNewSpaceVisitor new_space_visitor(heap(), &migration_slots_buffer_,
+                                            local_pretenuring_feedback);
   // First pass: traverse all objects in inactive semispace, remove marks,
   // migrate live objects and write forwarding addresses. This stage puts
   // new entries in the store buffer and may cause some pages to be marked
   // scan-on-scavenge.
-  NewSpacePageIterator it(from_bottom, from_top);
-  EvacuateNewSpaceVisitor new_space_visitor(heap(), &migration_slots_buffer_);
-  while (it.has_next()) {
-    NewSpacePage* p = it.next();
+  for (int i = 0; i < newspace_evacuation_candidates_.length(); i++) {
+    NewSpacePage* p =
+        reinterpret_cast<NewSpacePage*>(newspace_evacuation_candidates_[i]);
     bool ok = VisitLiveObjects(p, &new_space_visitor, kClearMarkbits);
     USE(ok);
     DCHECK(ok);
@@ -3092,7 +3107,7 @@ void MarkCompactCollector::EvacuateNewSpace() {
   heap_->IncrementYoungSurvivorsCounter(
       static_cast<int>(new_space_visitor.promoted_size()) +
       static_cast<int>(new_space_visitor.semispace_copied_size()));
-  new_space->set_age_mark(new_space->top());
+  return local_pretenuring_feedback;
 }
@@ -3557,11 +3572,14 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
   GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_EVACUATE);
   Heap::RelocationLock relocation_lock(heap());

+  HashMap* local_pretenuring_feedback = nullptr;
   {
     GCTracer::Scope gc_scope(heap()->tracer(),
                              GCTracer::Scope::MC_EVACUATE_NEW_SPACE);
     EvacuationScope evacuation_scope(this);
-    EvacuateNewSpace();
+    EvacuateNewSpacePrologue();
+    local_pretenuring_feedback = EvacuateNewSpaceInParallel();
+    heap_->new_space()->set_age_mark(heap_->new_space()->top());
   }

   {
@@ -3571,6 +3589,11 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
     EvacuatePagesInParallel();
   }

+  {
+    heap_->MergeAllocationSitePretenuringFeedback(*local_pretenuring_feedback);
+    delete local_pretenuring_feedback;
+  }
+
   UpdatePointersAfterEvacuation();

   {
...
@@ -511,6 +511,8 @@ class MarkCompactCollector {
   class HeapObjectVisitor;
   class SweeperTask;

+  static const int kInitialLocalPretenuringFeedbackCapacity = 256;
+
   explicit MarkCompactCollector(Heap* heap);

   bool WillBeDeoptimized(Code* code);
@@ -696,7 +698,10 @@ class MarkCompactCollector {
   // regions to each space's free list.
   void SweepSpaces();

-  void EvacuateNewSpace();
+  void EvacuateNewSpacePrologue();
+
+  // Returns local pretenuring feedback.
+  HashMap* EvacuateNewSpaceInParallel();

   void AddEvacuationSlotsBufferSynchronized(
       SlotsBuffer* evacuation_slots_buffer);
@@ -772,6 +777,8 @@ class MarkCompactCollector {
   List<Page*> evacuation_candidates_;

+  List<MemoryChunk*> newspace_evacuation_candidates_;
+
   // The evacuation_slots_buffers_ are used by the compaction threads.
   // When a compaction task finishes, it uses
   // AddEvacuationSlotsBufferSynchronized to add its slots buffer to the
...
@@ -28,7 +28,8 @@ void Scavenger::ScavengeObject(HeapObject** p, HeapObject* object) {
     return;
   }

-  Heap::UpdateAllocationSiteFeedback(object, Heap::IGNORE_SCRATCHPAD_SLOT);
+  object->GetHeap()->UpdateAllocationSite(
+      object, object->GetHeap()->global_pretenuring_feedback_);

   // AllocationMementos are unrooted and shouldn't survive a scavenge
   DCHECK(object->map() != object->GetHeap()->allocation_memento_map());
...
@@ -3056,7 +3056,6 @@ HeapObject* LargeObjectIterator::Next() {
 // -----------------------------------------------------------------------------
 // LargeObjectSpace

-static bool ComparePointers(void* key1, void* key2) { return key1 == key2; }

 LargeObjectSpace::LargeObjectSpace(Heap* heap, AllocationSpace id)
@@ -3065,7 +3064,7 @@ LargeObjectSpace::LargeObjectSpace(Heap* heap, AllocationSpace id)
       size_(0),
       page_count_(0),
       objects_size_(0),
-      chunk_map_(ComparePointers, 1024) {}
+      chunk_map_(HashMap::PointersMatch, 1024) {}

 LargeObjectSpace::~LargeObjectSpace() {}
...
@@ -1773,12 +1773,12 @@ void AllocationSite::set_memento_create_count(int count) {
 }

-inline bool AllocationSite::IncrementMementoFoundCount() {
+bool AllocationSite::IncrementMementoFoundCount(int increment) {
   if (IsZombie()) return false;

   int value = memento_found_count();
-  set_memento_found_count(value + 1);
-  return memento_found_count() == kPretenureMinimumCreated;
+  set_memento_found_count(value + increment);
+  return memento_found_count() >= kPretenureMinimumCreated;
 }
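Note the change from == to >= together with the new increment parameter:
counts merged from a local map can advance the counter by more than one and
skip the exact threshold value, so an equality check would under-report. A
tiny illustration (the threshold constant's value here is illustrative):

    #include <cassert>

    int main() {
      const int kPretenureMinimumCreated = 100;  // illustrative threshold
      int found_count = 98;
      found_count += 3;  // batched increment merged from local feedback
      assert(found_count >= kPretenureMinimumCreated);  // '>=' catches this
      assert(found_count != kPretenureMinimumCreated);  // '==' would miss it
      return 0;
    }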
@@ -1832,11 +1832,12 @@ inline bool AllocationSite::DigestPretenuringFeedback(
   }

   if (FLAG_trace_pretenuring_statistics) {
-    PrintF(
-        "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
-        static_cast<void*>(this), create_count, found_count, ratio,
-        PretenureDecisionName(current_decision),
-        PretenureDecisionName(pretenure_decision()));
+    PrintIsolate(GetIsolate(),
+                 "pretenuring: AllocationSite(%p): (created, found, ratio) "
+                 "(%d, %d, %f) %s => %s\n",
+                 this, create_count, found_count, ratio,
+                 PretenureDecisionName(current_decision),
+                 PretenureDecisionName(pretenure_decision()));
   }

   // Clear feedback calculation fields until the next gc.
...
@@ -8149,7 +8149,7 @@ class AllocationSite: public Struct {
   // Increments the mementos found counter and returns true when the first
   // memento was found for a given allocation site.
-  inline bool IncrementMementoFoundCount();
+  inline bool IncrementMementoFoundCount(int increment = 1);

   inline void IncrementMementoCreateCount();
...
This diff is collapsed.