Commit 55422bdd authored by mlippautz. Committed by Commit bot.

[heap] Use HashMap as scratchpad backing store

We use a scratchpad to remember visited allocation sites for post-processing
(making tenure decisions). The previous implementation used a rooted FixedArray
of constant length (256) to remember all sites. Updating the scratchpad is a
bottleneck in any parallel/concurrent implementation of newspace evacuation.

The new implementation uses a HashMap with allocation sites as keys and
temporary counts as values. During evacuation we collect a local hashmap of
visited allocation sites. Upon merging the local hashmap back into the global
one, we update potential forward pointers of compacted allocation sites. The
scavenger can enter its entries directly into the global hashmap. Note that the
actual memento found count is still kept on the AllocationSite, as it needs to
survive scavenges and full GCs.
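As a rough sketch of this collect-locally/merge-globally pattern (illustrative
only, not code from this CL: it uses std::unordered_map in place of V8's
HashMap, and RecordSiteVisit, MergeFeedback, and resolve_forwarding are
invented names):

#include <unordered_map>

// Hypothetical stand-in for V8's AllocationSite.
struct AllocationSite {
  int memento_found_count = 0;
};

using PretenuringFeedback = std::unordered_map<AllocationSite*, int>;

// During evacuation, each task only bumps a count in its private map,
// so no synchronization is needed on the hot path.
void RecordSiteVisit(PretenuringFeedback* local, AllocationSite* site) {
  ++(*local)[site];
}

// After evacuation, a single thread folds the local counts into the
// global map; this is also the point where a real collector would
// resolve forwarding pointers for sites moved by compaction.
void MergeFeedback(const PretenuringFeedback& local,
                   PretenuringFeedback* global,
                   AllocationSite* (*resolve_forwarding)(AllocationSite*)) {
  for (const auto& entry : local) {
    AllocationSite* site = resolve_forwarding(entry.first);
    site->memento_found_count += entry.second;
    (*global)[site] += entry.second;
  }
}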

BUG=chromium:524425
LOG=N
R=hpayer@chromium.org

Review URL: https://codereview.chromium.org/1535723002

Cr-Commit-Position: refs/heads/master@{#33233}
parent aacce200
@@ -507,21 +507,39 @@ AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
 }

-void Heap::UpdateAllocationSiteFeedback(HeapObject* object,
-                                        ScratchpadSlotMode mode) {
-  Heap* heap = object->GetHeap();
-  DCHECK(heap->InFromSpace(object));
+void Heap::UpdateAllocationSite(HeapObject* object,
+                                HashMap* pretenuring_feedback) {
+  DCHECK(InFromSpace(object));
   if (!FLAG_allocation_site_pretenuring ||
       !AllocationSite::CanTrack(object->map()->instance_type()))
     return;
-  AllocationMemento* memento = heap->FindAllocationMemento(object);
-  if (memento == NULL) return;
-  if (memento->GetAllocationSite()->IncrementMementoFoundCount()) {
-    heap->AddAllocationSiteToScratchpad(memento->GetAllocationSite(), mode);
-  }
+  AllocationMemento* memento = FindAllocationMemento(object);
+  if (memento == nullptr) return;
+
+  AllocationSite* key = memento->GetAllocationSite();
+  DCHECK(!key->IsZombie());
+
+  if (pretenuring_feedback == global_pretenuring_feedback_) {
+    // For inserting in the global pretenuring storage we need to first
+    // increment the memento found count on the allocation site.
+    if (key->IncrementMementoFoundCount()) {
+      global_pretenuring_feedback_->LookupOrInsert(
+          key, static_cast<uint32_t>(bit_cast<uintptr_t>(key)));
+    }
+  } else {
+    // Any other pretenuring storage than the global one is used as a cache,
+    // where the count is later merged into the allocation site.
+    HashMap::Entry* e = pretenuring_feedback->LookupOrInsert(
+        key, static_cast<uint32_t>(bit_cast<uintptr_t>(key)));
+    DCHECK(e != nullptr);
+    (*bit_cast<intptr_t*>(&e->value))++;
+  }
+}
+
+void Heap::RemoveAllocationSitePretenuringFeedback(AllocationSite* site) {
+  global_pretenuring_feedback_->Remove(
+      site, static_cast<uint32_t>(bit_cast<uintptr_t>(site)));
+}
......
This diff is collapsed.
@@ -176,7 +176,6 @@ namespace internal {
   V(SeededNumberDictionary, empty_slow_element_dictionary,                    \
     EmptySlowElementDictionary)                                               \
   V(FixedArray, materialized_objects, MaterializedObjects)                    \
-  V(FixedArray, allocation_sites_scratchpad, AllocationSitesScratchpad)       \
   V(FixedArray, microtask_queue, MicrotaskQueue)                              \
   V(TypeFeedbackVector, dummy_vector, DummyVector)                            \
   V(FixedArray, cleared_optimized_code_map, ClearedOptimizedCodeMap)          \
@@ -636,7 +635,7 @@ class Heap {
   // - or mutator code (CONCURRENT_TO_SWEEPER).
   enum InvocationMode { SEQUENTIAL_TO_SWEEPER, CONCURRENT_TO_SWEEPER };

-  enum ScratchpadSlotMode { IGNORE_SCRATCHPAD_SLOT, RECORD_SCRATCHPAD_SLOT };
+  enum PretenuringFeedbackInsertionMode { kCached, kGlobal };

   enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
@@ -762,12 +761,6 @@ class Heap {
   // Checks whether the space is valid.
   static bool IsValidAllocationSpace(AllocationSpace space);

-  // An object may have an AllocationSite associated with it through a trailing
-  // AllocationMemento. Its feedback should be updated when objects are found
-  // in the heap.
-  static inline void UpdateAllocationSiteFeedback(HeapObject* object,
-                                                  ScratchpadSlotMode mode);
-
   // Generated code can embed direct references to non-writable roots if
   // they are in new space.
   static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);
@@ -1544,6 +1537,27 @@ class Heap {
     return array_buffer_tracker_;
   }

+  // ===========================================================================
+  // Allocation site tracking. =================================================
+  // ===========================================================================
+
+  // Updates the AllocationSite of a given {object}. If the global pretenuring
+  // storage is passed as {pretenuring_feedback} the memento found count on
+  // the corresponding allocation site is immediately updated and an entry
+  // in the hash map is created. Otherwise the entry (including the count
+  // value) is cached in the local pretenuring feedback.
+  inline void UpdateAllocationSite(HeapObject* object,
+                                   HashMap* pretenuring_feedback);
+
+  // Removes an entry from the global pretenuring storage.
+  inline void RemoveAllocationSitePretenuringFeedback(AllocationSite* site);
+
+  // Merges local pretenuring feedback into the global one. Note that this
+  // method needs to be called after evacuation, as allocation sites may be
+  // evacuated and this method resolves forward pointers accordingly.
+  void MergeAllocationSitePretenuringFeedback(
+      const HashMap& local_pretenuring_feedback);
+  // ===========================================================================
+
#ifdef VERIFY_HEAP
@@ -1567,6 +1581,7 @@ class Heap {
 #endif

  private:
+  class PretenuringScope;
   class UnmapFreeMemoryTask;

   // External strings table is a place where all external strings are
@@ -1661,7 +1676,7 @@ class Heap {
   static const int kMaxMarkCompactsInIdleRound = 7;
   static const int kIdleScavengeThreshold = 5;

-  static const int kAllocationSiteScratchpadSize = 256;
+  static const int kInitialFeedbackCapacity = 256;

   Heap();
@@ -1703,12 +1718,6 @@ class Heap {
   void PreprocessStackTraces();

-  // Pretenuring decisions are made based on feedback collected during new
-  // space evacuation. Note that between feedback collection and calling this
-  // method object in old space must not move.
-  // Right now we only process pretenuring feedback in high promotion mode.
-  bool ProcessPretenuringFeedback();
-
   // Checks whether a global GC is necessary
   GarbageCollector SelectGarbageCollector(AllocationSpace space,
                                           const char** reason);
@@ -1788,16 +1797,6 @@ class Heap {
   // Flush the number to string cache.
   void FlushNumberStringCache();

-  // Sets used allocation sites entries to undefined.
-  void FlushAllocationSitesScratchpad();
-
-  // Initializes the allocation sites scratchpad with undefined values.
-  void InitializeAllocationSitesScratchpad();
-
-  // Adds an allocation site to the scratchpad if there is space left.
-  void AddAllocationSiteToScratchpad(AllocationSite* site,
-                                     ScratchpadSlotMode mode);
-
   // TODO(hpayer): Allocation site pretenuring may make this method obsolete.
   // Re-visit incremental marking heuristics.
   bool IsHighSurvivalRate() { return high_survival_rate_period_length_ > 0; }
@@ -1848,6 +1847,15 @@ class Heap {
   // memory reduction
   HistogramTimer* GCTypeTimer(GarbageCollector collector);

+  // ===========================================================================
+  // Pretenuring. ==============================================================
+  // ===========================================================================
+
+  // Pretenuring decisions are made based on feedback collected during new
+  // space evacuation. Note that between feedback collection and calling this
+  // method, objects in old space must not move.
+  void ProcessPretenuringFeedback();
+
   // ===========================================================================
   // Actual GC. ================================================================
   // ===========================================================================
@@ -2143,6 +2151,8 @@ class Heap {
MUST_USE_RESULT AllocationResult InternalizeString(String* str);
// ===========================================================================
void set_force_oom(bool value) { force_oom_ = value; }
// The amount of external memory registered through the API kept alive
@@ -2352,7 +2362,12 @@ class Heap {
   // deoptimization triggered by garbage collection.
   int gcs_since_last_deopt_;

-  int allocation_sites_scratchpad_length_;
+  // The feedback storage is used to store allocation sites (keys) and how
+  // often they have been visited (values) by finding a memento behind an
+  // object. The storage is only alive temporarily during a GC. The invariant
+  // is that all pointers in this map are already fixed, i.e., they do not
+  // point to forwarding pointers.
+  HashMap* global_pretenuring_feedback_;

   char trace_ring_buffer_[kTraceRingBufferSize];

   // If it's not full then the data is from 0 to ring_buffer_end_. If it's
......
@@ -1581,15 +1581,17 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final
   static const intptr_t kMaxLabObjectSize = 256;

   explicit EvacuateNewSpaceVisitor(Heap* heap,
-                                   SlotsBuffer** evacuation_slots_buffer)
+                                   SlotsBuffer** evacuation_slots_buffer,
+                                   HashMap* local_pretenuring_feedback)
       : EvacuateVisitorBase(heap, evacuation_slots_buffer),
         buffer_(LocalAllocationBuffer::InvalidBuffer()),
         space_to_allocate_(NEW_SPACE),
         promoted_size_(0),
-        semispace_copied_size_(0) {}
+        semispace_copied_size_(0),
+        local_pretenuring_feedback_(local_pretenuring_feedback) {}

   bool Visit(HeapObject* object) override {
-    Heap::UpdateAllocationSiteFeedback(object, Heap::RECORD_SCRATCHPAD_SLOT);
+    heap_->UpdateAllocationSite(object, local_pretenuring_feedback_);
     int size = object->Size();
     HeapObject* target_object = nullptr;
     if (heap_->ShouldBePromoted(object->address(), size) &&
@@ -1716,6 +1718,7 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final
   AllocationSpace space_to_allocate_;
   intptr_t promoted_size_;
   intptr_t semispace_copied_size_;
+  HashMap* local_pretenuring_feedback_;
 };
@@ -3056,7 +3059,7 @@ void MarkCompactCollector::VerifyIsSlotInLiveObject(Address slot,
 }

-void MarkCompactCollector::EvacuateNewSpace() {
+void MarkCompactCollector::EvacuateNewSpacePrologue() {
   // There are soft limits in the allocation code, designed to trigger a mark
   // sweep collection by failing allocations. But since we are already in
   // a mark-sweep allocation, there is no sense in trying to trigger one.
@@ -3073,14 +3076,26 @@
   new_space->Flip();
   new_space->ResetAllocationInfo();

+  newspace_evacuation_candidates_.Clear();
+  NewSpacePageIterator it(from_bottom, from_top);
+  while (it.has_next()) {
+    newspace_evacuation_candidates_.Add(it.next());
+  }
+}
+
+HashMap* MarkCompactCollector::EvacuateNewSpaceInParallel() {
+  HashMap* local_pretenuring_feedback = new HashMap(
+      HashMap::PointersMatch, kInitialLocalPretenuringFeedbackCapacity);
+  EvacuateNewSpaceVisitor new_space_visitor(heap(), &migration_slots_buffer_,
+                                            local_pretenuring_feedback);
   // First pass: traverse all objects in inactive semispace, remove marks,
   // migrate live objects and write forwarding addresses. This stage puts
   // new entries in the store buffer and may cause some pages to be marked
   // scan-on-scavenge.
-  NewSpacePageIterator it(from_bottom, from_top);
-  EvacuateNewSpaceVisitor new_space_visitor(heap(), &migration_slots_buffer_);
-  while (it.has_next()) {
-    NewSpacePage* p = it.next();
+  for (int i = 0; i < newspace_evacuation_candidates_.length(); i++) {
+    NewSpacePage* p =
+        reinterpret_cast<NewSpacePage*>(newspace_evacuation_candidates_[i]);
     bool ok = VisitLiveObjects(p, &new_space_visitor, kClearMarkbits);
     USE(ok);
     DCHECK(ok);
@@ -3092,7 +3107,7 @@
   heap_->IncrementYoungSurvivorsCounter(
       static_cast<int>(new_space_visitor.promoted_size()) +
       static_cast<int>(new_space_visitor.semispace_copied_size()));
-  new_space->set_age_mark(new_space->top());
+  return local_pretenuring_feedback;
 }
@@ -3557,11 +3572,14 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
   GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_EVACUATE);
   Heap::RelocationLock relocation_lock(heap());

+  HashMap* local_pretenuring_feedback = nullptr;
   {
     GCTracer::Scope gc_scope(heap()->tracer(),
                              GCTracer::Scope::MC_EVACUATE_NEW_SPACE);
     EvacuationScope evacuation_scope(this);
-    EvacuateNewSpace();
+    EvacuateNewSpacePrologue();
+    local_pretenuring_feedback = EvacuateNewSpaceInParallel();
+    heap_->new_space()->set_age_mark(heap_->new_space()->top());
   }

   {
@@ -3571,6 +3589,11 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
     EvacuatePagesInParallel();
   }

+  {
+    heap_->MergeAllocationSitePretenuringFeedback(*local_pretenuring_feedback);
+    delete local_pretenuring_feedback;
+  }
+
   UpdatePointersAfterEvacuation();

   {
......
@@ -511,6 +511,8 @@ class MarkCompactCollector {
   class HeapObjectVisitor;
   class SweeperTask;

+  static const int kInitialLocalPretenuringFeedbackCapacity = 256;
+
   explicit MarkCompactCollector(Heap* heap);

   bool WillBeDeoptimized(Code* code);
@@ -696,7 +698,10 @@ class MarkCompactCollector {
   // regions to each space's free list.
   void SweepSpaces();

-  void EvacuateNewSpace();
+  void EvacuateNewSpacePrologue();
+
+  // Returns local pretenuring feedback.
+  HashMap* EvacuateNewSpaceInParallel();

   void AddEvacuationSlotsBufferSynchronized(
       SlotsBuffer* evacuation_slots_buffer);
@@ -772,6 +777,8 @@ class MarkCompactCollector {
   List<Page*> evacuation_candidates_;

+  List<MemoryChunk*> newspace_evacuation_candidates_;
+
   // The evacuation_slots_buffers_ are used by the compaction threads.
   // When a compaction task finishes, it uses
   // AddEvacuationSlotsBufferSynchronized to add its slots buffer to the
......
@@ -28,7 +28,8 @@ void Scavenger::ScavengeObject(HeapObject** p, HeapObject* object) {
     return;
   }

-  Heap::UpdateAllocationSiteFeedback(object, Heap::IGNORE_SCRATCHPAD_SLOT);
+  object->GetHeap()->UpdateAllocationSite(
+      object, object->GetHeap()->global_pretenuring_feedback_);

   // AllocationMementos are unrooted and shouldn't survive a scavenge
   DCHECK(object->map() != object->GetHeap()->allocation_memento_map());
......
@@ -3056,7 +3056,6 @@ HeapObject* LargeObjectIterator::Next() {
 // -----------------------------------------------------------------------------
 // LargeObjectSpace

-static bool ComparePointers(void* key1, void* key2) { return key1 == key2; }

 LargeObjectSpace::LargeObjectSpace(Heap* heap, AllocationSpace id)
@@ -3065,7 +3064,7 @@ LargeObjectSpace::LargeObjectSpace(Heap* heap, AllocationSpace id)
       size_(0),
       page_count_(0),
       objects_size_(0),
-      chunk_map_(ComparePointers, 1024) {}
+      chunk_map_(HashMap::PointersMatch, 1024) {}

 LargeObjectSpace::~LargeObjectSpace() {}
......
@@ -1773,12 +1773,12 @@ void AllocationSite::set_memento_create_count(int count) {
 }

-inline bool AllocationSite::IncrementMementoFoundCount() {
+bool AllocationSite::IncrementMementoFoundCount(int increment) {
   if (IsZombie()) return false;

   int value = memento_found_count();
-  set_memento_found_count(value + 1);
-  return memento_found_count() == kPretenureMinimumCreated;
+  set_memento_found_count(value + increment);
+  return memento_found_count() >= kPretenureMinimumCreated;
 }
@@ -1832,11 +1832,12 @@ inline bool AllocationSite::DigestPretenuringFeedback(
   }

   if (FLAG_trace_pretenuring_statistics) {
-    PrintF(
-        "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
-        static_cast<void*>(this), create_count, found_count, ratio,
-        PretenureDecisionName(current_decision),
-        PretenureDecisionName(pretenure_decision()));
+    PrintIsolate(GetIsolate(),
+                 "pretenuring: AllocationSite(%p): (created, found, ratio) "
+                 "(%d, %d, %f) %s => %s\n",
+                 this, create_count, found_count, ratio,
+                 PretenureDecisionName(current_decision),
+                 PretenureDecisionName(pretenure_decision()));
   }

   // Clear feedback calculation fields until the next gc.
......
@@ -8149,7 +8149,7 @@ class AllocationSite: public Struct {
   // Increments the mementos found counter and returns true when the first
   // memento was found for a given allocation site.
-  inline bool IncrementMementoFoundCount();
+  inline bool IncrementMementoFoundCount(int increment = 1);

   inline void IncrementMementoCreateCount();
......
This diff is collapsed.