Commit 062fc9ab authored by mlippautz, committed by Commit bot

[heap] Slightly optimize Heap::UpdateAllocationSite()

This function is called for every live object in new space. We statically know
which version to call, so let's use templates here and eliminate a branch at
runtime. (A standalone sketch of the pattern follows the commit metadata below.)

BUG=chromium:524425
LOG=N

Review URL: https://codereview.chromium.org/1659823002

Cr-Commit-Position: refs/heads/master@{#33662}
parent 4da26845
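A minimal standalone sketch of the pattern this CL applies; it is not V8 code,
and every name in it (UpdateMode, UpdateSite, RecordGlobal, RecordCached) is
hypothetical. Because the mode is a non-type template parameter, the condition
"mode == kGlobal" is a compile-time constant, so each instantiation keeps only
one arm of the branch and the per-object runtime check disappears.

#include <cstdio>

enum UpdateMode { kGlobal, kCached };

// Stand-ins for the two bookkeeping paths.
void RecordGlobal(int site) { std::printf("global %d\n", site); }
void RecordCached(int site) { std::printf("cached %d\n", site); }

template <UpdateMode mode>
void UpdateSite(int site) {
  // "mode" is statically known, so the compiler folds this branch away
  // in each instantiation instead of testing it for every object.
  if (mode == kGlobal) {
    RecordGlobal(site);
  } else {
    RecordCached(site);
  }
}

int main() {
  UpdateSite<kGlobal>(1);  // the caller picks the variant at compile time
  UpdateSite<kCached>(2);
  return 0;
}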
@@ -515,7 +515,7 @@ AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
   return nullptr;
 }
 
-
+template <Heap::UpdateAllocationSiteMode mode>
 void Heap::UpdateAllocationSite(HeapObject* object,
                                 HashMap* pretenuring_feedback) {
   DCHECK(InFromSpace(object));
@@ -525,7 +525,8 @@ void Heap::UpdateAllocationSite(HeapObject* object,
   AllocationMemento* memento_candidate = FindAllocationMemento<kForGC>(object);
   if (memento_candidate == nullptr) return;
 
-  if (pretenuring_feedback == global_pretenuring_feedback_) {
+  if (mode == kGlobal) {
+    DCHECK_EQ(pretenuring_feedback, global_pretenuring_feedback_);
     // Entering global pretenuring feedback is only used in the scavenger, where
     // we are allowed to actually touch the allocation site.
     if (!memento_candidate->IsValid()) return;
@@ -538,6 +539,8 @@ void Heap::UpdateAllocationSite(HeapObject* object,
                                                    ObjectHash(site->address()));
     }
   } else {
+    DCHECK_EQ(mode, kCached);
+    DCHECK_NE(pretenuring_feedback, global_pretenuring_feedback_);
     // Entering cached feedback is used in the parallel case. We are not allowed
     // to dereference the allocation site and rather have to postpone all checks
     // till actually merging the data.
@@ -442,17 +442,17 @@ class Heap {
     kSmiRootsStart = kStringTableRootIndex + 1
   };
 
-  enum FindMementoMode { kForRuntime, kForGC };
-
-  enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
-
   // Indicates whether live bytes adjustment is triggered
   // - from within the GC code before sweeping started (SEQUENTIAL_TO_SWEEPER),
   // - or from within GC (CONCURRENT_TO_SWEEPER),
   // - or mutator code (CONCURRENT_TO_SWEEPER).
   enum InvocationMode { SEQUENTIAL_TO_SWEEPER, CONCURRENT_TO_SWEEPER };
 
-  enum PretenuringFeedbackInsertionMode { kCached, kGlobal };
+  enum FindMementoMode { kForRuntime, kForGC };
+
+  enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
+
+  enum UpdateAllocationSiteMode { kGlobal, kCached };
 
   // Taking this lock prevents the GC from entering a phase that relocates
   // object references.
@@ -1361,6 +1361,7 @@ class Heap {
   // the corresponding allocation site is immediately updated and an entry
   // in the hash map is created. Otherwise the entry (including the count
   // value) is cached on the local pretenuring feedback.
+  template <UpdateAllocationSiteMode mode>
   inline void UpdateAllocationSite(HeapObject* object,
                                    HashMap* pretenuring_feedback);
 
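The heap.h comment above (and the matching comments in the heap-inl.h hunk)
describes two feedback paths: in kGlobal mode the allocation site is touched
immediately, while in kCached mode each task only counts mementos in a local
map and defers all site checks until the maps are merged. A hedged sketch of
that split, assuming a map from an allocation-site key to a memento count;
FeedbackMap, MergeFeedback, and the layout are illustrative guesses, not V8's
actual structures.

#include <cstdint>
#include <unordered_map>

using FeedbackMap = std::unordered_map<uintptr_t, int>;

// kGlobal path: the scavenger updates this map (and the site) directly.
FeedbackMap global_feedback;

// kCached path: a parallel task fills a local map without dereferencing any
// allocation site; checks are postponed until this merge on the main thread.
void MergeFeedback(const FeedbackMap& local) {
  for (const auto& entry : local) {
    global_feedback[entry.first] += entry.second;
  }
}

int main() {
  FeedbackMap local;
  local[0x1234] += 1;    // a task records one memento hit locally
  MergeFeedback(local);  // deferred merge into the global feedback
  return 0;
}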
@@ -1571,7 +1571,8 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final
         local_pretenuring_feedback_(local_pretenuring_feedback) {}
 
   bool Visit(HeapObject* object) override {
-    heap_->UpdateAllocationSite(object, local_pretenuring_feedback_);
+    heap_->UpdateAllocationSite<Heap::kCached>(object,
+                                               local_pretenuring_feedback_);
     int size = object->Size();
     HeapObject* target_object = nullptr;
     if (heap_->ShouldBePromoted(object->address(), size) &&
@@ -28,7 +28,7 @@ void Scavenger::ScavengeObject(HeapObject** p, HeapObject* object) {
     return;
   }
 
-  object->GetHeap()->UpdateAllocationSite(
+  object->GetHeap()->UpdateAllocationSite<Heap::kGlobal>(
       object, object->GetHeap()->global_pretenuring_feedback_);
 
   // AllocationMementos are unrooted and shouldn't survive a scavenge