Commit a8e3925e authored by Michael Lippautz, committed by Commit Bot

[heap] GC-based fast promotion mode

A fast promotion mode that works solely on GC heuristics, without
requiring compiler or other profiler support, implemented as zero-copy
evacuation for new space.

- Once a threshold of survived bytes in the Scavenger is reached, the
  mode is activated.
- In fast promotion mode, all pages are moved to old space instead of
  performing a Scavenge.
- The inevitable upcoming full MC determines, based on the young
  generation survival rate, whether the mode stays on or is turned off
  (see the sketch after this list).
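
The heuristic, condensed into a standalone C++ sketch (not V8 code; the names
and the 90% threshold mirror the diff below, and the capacity gating of the
re-evaluation is simplified away):

// Minimal model of the mode introduced by this patch.
struct HeapModel {
  static constexpr double kMinPromotedPercentForFastPromotionMode = 90;
  bool fast_promotion_mode = false;

  // Minor GC entry point: once the mode is on and old space can absorb all
  // of new space, whole pages are relinked instead of objects being copied.
  void MinorGC(bool can_expand_old_generation, double survival_rate) {
    if (fast_promotion_mode && can_expand_old_generation) {
      // EvacuateYoungGeneration(): move new-space pages into old space.
    } else {
      // Scavenge(): classic copying collection of the young generation.
    }
    // Simplified: the real patch re-evaluates only when new space is at
    // maximum capacity, and only after full MCs once the mode is on.
    fast_promotion_mode =
        survival_rate >= kMinPromotedPercentForFastPromotionMode;
  }
};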

BUG=chromium:693413

Change-Id: Ifdf296092a9bac609f9dcdfb47a24046f3093745
Reviewed-on: https://chromium-review.googlesource.com/442560
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Cr-Commit-Position: refs/heads/master@{#43276}
parent 454816f0
@@ -829,6 +829,8 @@ DEFINE_BOOL(move_object_start, true, "enable moving of object starts")
 DEFINE_BOOL(memory_reducer, true, "use memory reducer")
 DEFINE_INT(heap_growing_percent, 0,
            "specifies heap growing factor as (1 + heap_growing_percent/100)")
+DEFINE_BOOL(fast_promotion_new_space, false,
+            "fast promote new space on high survival rates")
 
 // spaces.cc
 DEFINE_INT(v8_os_page_size, 0, "override OS page size (in KBytes)")
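
How the new flag might be exercised (a hypothetical invocation; it assumes a
d8 shell built at this revision, with --trace-gc-verbose added so that the
decisions printed by ComputeFastPromotionMode below become visible):

  ./out/x64.release/d8 --fast-promotion-new-space --trace-gc-verbose script.js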
@@ -445,6 +445,7 @@ void GCTracer::PrintNVP() const {
           "gc=%s "
           "reduce_memory=%d "
           "scavenge=%.2f "
+          "evacuate=%.2f "
           "old_new=%.2f "
           "weak=%.2f "
           "roots=%.2f "
@@ -481,6 +482,7 @@ void GCTracer::PrintNVP() const {
           "context_disposal_rate=%.1f\n",
           duration, spent_in_mutator, current_.TypeName(true),
           current_.reduce_memory, current_.scopes[Scope::SCAVENGER_SCAVENGE],
+          current_.scopes[Scope::SCAVENGER_EVACUATE],
           current_.scopes[Scope::SCAVENGER_OLD_TO_NEW_POINTERS],
           current_.scopes[Scope::SCAVENGER_WEAK],
           current_.scopes[Scope::SCAVENGER_ROOTS],
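
For reference, the scavenger section of a --trace-gc-nvp line built from this
format string would look roughly as follows (values invented for illustration;
"s" is the short type name the tracer uses for scavenges):

  gc=s reduce_memory=0 scavenge=0.52 evacuate=0.00 old_new=0.11 weak=0.01 roots=0.08 ...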
@@ -87,6 +87,7 @@ enum ScavengeSpeedMode { kForAllObjects, kForSurvivedObjects };
   F(MINOR_MC_MARK_ROOTS)             \
   F(MINOR_MC_MARK_WEAK)              \
   F(SCAVENGER_CODE_FLUSH_CANDIDATES) \
+  F(SCAVENGER_EVACUATE)              \
   F(SCAVENGER_OLD_TO_NEW_POINTERS)   \
   F(SCAVENGER_ROOTS)                 \
   F(SCAVENGER_SCAVENGE)              \
@@ -692,6 +692,10 @@ Isolate* Heap::isolate() {
       reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(16)->heap()) + 16);
 }
 
+void Heap::ExternalStringTable::PromoteAllNewSpaceStrings() {
+  old_space_strings_.AddAll(new_space_strings_);
+  new_space_strings_.Clear();
+}
+
 void Heap::ExternalStringTable::AddString(String* string) {
   DCHECK(string->IsExternalString());
@@ -157,6 +157,7 @@ Heap::Heap()
       strong_roots_list_(NULL),
       heap_iterator_depth_(0),
       local_embedder_heap_tracer_(nullptr),
+      fast_promotion_mode_(false),
       force_oom_(false),
       delay_sweeper_tasks_for_testing_(false) {
   // Allow build-time customization of the max semispace size. Building
@@ -1357,7 +1358,10 @@ bool Heap::PerformGarbageCollection(
       MinorMarkCompact();
       break;
     case SCAVENGER:
-      Scavenge();
+      if (fast_promotion_mode_ && CanExpandOldGeneration(new_space()->Size()))
+        EvacuateYoungGeneration();
+      else
+        Scavenge();
       break;
   }
@@ -1367,6 +1371,10 @@
   UpdateSurvivalStatistics(start_new_space_size);
   ConfigureInitialOldGenerationSize();
 
+  if (!fast_promotion_mode_ || collector == MARK_COMPACTOR) {
+    ComputeFastPromotionMode(promotion_ratio_ + semi_space_copied_rate_);
+  }
+
   isolate_->counters()->objs_since_last_young()->Set(0);
 
   gc_post_processing_depth_++;
@@ -1604,6 +1612,44 @@ class ScavengeWeakObjectRetainer : public WeakObjectRetainer {
   Heap* heap_;
 };
 
+void Heap::EvacuateYoungGeneration() {
+  TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_EVACUATE);
+  DCHECK(fast_promotion_mode_);
+  DCHECK(CanExpandOldGeneration(new_space()->Size()));
+
+  mark_compact_collector()->sweeper().EnsureNewSpaceCompleted();
+
+  SetGCState(SCAVENGE);
+  LOG(isolate_, ResourceEvent("scavenge", "begin"));
+
+  // Move pages from new->old generation.
+  PageRange range(new_space()->bottom(), new_space()->top());
+  for (auto it = range.begin(); it != range.end();) {
+    Page* p = (*++it)->prev_page();
+    p->Unlink();
+    Page::ConvertNewToOld(p);
+    if (incremental_marking()->IsMarking())
+      mark_compact_collector()->RecordLiveSlotsOnPage(p);
+  }
+
+  // Reset new space.
+  if (!new_space()->Rebalance()) {
+    FatalProcessOutOfMemory("NewSpace::Rebalance");
+  }
+  new_space()->ResetAllocationInfo();
+  new_space()->set_age_mark(new_space()->top());
+
+  // Fix up special trackers.
+  external_string_table_.PromoteAllNewSpaceStrings();
+  // GlobalHandles are updated in PostGarbageCollectionProcessing.
+
+  IncrementYoungSurvivorsCounter(new_space()->Size());
+  IncrementPromotedObjectsSize(new_space()->Size());
+  IncrementSemiSpaceCopiedObjectSize(0);
+
+  LOG(isolate_, ResourceEvent("scavenge", "end"));
+  SetGCState(NOT_IN_GC);
+}
+
 void Heap::Scavenge() {
   TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE);
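
One subtlety in the page-moving loop above: the iterator is advanced past a
page before that page is unlinked, so removing the page from the space's page
list never invalidates the iterator (that is what
Page* p = (*++it)->prev_page(); accomplishes). The same discipline, shown on a
standard container (a sketch; V8's page list is intrusive, not a std::list):

#include <cassert>
#include <list>

int main() {
  std::list<int> pages = {1, 2, 3, 4};
  for (auto it = pages.begin(); it != pages.end();) {
    auto victim = it++;   // step past the element first...
    pages.erase(victim);  // ...then removing it cannot invalidate the iterator
  }
  assert(pages.empty());
  return 0;
}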
@@ -1745,6 +1791,17 @@ void Heap::Scavenge() {
   SetGCState(NOT_IN_GC);
 }
 
+void Heap::ComputeFastPromotionMode(double survival_rate) {
+  if (new_space_->IsAtMaximumCapacity() && !FLAG_optimize_for_size) {
+    fast_promotion_mode_ =
+        FLAG_fast_promotion_new_space &&
+        survival_rate >= kMinPromotedPercentForFastPromotionMode;
+    if (FLAG_trace_gc_verbose) {
+      PrintIsolate(isolate(), "Fast promotion mode: %s survival rate: %f%%\n",
+                   fast_promotion_mode_ ? "true" : "false", survival_rate);
+    }
+  }
+}
+
 String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
                                                                 Object** p) {
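
The survival_rate argument is promotion_ratio_ + semi_space_copied_rate_ (see
the PerformGarbageCollection hunk above): the percentage of new-space bytes
that survived the last minor GC either by being promoted or by being copied
within the semispaces. A worked example (numbers invented for illustration):

#include <cstdio>

int main() {
  // Hypothetical minor-GC outcome, in percent of new-space bytes:
  double promotion_ratio = 70.0;         // promoted to old space
  double semi_space_copied_rate = 25.0;  // copied within the semispaces
  double survival_rate = promotion_ratio + semi_space_copied_rate;  // 95.0
  // 95 >= kMinPromotedPercentForFastPromotionMode (90), so with the flag on
  // and new space at maximum capacity the mode switches on, and the next
  // minor GC evacuates whole pages instead of scavenging.
  std::printf("fast promotion: %s\n", survival_rate >= 90.0 ? "on" : "off");
  return 0;
}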
@@ -5170,7 +5227,7 @@ const double Heap::kMaxHeapGrowingFactorMemoryConstrained = 2.0;
 const double Heap::kMaxHeapGrowingFactorIdle = 1.5;
 const double Heap::kConservativeHeapGrowingFactor = 1.3;
 const double Heap::kTargetMutatorUtilization = 0.97;
+const double Heap::kMinPromotedPercentForFastPromotionMode = 90;
 
 // Given GC speed in bytes per ms, the allocation throughput in bytes per ms
 // (mutator speed), this function returns the heap growing factor that will
@@ -637,6 +637,8 @@ class Heap {
   // The minimum size of a HeapObject on the heap.
   static const int kMinObjectSizeInWords = 2;
 
+  static const double kMinPromotedPercentForFastPromotionMode;
+
   STATIC_ASSERT(kUndefinedValueRootIndex ==
                 Internals::kUndefinedValueRootIndex);
   STATIC_ASSERT(kTheHoleValueRootIndex == Internals::kTheHoleValueRootIndex);
@@ -1516,6 +1518,7 @@
     inline void IterateAll(ObjectVisitor* v);
     inline void IterateNewSpaceStrings(ObjectVisitor* v);
+    inline void PromoteAllNewSpaceStrings();
 
     // Restores internal invariant and gets rid of collected strings. Must be
     // called after each Iterate*() that modified the strings.
@@ -1750,6 +1753,8 @@
   void InvokeOutOfMemoryCallback();
 
+  void ComputeFastPromotionMode(double survival_rate);
+
   // Attempt to over-approximate the weak closure by marking object groups and
   // implicit references from global handles, but don't atomically complete
   // marking. If we continue to mark incrementally, we might have marked
@@ -1793,6 +1798,7 @@
   // Performs a minor collection in new generation.
   void Scavenge();
+  void EvacuateYoungGeneration();
 
   Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
@@ -1873,7 +1879,7 @@
   bool always_allocate() { return always_allocate_scope_count_.Value() != 0; }
 
-  bool CanExpandOldGeneration(int size) {
+  bool CanExpandOldGeneration(size_t size) {
     if (force_oom_) return false;
     return (OldGenerationCapacity() + size) < MaxOldGenerationSize();
   }
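
The parameter widening from int to size_t matches the new caller, which passes
new_space()->Size() (a size_t) into CanExpandOldGeneration. A small
illustration of why the signed 32-bit parameter was a hazard (hypothetical
sizes; the out-of-range conversion wraps on common platforms):

#include <cstddef>
#include <cstdio>

int main() {
  std::size_t size = 3221225472u;         // 3 GiB, exceeds INT_MAX
  int narrowed = static_cast<int>(size);  // typically wraps to a negative value
  std::printf("size_t: %zu  int: %d\n", size, narrowed);
  return 0;
}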
@@ -2336,6 +2342,8 @@
   LocalEmbedderHeapTracer* local_embedder_heap_tracer_;
 
+  bool fast_promotion_mode_;
+
   // Used for testing purposes.
   bool force_oom_;
   bool delay_sweeper_tasks_for_testing_;
@@ -3494,6 +3494,11 @@ static void VerifyAllBlackObjects(MemoryChunk* page) {
 }
 #endif  // VERIFY_HEAP
 
+void MarkCompactCollector::RecordLiveSlotsOnPage(Page* page) {
+  EvacuateRecordOnlyVisitor visitor(heap());
+  VisitLiveObjects(page, &visitor, kKeepMarking);
+}
+
 template <class Visitor>
 bool MarkCompactCollector::VisitLiveObjects(MemoryChunk* page, Visitor* visitor,
                                             IterationMode mode) {
@@ -540,6 +540,7 @@ class MarkCompactCollector {
   INLINE(void RecordSlot(HeapObject* object, Object** slot, Object* target));
   INLINE(void ForceRecordSlot(HeapObject* object, Object** slot,
                               Object* target));
+  void RecordLiveSlotsOnPage(Page* page);
 
   void UpdateSlots(SlotsBuffer* buffer);
   void UpdateSlotsRecordedIn(SlotsBuffer* buffer);
@@ -208,8 +208,9 @@ Page* Page::Initialize(Heap* heap, MemoryChunk* chunk, Executability executable,
 }
 
 Page* Page::ConvertNewToOld(Page* old_page) {
-  OldSpace* old_space = old_page->heap()->old_space();
   DCHECK(!old_page->is_anchor());
   DCHECK(old_page->InNewSpace());
+  OldSpace* old_space = old_page->heap()->old_space();
+  old_page->set_owner(old_space);
   old_page->SetFlags(0, ~0);
   old_space->AccountCommitted(old_page->size());