Commit cf83949e authored by Ulan Degenbaev, committed by Commit Bot

[heap] Add a flag for measuring the impact of stack scanning in Scavenge

Change-Id: I3d5d856d86deb283173c7b6f0f302e3c4e4b67fb
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2190755
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#67729}
parent 302bfa21
......@@ -909,6 +909,8 @@ DEFINE_BOOL(parallel_scavenge, true, "parallel scavenge")
DEFINE_BOOL(scavenge_task, true, "schedule scavenge tasks")
DEFINE_INT(scavenge_task_trigger, 80,
"scavenge task trigger in percent of the current heap limit")
// Measurement-only flag: enables a separate stack-scanning phase in the
// Scavenger (see ScavengerCollector::IterateStackAndScavenge, whose comment
// states it is not intended for production). Default to false so production
// scavenges keep scanning the stack together with the other roots; flip on
// explicitly when measuring the impact of stack scanning.
DEFINE_BOOL(scavenge_separate_stack_scanning, false,
            "use a separate phase for stack scanning in scavenge")
DEFINE_BOOL(trace_parallel_scavenge, false, "trace parallel scavenge")
DEFINE_BOOL(write_protect_code_memory, true, "write protect code memory")
#ifdef V8_CONCURRENT_MARKING
......
......@@ -4606,8 +4606,7 @@ void Heap::IterateRoots(RootVisitor* v, base::EnumSet<SkipRoot> options) {
v->Synchronize(VisitorSynchronization::kGlobalHandles);
if (!options.contains(SkipRoot::kStack)) {
isolate_->Iterate(v);
isolate_->global_handles()->IterateStrongStackRoots(v);
IterateStackRoots(v);
v->Synchronize(VisitorSynchronization::kTop);
}
......@@ -4677,6 +4676,11 @@ void Heap::IterateBuiltins(RootVisitor* v) {
STATIC_ASSERT(Builtins::AllBuiltinsAreIsolateIndependent());
}
// Visits the strong roots that live on (or are tied to) the stack: the
// isolate's own iteration (thread-local top / stack frames — confirm exact
// coverage in Isolate::Iterate) followed by the global handles registered
// as strong stack roots. Factored out of IterateRoots so the Scavenger can
// run stack scanning as a separate phase.
void Heap::IterateStackRoots(RootVisitor* v) {
isolate_->Iterate(v);
isolate_->global_handles()->IterateStrongStackRoots(v);
}
namespace {
size_t GlobalMemorySizeFromV8Size(size_t v8_size) {
const size_t kGlobalMemoryToV8Ratio = 2;
......
......@@ -938,10 +938,9 @@ class Heap {
void IterateSmiRoots(RootVisitor* v);
// Iterates over weak string tables.
void IterateWeakRoots(RootVisitor* v, base::EnumSet<SkipRoot> options);
// Iterates over weak global handles.
void IterateWeakGlobalHandles(RootVisitor* v);
// Iterates over builtins.
void IterateBuiltins(RootVisitor* v);
void IterateStackRoots(RootVisitor* v);
// ===========================================================================
// Store buffer API. =========================================================
......
......@@ -299,11 +299,13 @@ void ScavengerCollector::CollectGarbage() {
// Scavenger treats all weak roots except for global handles as strong.
// That is why we don't set skip_weak = true here and instead visit
// global handles separately.
heap_->IterateRoots(
&root_scavenge_visitor,
base::EnumSet<SkipRoot>{SkipRoot::kExternalStringTable,
SkipRoot::kGlobalHandles,
SkipRoot::kOldGeneration});
base::EnumSet<SkipRoot> options({SkipRoot::kExternalStringTable,
SkipRoot::kGlobalHandles,
SkipRoot::kOldGeneration});
if (V8_UNLIKELY(FLAG_scavenge_separate_stack_scanning)) {
options.Add(SkipRoot::kStack);
}
heap_->IterateRoots(&root_scavenge_visitor, options);
isolate_->global_handles()->IterateYoungStrongAndDependentRoots(
&root_scavenge_visitor);
}
......@@ -314,6 +316,14 @@ void ScavengerCollector::CollectGarbage() {
DCHECK(copied_list.IsEmpty());
DCHECK(promotion_list.IsEmpty());
}
if (V8_UNLIKELY(FLAG_scavenge_separate_stack_scanning)) {
IterateStackAndScavenge(&root_scavenge_visitor, scavengers,
num_scavenge_tasks, kMainThreadId);
DCHECK(copied_list.IsEmpty());
DCHECK(promotion_list.IsEmpty());
}
{
// Scavenge weak global handles.
TRACE_GC(heap_->tracer(),
......@@ -405,6 +415,39 @@ void ScavengerCollector::CollectGarbage() {
heap_->IncrementYoungSurvivorsCounter(heap_->SurvivedYoungObjectSize());
}
// Scans the stack for roots, scavenges the newly discovered objects on the
// main thread, and reports the survival statistics from before and after the
// stack scan so the impact of stack scanning can be measured.
// This code is not intended for production; it runs only when
// FLAG_scavenge_separate_stack_scanning is enabled.
//
// |root_scavenge_visitor| visits the discovered stack roots;
// |scavengers| is the array of per-task scavengers (|num_scavenge_tasks|
// entries); |main_thread_id| indexes the scavenger that processes the
// stack-discovered objects.
void ScavengerCollector::IterateStackAndScavenge(
    RootScavengeVisitor* root_scavenge_visitor, Scavenger** scavengers,
    int num_scavenge_tasks, int main_thread_id) {
  TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_STACK_ROOTS);
  // Snapshot survived bytes across all scavenger tasks before the stack scan.
  size_t survived_bytes_before = 0;
  for (int i = 0; i < num_scavenge_tasks; i++) {
    survived_bytes_before +=
        scavengers[i]->bytes_copied() + scavengers[i]->bytes_promoted();
  }
  heap_->IterateStackRoots(root_scavenge_visitor);
  // Only the main-thread scavenger processes the objects found on the stack.
  scavengers[main_thread_id]->Process();
  size_t survived_bytes_after = 0;
  for (int i = 0; i < num_scavenge_tasks; i++) {
    survived_bytes_after +=
        scavengers[i]->bytes_copied() + scavengers[i]->bytes_promoted();
  }
  TRACE_EVENT2(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
               "V8.GCScavengerStackScanning", "survived_bytes_before",
               survived_bytes_before, "survived_bytes_after",
               survived_bytes_after);
  if (FLAG_trace_gc_verbose && !FLAG_trace_gc_ignore_scavenger) {
    // The byte counters are monotonic, so after >= before holds and the
    // unsigned subtraction is safe. Guard the division: when nothing
    // survived at all, the original expression divided by zero and printed
    // nan/inf; report 0% instead.
    const double delta_percent =
        survived_bytes_after > 0
            ? (survived_bytes_after - survived_bytes_before) * 100.0 /
                  survived_bytes_after
            : 0.0;
    isolate_->PrintWithTimestamp(
        "Scavenge stack scanning: survived_before=%4zuKB, "
        "survived_after=%4zuKB delta=%.1f%%\n",
        survived_bytes_before / KB, survived_bytes_after / KB, delta_percent);
  }
}
// Requests sweeping of the young-generation ArrayBuffer extensions after a
// scavenge. This only files the request with the array buffer sweeper;
// presumably the sweep itself runs elsewhere/asynchronously — confirm in
// ArrayBufferSweeper::RequestSweepYoung.
void ScavengerCollector::SweepArrayBufferExtensions() {
heap_->array_buffer_sweeper()->RequestSweepYoung();
}
......
......@@ -15,6 +15,8 @@ namespace v8 {
namespace internal {
class OneshotBarrier;
class RootScavengeVisitor;
class Scavenger;
enum class CopyAndForwardResult {
SUCCESS_YOUNG_GENERATION,
......@@ -53,6 +55,9 @@ class ScavengerCollector {
void SweepArrayBufferExtensions();
void IterateStackAndScavenge(RootScavengeVisitor* root_scavenge_visitor,
Scavenger** scavengers, int num_scavenge_tasks,
int main_thread_id);
Isolate* const isolate_;
Heap* const heap_;
base::Semaphore parallel_scavenge_semaphore_;
......
......@@ -495,6 +495,7 @@
F(SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_PROCESS) \
F(SCAVENGER_SCAVENGE_PARALLEL) \
F(SCAVENGER_SCAVENGE_ROOTS) \
F(SCAVENGER_SCAVENGE_STACK_ROOTS) \
F(SCAVENGER_SCAVENGE_UPDATE_REFS) \
F(SCAVENGER_SCAVENGE_WEAK) \
F(SCAVENGER_SCAVENGE_FINALIZE) \
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment