Commit caed2cc0 authored by Michael Lippautz, committed by Commit Bot

[heap] Improve embedder tracing during incremental marking

Add a path into embedder tracing on allocation. This is safe as Blink
is not allowed to call into V8 during object construction.

Bug: chromium:843903
Change-Id: I5af053c3169f5a33778ebce5d7c5c43e4efb1aa4
Reviewed-on: https://chromium-review.googlesource.com/c/1348749
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#57757}
parent 78ca705f
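
The effect on the allocation path is an alternation between regular
byte-budgeted V8 marking steps and embedder tracing steps, gated by a
toggle (see IncrementalMarking::AdvanceIncrementalMarkingOnAllocation in
the diff below). A minimal standalone sketch of that dispatch; the helper
names here are hypothetical stand-ins, not V8 API:

    // Hypothetical stand-ins; the real logic lives in the diff below.
    bool ShouldDoEmbedderStep();
    void DoEmbedderTracingStep();  // drain wrappers into the embedder tracer
    void DoV8MarkingStep();        // regular byte-budgeted marking step

    bool trace_wrappers_toggle = false;

    void OnAllocationObserverStep() {
      if (ShouldDoEmbedderStep() && trace_wrappers_toggle) {
        DoEmbedderTracingStep();
      } else {
        DoV8MarkingStep();
      }
      trace_wrappers_toggle = !trace_wrappers_toggle;  // alternate each step
    }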
--- a/src/heap/embedder-tracing.cc
+++ b/src/heap/embedder-tracing.cc
@@ -14,6 +14,7 @@ void LocalEmbedderHeapTracer::TracePrologue() {
   CHECK(cached_wrappers_to_trace_.empty());
   num_v8_marking_worklist_was_empty_ = 0;
+  embedder_worklist_empty_ = false;
   remote_tracer_->TracePrologue();
 }
--- a/src/heap/embedder-tracing.h
+++ b/src/heap/embedder-tracing.h
@@ -58,13 +58,18 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
   void NotifyV8MarkingWorklistWasEmpty() {
     num_v8_marking_worklist_was_empty_++;
   }
+
   bool ShouldFinalizeIncrementalMarking() {
     static const size_t kMaxIncrementalFixpointRounds = 3;
     return !FLAG_incremental_marking_wrappers || !InUse() ||
-           IsRemoteTracingDone() ||
+           (IsRemoteTracingDone() && embedder_worklist_empty_) ||
            num_v8_marking_worklist_was_empty_ > kMaxIncrementalFixpointRounds;
   }
+
+  void SetEmbedderWorklistEmpty(bool empty) {
+    embedder_worklist_empty_ = empty;
+  }
+
   void SetEmbedderStackStateForNextFinalization(
       EmbedderHeapTracer::EmbedderStackState stack_state);
@@ -78,6 +83,11 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
   EmbedderHeapTracer::EmbedderStackState embedder_stack_state_ =
       EmbedderHeapTracer::kUnknown;
 
+  // Indicates whether the embedder worklist was observed empty on the main
+  // thread. This is opportunistic as concurrent marking tasks may hold local
+  // segments of potential embedder fields to move to the main thread.
+  bool embedder_worklist_empty_ = false;
+
   friend class EmbedderStackStateScope;
 };
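
The finalization check is now a two-sided fix point: the remote tracer must
report that it is done and V8 must have observed its embedder worklist empty,
with kMaxIncrementalFixpointRounds as an escape hatch. A hedged driver sketch
around that condition; ProcessEmbedderWorklistChunk and V8WorklistEmpty are
hypothetical stand-ins, only the tracer calls mirror the API above:

    // Hypothetical stand-ins, declared for illustration only.
    bool ProcessEmbedderWorklistChunk();  // true if the worklist drained
    bool V8WorklistEmpty();

    void DriveWrapperTracing(LocalEmbedderHeapTracer* tracer) {
      while (!tracer->ShouldFinalizeIncrementalMarking()) {
        // Publish whether the V8-side embedder worklist drained this round.
        tracer->SetEmbedderWorklistEmpty(ProcessEmbedderWorklistChunk());
        // Count a fix-point round whenever V8's own worklist was empty.
        if (V8WorklistEmpty()) tracer->NotifyV8MarkingWorklistWasEmpty();
      }
      // Safe to finalize: both sides drained, or the round cap was hit.
    }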
--- a/src/heap/incremental-marking.cc
+++ b/src/heap/incremental-marking.cc
@@ -808,30 +808,27 @@ intptr_t IncrementalMarking::ProcessMarkingWorklist(
 }
 
 void IncrementalMarking::EmbedderStep(double duration_ms) {
-  constexpr int kObjectsToProcessBeforeInterrupt = 100;
+  constexpr size_t kObjectsToProcessBeforeInterrupt = 500;
 
   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_EMBEDDER_TRACING);
+  double deadline = heap_->MonotonicallyIncreasingTimeInMs() + duration_ms;
 
-  const double deadline =
-      heap_->MonotonicallyIncreasingTimeInMs() + duration_ms;
-
-  HeapObject* object;
-  int cnt = 0;
-  while (marking_worklist()->embedder()->Pop(0, &object)) {
-    heap_->TracePossibleWrapper(JSObject::cast(object));
-    if (++cnt == kObjectsToProcessBeforeInterrupt) {
-      cnt = 0;
-      if (heap_->MonotonicallyIncreasingTimeInMs() > deadline) {
+  do {
+    HeapObject* object;
+    size_t cnt = 0;
+    bool embedder_fields_empty = true;
+    while (marking_worklist()->embedder()->Pop(0, &object)) {
+      heap_->TracePossibleWrapper(JSObject::cast(object));
+      if (++cnt == kObjectsToProcessBeforeInterrupt) {
+        cnt = 0;
+        embedder_fields_empty = false;
         break;
       }
     }
-  }
-
-  heap_->local_embedder_heap_tracer()->RegisterWrappersWithRemoteTracer();
-  if (!heap_->local_embedder_heap_tracer()
-           ->ShouldFinalizeIncrementalMarking()) {
+    heap_->local_embedder_heap_tracer()->SetEmbedderWorklistEmpty(
+        embedder_fields_empty);
+    heap_->local_embedder_heap_tracer()->RegisterWrappersWithRemoteTracer();
     heap_->local_embedder_heap_tracer()->Trace(deadline);
-  }
+  } while (heap_->MonotonicallyIncreasingTimeInMs() < deadline);
 }
 
 void IncrementalMarking::Hurry() {
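
The rewritten EmbedderStep is a deadline-bounded drain: it processes fixed
batches and re-checks the clock only between batches, so one slow item cannot
overshoot the budget by more than a batch. A stripped-down sketch of the same
shape; Item, Worklist, Now, and Process are hypothetical stand-ins, not V8
types, and unlike EmbedderStep this simplified variant stops once drained:

    #include <cstddef>

    // Hypothetical stand-ins for illustration only.
    struct Item;
    struct Worklist { bool Pop(Item** out); };
    double Now();
    void Process(Item* item);

    bool DrainWithDeadline(Worklist* worklist, double deadline_ms) {
      constexpr size_t kBatch = 500;  // mirrors kObjectsToProcessBeforeInterrupt
      do {
        size_t processed = 0;
        bool saw_empty = true;
        Item* item;
        while (worklist->Pop(&item)) {
          Process(item);
          if (++processed == kBatch) {
            saw_empty = false;  // stopped by the batch cap, not drained
            break;
          }
        }
        if (saw_empty) return true;  // worklist observed empty
      } while (Now() < deadline_ms);
      return false;  // deadline expired with work left
    }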
@@ -941,6 +938,11 @@ void IncrementalMarking::Epilogue() {
   finalize_marking_completed_ = false;
 }
 
+bool IncrementalMarking::ShouldDoEmbedderStep() {
+  return state_ == MARKING && FLAG_incremental_marking_wrappers &&
+         heap_->local_embedder_heap_tracer()->InUse();
+}
+
 double IncrementalMarking::AdvanceIncrementalMarking(
     double deadline_in_ms, CompletionAction completion_action,
     StepOrigin step_origin) {
@@ -953,23 +955,20 @@ double IncrementalMarking::AdvanceIncrementalMarking(
       0, heap_->local_embedder_heap_tracer()->NumberOfCachedWrappersToTrace());
   double remaining_time_in_ms = 0.0;
+  intptr_t step_size_in_bytes = GCIdleTimeHandler::EstimateMarkingStepSize(
+      kStepSizeInMs,
+      heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
 
-  const bool incremental_wrapper_tracing =
-      state_ == MARKING && FLAG_incremental_marking_wrappers &&
-      heap_->local_embedder_heap_tracer()->InUse();
   do {
-    if (incremental_wrapper_tracing && trace_wrappers_toggle_) {
+    if (ShouldDoEmbedderStep() && trace_wrappers_toggle_) {
       EmbedderStep(kStepSizeInMs);
     } else {
-      const intptr_t step_size_in_bytes =
-          GCIdleTimeHandler::EstimateMarkingStepSize(
-              kStepSizeInMs,
-              heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
       Step(step_size_in_bytes, completion_action, step_origin);
     }
     trace_wrappers_toggle_ = !trace_wrappers_toggle_;
     remaining_time_in_ms =
         deadline_in_ms - heap()->MonotonicallyIncreasingTimeInMs();
-  } while (remaining_time_in_ms >= kStepSizeInMs && !IsComplete() &&
+  } while (remaining_time_in_ms > kStepSizeInMs && !IsComplete() &&
            !marking_worklist()->IsEmpty());
   return remaining_time_in_ms;
 }
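
Hoisting step_size_in_bytes out of the loop computes the estimate once per
call instead of once per iteration. The estimate is, roughly, marking speed
times the step budget; a simplified sketch with illustrative constants
(V8's GCIdleTimeHandler::EstimateMarkingStepSize applies its own caps and
conservative factors, which are not reproduced here):

    #include <algorithm>
    #include <cstddef>

    // Simplified sketch of a marking-step-size estimate. The constants are
    // illustrative assumptions, not V8's values.
    size_t EstimateStepSizeBytes(double step_ms, double speed_bytes_per_ms) {
      constexpr double kConservativeRatio = 0.5;            // assumed headroom
      constexpr double kMaxStepBytes = 16.0 * 1024 * 1024;  // assumed cap
      double bytes = step_ms * speed_bytes_per_ms * kConservativeRatio;
      return static_cast<size_t>(std::min(bytes, kMaxStepBytes));
    }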
@@ -1021,49 +1020,58 @@ void IncrementalMarking::AdvanceIncrementalMarkingOnAllocation() {
     return;
   }
 
-  size_t bytes_to_process =
-      StepSizeToKeepUpWithAllocations() + StepSizeToMakeProgress();
-
-  if (bytes_to_process >= IncrementalMarking::kMinStepSizeInBytes) {
-    HistogramTimerScope incremental_marking_scope(
-        heap_->isolate()->counters()->gc_incremental_marking());
-    TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
-    TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
-    // The first step after Scavenge will see many allocated bytes.
-    // Cap the step size to distribute the marking work more uniformly.
-    size_t max_step_size = GCIdleTimeHandler::EstimateMarkingStepSize(
-        kMaxStepSizeInMs,
-        heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
-    bytes_to_process = Min(bytes_to_process, max_step_size);
-    size_t bytes_processed = 0;
-    if (FLAG_concurrent_marking) {
-      bytes_processed = Step(bytes_to_process, GC_VIA_STACK_GUARD,
-                             StepOrigin::kV8, WorklistToProcess::kBailout);
-      bytes_to_process = (bytes_processed >= bytes_to_process)
-                             ? 0
-                             : bytes_to_process - bytes_processed;
-      size_t current_bytes_marked_concurrently =
-          heap()->concurrent_marking()->TotalMarkedBytes();
-      // The concurrent_marking()->TotalMarkedBytes() is not monothonic for a
-      // short period of time when a concurrent marking task is finishing.
-      if (current_bytes_marked_concurrently > bytes_marked_concurrently_) {
-        bytes_marked_ahead_of_schedule_ +=
-            current_bytes_marked_concurrently - bytes_marked_concurrently_;
-        bytes_marked_concurrently_ = current_bytes_marked_concurrently;
-      }
-    }
-    if (bytes_marked_ahead_of_schedule_ >= bytes_to_process) {
-      // Steps performed in tasks and concurrently have put us ahead of
-      // schedule. We skip processing of marking dequeue here and thus shift
-      // marking time from inside V8 to standalone tasks.
-      bytes_marked_ahead_of_schedule_ -= bytes_to_process;
-      bytes_processed += bytes_to_process;
-      bytes_to_process = IncrementalMarking::kMinStepSizeInBytes;
-    }
-    bytes_processed += Step(bytes_to_process, GC_VIA_STACK_GUARD,
-                            StepOrigin::kV8, WorklistToProcess::kAll);
-    bytes_allocated_ -= Min(bytes_allocated_, bytes_processed);
+  HistogramTimerScope incremental_marking_scope(
+      heap_->isolate()->counters()->gc_incremental_marking());
+  TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
+  TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
+
+  if (ShouldDoEmbedderStep() && trace_wrappers_toggle_) {
+    EmbedderStep(kMaxStepSizeInMs);
+  } else {
+    size_t bytes_to_process =
+        StepSizeToKeepUpWithAllocations() + StepSizeToMakeProgress();
+    if (bytes_to_process >= IncrementalMarking::kMinStepSizeInBytes) {
+      HistogramTimerScope incremental_marking_scope(
+          heap_->isolate()->counters()->gc_incremental_marking());
+      TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
+      TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
+      // The first step after Scavenge will see many allocated bytes.
+      // Cap the step size to distribute the marking work more uniformly.
+      size_t max_step_size = GCIdleTimeHandler::EstimateMarkingStepSize(
+          kMaxStepSizeInMs,
+          heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
+      bytes_to_process = Min(bytes_to_process, max_step_size);
+      size_t bytes_processed = 0;
+      if (FLAG_concurrent_marking) {
+        bytes_processed = Step(bytes_to_process, GC_VIA_STACK_GUARD,
+                               StepOrigin::kV8, WorklistToProcess::kBailout);
+        bytes_to_process = (bytes_processed >= bytes_to_process)
+                               ? 0
+                               : bytes_to_process - bytes_processed;
+        size_t current_bytes_marked_concurrently =
+            heap()->concurrent_marking()->TotalMarkedBytes();
+        // The concurrent_marking()->TotalMarkedBytes() is not monothonic for a
+        // short period of time when a concurrent marking task is finishing.
+        if (current_bytes_marked_concurrently > bytes_marked_concurrently_) {
+          bytes_marked_ahead_of_schedule_ +=
+              current_bytes_marked_concurrently - bytes_marked_concurrently_;
+          bytes_marked_concurrently_ = current_bytes_marked_concurrently;
+        }
+      }
+      if (bytes_marked_ahead_of_schedule_ >= bytes_to_process) {
+        // Steps performed in tasks and concurrently have put us ahead of
+        // schedule. We skip processing of marking dequeue here and thus shift
+        // marking time from inside V8 to standalone tasks.
+        bytes_marked_ahead_of_schedule_ -= bytes_to_process;
+        bytes_processed += bytes_to_process;
+        bytes_to_process = IncrementalMarking::kMinStepSizeInBytes;
+      }
+      bytes_processed += Step(bytes_to_process, GC_VIA_STACK_GUARD,
+                              StepOrigin::kV8, WorklistToProcess::kAll);
+      bytes_allocated_ -= Min(bytes_allocated_, bytes_processed);
+    }
   }
+  trace_wrappers_toggle_ = !trace_wrappers_toggle_;
 }
 
 size_t IncrementalMarking::Step(size_t bytes_to_process,
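
The "ahead of schedule" bookkeeping in the hunk above gives concurrent
marking credit against the per-allocation budget, shifting marking time from
the allocating thread to background tasks. A stripped-down sketch of that
accounting; the free function and variable names are hypothetical stand-ins:

    #include <cstddef>

    // Hypothetical stand-in for the member field of the same name.
    size_t bytes_marked_ahead_of_schedule = 0;

    size_t ApplyConcurrentCredit(size_t bytes_to_process, size_t min_step) {
      if (bytes_marked_ahead_of_schedule >= bytes_to_process) {
        // Concurrent tasks already covered this budget; only a token step
        // remains for the allocating thread.
        bytes_marked_ahead_of_schedule -= bytes_to_process;
        return min_step;
      }
      return bytes_to_process;  // no credit; do the full step
    }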
--- a/src/heap/incremental-marking.h
+++ b/src/heap/incremental-marking.h
@@ -177,6 +177,8 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
   size_t Step(size_t bytes_to_process, CompletionAction action,
               StepOrigin step_origin,
               WorklistToProcess worklist_to_process = WorklistToProcess::kAll);
+
+  bool ShouldDoEmbedderStep();
   void EmbedderStep(double duration);
 
   inline void RestartIfNotMarking();