Commit 01aed57e authored by Dominik Inführ, committed by V8 LUCI CQ

[heap] Add IncrementalMarking::AdvanceOnTask as new bottleneck

Introduce common bottleneck for all incremental marking step
invocations from a task context. This will later be used to move
code out of IncrementalMarking::Step.

Bug: v8:11708
Change-Id: Iba2dc2402083f8b4152ded56eaf0e13d473442a8
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3822682
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82343}
parent 65d43890
......@@ -4024,10 +4024,7 @@ bool Heap::PerformIdleTimeAction(GCIdleTimeAction action,
result = true;
break;
case GCIdleTimeAction::kIncrementalStep: {
incremental_marking()->AdvanceWithDeadline(deadline_in_ms,
StepOrigin::kTask);
FinalizeIncrementalMarkingIfComplete(
GarbageCollectionReason::kFinalizeMarkingViaTask);
incremental_marking()->AdvanceFromTask();
result = incremental_marking()->IsStopped();
break;
}
......
......@@ -20,8 +20,6 @@ namespace internal {
class IncrementalMarkingJob::Task : public CancelableTask {
public:
static StepResult Step(Heap* heap);
Task(Isolate* isolate, IncrementalMarkingJob* job,
EmbedderHeapTracer::EmbedderStackState stack_state, TaskType task_type)
: CancelableTask(isolate),
......@@ -50,48 +48,40 @@ void IncrementalMarkingJob::Start(Heap* heap) {
// Posts a marking task of the given type unless one is already pending, the
// heap is shutting down, or the incremental-marking-task flag is disabled.
// kNormal tasks are posted immediately (non-nestable when the runner supports
// it); other task types are posted with a delay of kDelayInSeconds.
void IncrementalMarkingJob::ScheduleTask(Heap* heap, TaskType task_type) {
  base::MutexGuard guard(&mutex_);
  // Early-out: at most one task per type may be in flight, and no new work is
  // scheduled during isolate teardown or when the feature flag is off.
  if (IsTaskPending(task_type) || heap->IsTearingDown() ||
      !FLAG_incremental_marking_task) {
    return;
  }
  v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(heap->isolate());
  SetTaskPending(task_type, true);
  auto taskrunner = V8::GetCurrentPlatform()->GetForegroundTaskRunner(isolate);
  // Non-nestable tasks never run inside a nested event loop, so the stack
  // cannot contain pointers into the heap; otherwise be conservative.
  const EmbedderHeapTracer::EmbedderStackState stack_state =
      taskrunner->NonNestableTasksEnabled()
          ? EmbedderHeapTracer::EmbedderStackState::kNoHeapPointers
          : EmbedderHeapTracer::EmbedderStackState::kMayContainHeapPointers;
  auto task =
      std::make_unique<Task>(heap->isolate(), this, stack_state, task_type);
  if (task_type == TaskType::kNormal) {
    // Record when the task was scheduled so RunInternal can report latency.
    scheduled_time_ = heap->MonotonicallyIncreasingTimeInMs();
    if (taskrunner->NonNestableTasksEnabled()) {
      taskrunner->PostNonNestableTask(std::move(task));
    } else {
      taskrunner->PostTask(std::move(task));
    }
  } else {
    if (taskrunner->NonNestableDelayedTasksEnabled()) {
      taskrunner->PostNonNestableDelayedTask(std::move(task), kDelayInSeconds);
    } else {
      taskrunner->PostDelayedTask(std::move(task), kDelayInSeconds);
    }
  }
}
// Performs one incremental marking step with a ~1ms budget and finalizes
// marking if it completed. Returns the step result so the caller can decide
// how to reschedule.
StepResult IncrementalMarkingJob::Task::Step(Heap* heap) {
  constexpr int kIncrementalMarkingDelayMs = 1;
  const double deadline_ms =
      heap->MonotonicallyIncreasingTimeInMs() + kIncrementalMarkingDelayMs;
  const StepResult step_result =
      heap->incremental_marking()->AdvanceWithDeadline(deadline_ms,
                                                       i::StepOrigin::kTask);
  heap->FinalizeIncrementalMarkingIfComplete(
      GarbageCollectionReason::kFinalizeMarkingViaTask);
  return step_result;
}
void IncrementalMarkingJob::Task::RunInternal() {
VMState<GC> state(isolate());
TRACE_EVENT_CALL_STATS_SCOPED(isolate(), "v8", "V8.Task");
......@@ -104,6 +94,7 @@ void IncrementalMarkingJob::Task::RunInternal() {
heap->MonotonicallyIncreasingTimeInMs() - job_->scheduled_time_);
job_->scheduled_time_ = 0.0;
}
IncrementalMarking* incremental_marking = heap->incremental_marking();
if (incremental_marking->IsStopped()) {
if (heap->IncrementalMarkingLimitReached() !=
......@@ -121,18 +112,15 @@ void IncrementalMarkingJob::Task::RunInternal() {
job_->SetTaskPending(task_type_, false);
}
if (!incremental_marking->IsStopped()) {
if (incremental_marking->IsRunning()) {
// All objects are initialized at that point.
heap->new_space()->MarkLabStartInitialized();
heap->new_lo_space()->ResetPendingObject();
StepResult step_result = Step(heap);
if (!incremental_marking->IsStopped()) {
const TaskType task_type =
incremental_marking->IsComplete() ||
step_result != StepResult::kNoImmediateWork
? TaskType::kNormal
: TaskType::kDelayed;
job_->ScheduleTask(heap, task_type);
heap->incremental_marking()->AdvanceFromTask();
if (incremental_marking->IsRunning()) {
job_->ScheduleTask(heap, TaskType::kDelayed);
}
}
}
......
......@@ -708,6 +708,12 @@ StepResult IncrementalMarking::AdvanceWithDeadline(double deadline_in_ms,
return Step(kStepSizeInMs, step_origin);
}
// Common bottleneck for marking steps triggered from a task context: advance
// marking, then finalize if the schedule is complete.
void IncrementalMarking::AdvanceFromTask() {
  // A deadline of 0 is sufficient here; AdvanceWithDeadline appears to step by
  // its own fixed step size (see Step(kStepSizeInMs, ...)) — confirm.
  constexpr double kNoDeadline = 0;
  AdvanceWithDeadline(kNoDeadline, StepOrigin::kTask);
  heap()->FinalizeIncrementalMarkingIfComplete(
      GarbageCollectionReason::kFinalizeMarkingViaTask);
}
size_t IncrementalMarking::StepSizeToKeepUpWithAllocations() {
// Update bytes_allocated_ based on the allocation counter.
size_t current_counter = heap_->OldGenerationAllocationCounter();
......
......@@ -123,6 +123,9 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
// marking schedule, which is indicated with StepResult::kDone.
StepResult AdvanceWithDeadline(double deadline_in_ms, StepOrigin step_origin);
// Performs incremental marking step and finalizes marking if complete.
void AdvanceFromTask();
StepResult Step(double max_step_size_in_ms, StepOrigin step_origin);
// This function is used to color the object black before it undergoes an
......
......@@ -84,13 +84,7 @@ void MemoryReducer::NotifyTimer(const Event& event) {
// Make progress with pending incremental marking if memory usage has
// higher priority than latency. This is important for background tabs
// that do not send idle notifications.
const int kIncrementalMarkingDelayMs = 500;
double deadline = heap()->MonotonicallyIncreasingTimeInMs() +
kIncrementalMarkingDelayMs;
heap()->incremental_marking()->AdvanceWithDeadline(deadline,
StepOrigin::kTask);
heap()->FinalizeIncrementalMarkingIfComplete(
GarbageCollectionReason::kFinalizeMarkingViaTask);
heap()->incremental_marking()->AdvanceFromTask();
}
// Re-schedule the timer.
ScheduleTimer(state_.next_gc_start_ms - event.time_ms);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment