Commit fffa8873 authored by Dominik Inführ, committed by V8 LUCI CQ

Revert "[heap] Add IncrementalMarking::AdvanceOnTask as new bottleneck"

This reverts commit 01aed57e.

Reason for revert: Might have caused some regressions, see https://crbug.com/1351991.

Original change's description:
> [heap] Add IncrementalMarking::AdvanceOnTask as new bottleneck
>
> Introduce common bottleneck for all incremental marking step
> invocations from a task context. This will later be used to move
> code out of IncrementalMarking::Step.
>
> Bug: v8:11708
> Change-Id: Iba2dc2402083f8b4152ded56eaf0e13d473442a8
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3822682
> Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
> Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
> Cr-Commit-Position: refs/heads/main@{#82343}

Bug: v8:11708
Change-Id: I1ec74974d90b865baf223f9820f5bf346f113d86
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3827865
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82422}
parent e6804d01
......@@ -4017,7 +4017,10 @@ bool Heap::PerformIdleTimeAction(GCIdleTimeAction action,
result = true;
break;
case GCIdleTimeAction::kIncrementalStep: {
incremental_marking()->AdvanceFromTask();
incremental_marking()->AdvanceWithDeadline(deadline_in_ms,
StepOrigin::kTask);
FinalizeIncrementalMarkingIfComplete(
GarbageCollectionReason::kFinalizeMarkingViaTask);
result = incremental_marking()->IsStopped();
break;
}
......
......@@ -20,6 +20,8 @@ namespace internal {
class IncrementalMarkingJob::Task : public CancelableTask {
public:
static StepResult Step(Heap* heap);
Task(Isolate* isolate, IncrementalMarkingJob* job,
EmbedderHeapTracer::EmbedderStackState stack_state, TaskType task_type)
: CancelableTask(isolate),
......@@ -48,40 +50,48 @@ void IncrementalMarkingJob::Start(Heap* heap) {
void IncrementalMarkingJob::ScheduleTask(Heap* heap, TaskType task_type) {
base::MutexGuard guard(&mutex_);
if (IsTaskPending(task_type) || heap->IsTearingDown() ||
!FLAG_incremental_marking_task) {
return;
}
v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(heap->isolate());
SetTaskPending(task_type, true);
auto taskrunner = V8::GetCurrentPlatform()->GetForegroundTaskRunner(isolate);
const EmbedderHeapTracer::EmbedderStackState stack_state =
taskrunner->NonNestableTasksEnabled()
? EmbedderHeapTracer::EmbedderStackState::kNoHeapPointers
: EmbedderHeapTracer::EmbedderStackState::kMayContainHeapPointers;
auto task =
std::make_unique<Task>(heap->isolate(), this, stack_state, task_type);
if (task_type == TaskType::kNormal) {
scheduled_time_ = heap->MonotonicallyIncreasingTimeInMs();
if (taskrunner->NonNestableTasksEnabled()) {
taskrunner->PostNonNestableTask(std::move(task));
if (!IsTaskPending(task_type) && !heap->IsTearingDown() &&
FLAG_incremental_marking_task) {
v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(heap->isolate());
SetTaskPending(task_type, true);
auto taskrunner =
V8::GetCurrentPlatform()->GetForegroundTaskRunner(isolate);
const EmbedderHeapTracer::EmbedderStackState stack_state =
taskrunner->NonNestableTasksEnabled()
? EmbedderHeapTracer::EmbedderStackState::kNoHeapPointers
: EmbedderHeapTracer::EmbedderStackState::kMayContainHeapPointers;
auto task =
std::make_unique<Task>(heap->isolate(), this, stack_state, task_type);
if (task_type == TaskType::kNormal) {
scheduled_time_ = heap->MonotonicallyIncreasingTimeInMs();
if (taskrunner->NonNestableTasksEnabled()) {
taskrunner->PostNonNestableTask(std::move(task));
} else {
taskrunner->PostTask(std::move(task));
}
} else {
taskrunner->PostTask(std::move(task));
}
} else {
if (taskrunner->NonNestableDelayedTasksEnabled()) {
taskrunner->PostNonNestableDelayedTask(std::move(task), kDelayInSeconds);
} else {
taskrunner->PostDelayedTask(std::move(task), kDelayInSeconds);
if (taskrunner->NonNestableDelayedTasksEnabled()) {
taskrunner->PostNonNestableDelayedTask(std::move(task),
kDelayInSeconds);
} else {
taskrunner->PostDelayedTask(std::move(task), kDelayInSeconds);
}
}
}
}
// Runs one bounded incremental-marking step on behalf of a marking task.
// Grants the marker a small fixed time budget past "now", then finalizes
// marking if the step completed it. Returns the step's result so the
// caller can decide how to reschedule.
StepResult IncrementalMarkingJob::Task::Step(Heap* heap) {
  // Budget of 1ms beyond the current monotonic time.
  constexpr int kIncrementalMarkingDelayMs = 1;
  const double deadline_in_ms =
      heap->MonotonicallyIncreasingTimeInMs() + kIncrementalMarkingDelayMs;
  const StepResult step_result = heap->incremental_marking()->AdvanceWithDeadline(
      deadline_in_ms, i::StepOrigin::kTask);
  // If the step finished marking, kick off finalization right away.
  heap->FinalizeIncrementalMarkingIfComplete(
      GarbageCollectionReason::kFinalizeMarkingViaTask);
  return step_result;
}
void IncrementalMarkingJob::Task::RunInternal() {
VMState<GC> state(isolate());
TRACE_EVENT_CALL_STATS_SCOPED(isolate(), "v8", "V8.Task");
......@@ -94,7 +104,6 @@ void IncrementalMarkingJob::Task::RunInternal() {
heap->MonotonicallyIncreasingTimeInMs() - job_->scheduled_time_);
job_->scheduled_time_ = 0.0;
}
IncrementalMarking* incremental_marking = heap->incremental_marking();
if (incremental_marking->IsStopped()) {
if (heap->IncrementalMarkingLimitReached() !=
......@@ -112,15 +121,18 @@ void IncrementalMarkingJob::Task::RunInternal() {
job_->SetTaskPending(task_type_, false);
}
if (incremental_marking->IsRunning()) {
if (!incremental_marking->IsStopped()) {
// All objects are initialized at that point.
heap->new_space()->MarkLabStartInitialized();
heap->new_lo_space()->ResetPendingObject();
heap->incremental_marking()->AdvanceFromTask();
if (incremental_marking->IsRunning()) {
job_->ScheduleTask(heap, TaskType::kDelayed);
StepResult step_result = Step(heap);
if (!incremental_marking->IsStopped()) {
const TaskType task_type =
incremental_marking->IsComplete() ||
step_result != StepResult::kNoImmediateWork
? TaskType::kNormal
: TaskType::kDelayed;
job_->ScheduleTask(heap, task_type);
}
}
}
......
......@@ -708,12 +708,6 @@ StepResult IncrementalMarking::AdvanceWithDeadline(double deadline_in_ms,
return Step(kStepSizeInMs, step_origin);
}
// Single bottleneck for incremental marking steps triggered from a task:
// performs one marking step and finalizes marking if it is complete.
void IncrementalMarking::AdvanceFromTask() {
// NOTE(review): deadline 0 — AdvanceWithDeadline appears to step by a fixed
// kStepSizeInMs regardless of the deadline argument (see its visible tail);
// confirm that 0 is intentional here.
AdvanceWithDeadline(0, StepOrigin::kTask);
heap()->FinalizeIncrementalMarkingIfComplete(
GarbageCollectionReason::kFinalizeMarkingViaTask);
}
size_t IncrementalMarking::StepSizeToKeepUpWithAllocations() {
// Update bytes_allocated_ based on the allocation counter.
size_t current_counter = heap_->OldGenerationAllocationCounter();
......
......@@ -123,9 +123,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
// marking schedule, which is indicated with StepResult::kDone.
StepResult AdvanceWithDeadline(double deadline_in_ms, StepOrigin step_origin);
// Performs incremental marking step and finalizes marking if complete.
void AdvanceFromTask();
// Performs incremental marking step and schedules job for finalization if
// marking completes.
void AdvanceOnAllocation();
......
......@@ -84,7 +84,13 @@ void MemoryReducer::NotifyTimer(const Event& event) {
// Make progress with pending incremental marking if memory usage has
// higher priority than latency. This is important for background tabs
// that do not send idle notifications.
heap()->incremental_marking()->AdvanceFromTask();
const int kIncrementalMarkingDelayMs = 500;
double deadline = heap()->MonotonicallyIncreasingTimeInMs() +
kIncrementalMarkingDelayMs;
heap()->incremental_marking()->AdvanceWithDeadline(deadline,
StepOrigin::kTask);
heap()->FinalizeIncrementalMarkingIfComplete(
GarbageCollectionReason::kFinalizeMarkingViaTask);
}
// Re-schedule the timer.
ScheduleTimer(state_.next_gc_start_ms - event.time_ms);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.