Commit de7e8865 authored by Dominik Inführ, committed by V8 LUCI CQ

[heap] Remove TaskType for incremental marking jobs

Delayed tasks were never used, so remove support for it. The only
supported task type is therefore "normal", immediately scheduled tasks.

Bug: v8:12775
Change-Id: Ifd659deae2b98f424d889e4253f79d9b031a82d1
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3849831
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82659}
parent 1478e8d0
......@@ -5442,8 +5442,7 @@ void Isolate::SetRAILMode(RAILMode rail_mode) {
}
rail_mode_.store(rail_mode);
if (old_rail_mode == PERFORMANCE_LOAD && rail_mode != PERFORMANCE_LOAD) {
heap()->incremental_marking()->incremental_marking_job()->ScheduleTask(
heap());
heap()->incremental_marking()->incremental_marking_job()->ScheduleTask();
}
if (FLAG_trace_rail) {
PrintIsolate(this, "RAIL mode: %s\n", RAILModeName(rail_mode));
......
......@@ -2025,7 +2025,7 @@ void Heap::StartIncrementalMarkingIfAllocationLimitIsReached(
gc_callback_flags);
break;
case IncrementalMarkingLimit::kSoftLimit:
incremental_marking()->incremental_marking_job()->ScheduleTask(this);
incremental_marking()->incremental_marking_job()->ScheduleTask();
break;
case IncrementalMarkingLimit::kFallbackForEmbedderLimit:
// This is a fallback case where no appropriate limits have been
......@@ -2050,7 +2050,7 @@ void Heap::StartIncrementalMarkingIfAllocationLimitIsReachedBackground() {
const size_t old_generation_space_available = OldGenerationSpaceAvailable();
if (old_generation_space_available < NewSpaceCapacity()) {
incremental_marking()->incremental_marking_job()->ScheduleTask(this);
incremental_marking()->incremental_marking_job()->ScheduleTask();
}
}
......
......@@ -21,12 +21,11 @@ namespace internal {
class IncrementalMarkingJob::Task : public CancelableTask {
public:
Task(Isolate* isolate, IncrementalMarkingJob* job,
EmbedderHeapTracer::EmbedderStackState stack_state, TaskType task_type)
EmbedderHeapTracer::EmbedderStackState stack_state)
: CancelableTask(isolate),
isolate_(isolate),
job_(job),
stack_state_(stack_state),
task_type_(task_type) {}
stack_state_(stack_state) {}
// CancelableTask overrides.
void RunInternal() override;
......@@ -37,24 +36,18 @@ class IncrementalMarkingJob::Task : public CancelableTask {
Isolate* const isolate_;
IncrementalMarkingJob* const job_;
const EmbedderHeapTracer::EmbedderStackState stack_state_;
const TaskType task_type_;
};
// Kicks off the incremental marking job by scheduling its first task.
// Precondition: incremental marking must already be running (the DCHECK
// below asserts it is not stopped).
void IncrementalMarkingJob::Start(Heap* heap) {
  DCHECK(!heap->incremental_marking()->IsStopped());
  // Delegates to ScheduleTask, which posts a foreground task unless a task
  // is already pending, the heap is tearing down, or the
  // --incremental-marking-task flag is disabled.
  ScheduleTask(heap);
}
void IncrementalMarkingJob::ScheduleTask(Heap* heap, TaskType task_type) {
void IncrementalMarkingJob::ScheduleTask() {
base::MutexGuard guard(&mutex_);
if (IsTaskPending(task_type) || heap->IsTearingDown() ||
if (is_task_pending_ || heap_->IsTearingDown() ||
!FLAG_incremental_marking_task) {
return;
}
v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(heap->isolate());
SetTaskPending(task_type, true);
v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(heap_->isolate());
is_task_pending_ = true;
auto taskrunner = V8::GetCurrentPlatform()->GetForegroundTaskRunner(isolate);
const EmbedderHeapTracer::EmbedderStackState stack_state =
......@@ -62,23 +55,14 @@ void IncrementalMarkingJob::ScheduleTask(Heap* heap, TaskType task_type) {
? EmbedderHeapTracer::EmbedderStackState::kNoHeapPointers
: EmbedderHeapTracer::EmbedderStackState::kMayContainHeapPointers;
auto task =
std::make_unique<Task>(heap->isolate(), this, stack_state, task_type);
auto task = std::make_unique<Task>(heap_->isolate(), this, stack_state);
if (task_type == TaskType::kNormal) {
scheduled_time_ = heap->MonotonicallyIncreasingTimeInMs();
scheduled_time_ = heap_->MonotonicallyIncreasingTimeInMs();
if (taskrunner->NonNestableTasksEnabled()) {
taskrunner->PostNonNestableTask(std::move(task));
} else {
taskrunner->PostTask(std::move(task));
}
if (taskrunner->NonNestableTasksEnabled()) {
taskrunner->PostNonNestableTask(std::move(task));
} else {
if (taskrunner->NonNestableDelayedTasksEnabled()) {
taskrunner->PostNonNestableDelayedTask(std::move(task), kDelayInSeconds);
} else {
taskrunner->PostDelayedTask(std::move(task), kDelayInSeconds);
}
taskrunner->PostTask(std::move(task));
}
}
......@@ -89,11 +73,10 @@ void IncrementalMarkingJob::Task::RunInternal() {
Heap* heap = isolate()->heap();
EmbedderStackStateScope scope(
heap, EmbedderStackStateScope::kImplicitThroughTask, stack_state_);
if (task_type_ == TaskType::kNormal) {
heap->tracer()->RecordTimeToIncrementalMarkingTask(
heap->MonotonicallyIncreasingTimeInMs() - job_->scheduled_time_);
job_->scheduled_time_ = 0.0;
}
heap->tracer()->RecordTimeToIncrementalMarkingTask(
heap->MonotonicallyIncreasingTimeInMs() - job_->scheduled_time_);
job_->scheduled_time_ = 0.0;
IncrementalMarking* incremental_marking = heap->incremental_marking();
if (incremental_marking->IsStopped()) {
......@@ -109,7 +92,7 @@ void IncrementalMarkingJob::Task::RunInternal() {
// scheduling a new task when starting incremental marking.
{
base::MutexGuard guard(&job_->mutex_);
job_->SetTaskPending(task_type_, false);
job_->is_task_pending_ = false;
}
if (incremental_marking->IsRunning()) {
......@@ -124,15 +107,15 @@ void IncrementalMarkingJob::Task::RunInternal() {
// immediately here. This was introduced since delayed task were
// unreliable at some point. Investigate whether this is still the case
// and whether this could be improved.
job_->ScheduleTask(heap, TaskType::kNormal);
job_->ScheduleTask();
}
}
}
double IncrementalMarkingJob::CurrentTimeToTask(Heap* heap) const {
double IncrementalMarkingJob::CurrentTimeToTask() const {
if (scheduled_time_ == 0.0) return 0.0;
return heap->MonotonicallyIncreasingTimeInMs() - scheduled_time_;
return heap_->MonotonicallyIncreasingTimeInMs() - scheduled_time_;
}
} // namespace internal
......
......@@ -18,37 +18,19 @@ class Isolate;
// step and posts another task until the marking is completed.
class IncrementalMarkingJob final {
public:
enum class TaskType { kNormal, kDelayed };
explicit IncrementalMarkingJob(Heap* heap) V8_NOEXCEPT : heap_(heap) {}
IncrementalMarkingJob() V8_NOEXCEPT = default;
void Start(Heap* heap);
void ScheduleTask(Heap* heap, TaskType task_type = TaskType::kNormal);
double CurrentTimeToTask(Heap* heap) const;
void ScheduleTask();
double CurrentTimeToTask() const;
private:
class Task;
static constexpr double kDelayInSeconds = 10.0 / 1000.0;
bool IsTaskPending(TaskType task_type) const {
return task_type == TaskType::kNormal ? normal_task_pending_
: delayed_task_pending_;
}
void SetTaskPending(TaskType task_type, bool value) {
if (task_type == TaskType::kNormal) {
normal_task_pending_ = value;
} else {
delayed_task_pending_ = value;
}
}
Heap* heap_;
base::Mutex mutex_;
double scheduled_time_ = 0.0;
bool normal_task_pending_ = false;
bool delayed_task_pending_ = false;
bool is_task_pending_ = false;
};
} // namespace internal
} // namespace v8
......
......@@ -15,6 +15,7 @@
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking-inl.h"
#include "src/heap/incremental-marking-job.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/mark-compact.h"
#include "src/heap/marking-barrier.h"
......@@ -53,6 +54,7 @@ IncrementalMarking::IncrementalMarking(Heap* heap, WeakObjects* weak_objects)
: heap_(heap),
collector_(heap->mark_compact_collector()),
weak_objects_(weak_objects),
incremental_marking_job_(heap),
new_generation_observer_(this, kYoungGenerationAllocatedThreshold),
old_generation_observer_(this, kOldGenerationAllocatedThreshold),
marking_state_(heap->isolate()),
......@@ -175,7 +177,7 @@ void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
heap_->AddAllocationObserversToAllSpaces(&old_generation_observer_,
&new_generation_observer_);
incremental_marking_job()->Start(heap_);
incremental_marking_job()->ScheduleTask();
}
bool IncrementalMarking::WhiteToGreyAndPush(HeapObject obj) {
......@@ -543,15 +545,14 @@ double IncrementalMarking::CurrentTimeToMarkingTask() const {
const double recorded_time_to_marking_task =
heap_->tracer()->AverageTimeToIncrementalMarkingTask();
const double current_time_to_marking_task =
incremental_marking_job_.CurrentTimeToTask(heap_);
incremental_marking_job_.CurrentTimeToTask();
if (recorded_time_to_marking_task == 0.0) return 0.0;
return std::max(recorded_time_to_marking_task, current_time_to_marking_task);
}
bool IncrementalMarking::ShouldWaitForTask() {
if (!completion_task_scheduled_) {
incremental_marking_job_.ScheduleTask(
heap(), IncrementalMarkingJob::TaskType::kNormal);
incremental_marking_job_.ScheduleTask();
completion_task_scheduled_ = true;
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment