Commit de7e8865 authored by Dominik Inführ, committed by V8 LUCI CQ

[heap] Remove TaskType for incremental marking jobs

Delayed tasks were never used, so remove support for them. The only
remaining task type is "normal", i.e. an immediately scheduled task.

Bug: v8:12775
Change-Id: Ifd659deae2b98f424d889e4253f79d9b031a82d1
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3849831
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82659}
parent 1478e8d0
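
For orientation before the diff: with the delayed variant gone, the job reduces to a single mutex-guarded "pending" flag plus an immediately posted foreground task. The following minimal, self-contained C++ sketch illustrates only that scheduling pattern; TaskRunner and MarkingJobSketch are illustrative stand-ins, not the v8::Platform or IncrementalMarkingJob interfaces touched below.

// Minimal sketch of the remaining scheduling pattern (illustrative types,
// not the real v8::Platform / IncrementalMarkingJob interfaces).
#include <functional>
#include <iostream>
#include <mutex>
#include <queue>

// Stand-in for a foreground task runner: tasks are queued and run as soon
// as the embedder pumps them; no delayed-posting support is needed anymore.
class TaskRunner {
 public:
  void PostTask(std::function<void()> task) { tasks_.push(std::move(task)); }
  void RunPending() {
    while (!tasks_.empty()) {
      auto task = std::move(tasks_.front());
      tasks_.pop();
      task();
    }
  }

 private:
  std::queue<std::function<void()>> tasks_;
};

// One task type, one pending flag: repeated ScheduleTask() calls coalesce
// into at most one outstanding task.
class MarkingJobSketch {
 public:
  explicit MarkingJobSketch(TaskRunner* runner) : runner_(runner) {}

  void ScheduleTask() {
    std::lock_guard<std::mutex> guard(mutex_);
    if (is_task_pending_) return;
    is_task_pending_ = true;
    runner_->PostTask([this] { RunTask(); });
  }

 private:
  void RunTask() {
    {
      std::lock_guard<std::mutex> guard(mutex_);
      is_task_pending_ = false;
    }
    std::cout << "incremental marking step\n";
  }

  TaskRunner* runner_;
  std::mutex mutex_;
  bool is_task_pending_ = false;
};

int main() {
  TaskRunner runner;
  MarkingJobSketch job(&runner);
  job.ScheduleTask();
  job.ScheduleTask();   // Coalesced: still only one pending task.
  runner.RunPending();  // Runs a single "incremental marking step".
}
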
@@ -5442,8 +5442,7 @@ void Isolate::SetRAILMode(RAILMode rail_mode) {
   }
   rail_mode_.store(rail_mode);
   if (old_rail_mode == PERFORMANCE_LOAD && rail_mode != PERFORMANCE_LOAD) {
-    heap()->incremental_marking()->incremental_marking_job()->ScheduleTask(
-        heap());
+    heap()->incremental_marking()->incremental_marking_job()->ScheduleTask();
   }
   if (FLAG_trace_rail) {
     PrintIsolate(this, "RAIL mode: %s\n", RAILModeName(rail_mode));
...
@@ -2025,7 +2025,7 @@ void Heap::StartIncrementalMarkingIfAllocationLimitIsReached(
                               gc_callback_flags);
       break;
     case IncrementalMarkingLimit::kSoftLimit:
-      incremental_marking()->incremental_marking_job()->ScheduleTask(this);
+      incremental_marking()->incremental_marking_job()->ScheduleTask();
       break;
     case IncrementalMarkingLimit::kFallbackForEmbedderLimit:
       // This is a fallback case where no appropriate limits have been
@@ -2050,7 +2050,7 @@ void Heap::StartIncrementalMarkingIfAllocationLimitIsReachedBackground() {
   const size_t old_generation_space_available = OldGenerationSpaceAvailable();

   if (old_generation_space_available < NewSpaceCapacity()) {
-    incremental_marking()->incremental_marking_job()->ScheduleTask(this);
+    incremental_marking()->incremental_marking_job()->ScheduleTask();
   }
 }
...
@@ -21,12 +21,11 @@ namespace internal {
 class IncrementalMarkingJob::Task : public CancelableTask {
  public:
   Task(Isolate* isolate, IncrementalMarkingJob* job,
-       EmbedderHeapTracer::EmbedderStackState stack_state, TaskType task_type)
+       EmbedderHeapTracer::EmbedderStackState stack_state)
       : CancelableTask(isolate),
         isolate_(isolate),
         job_(job),
-        stack_state_(stack_state),
-        task_type_(task_type) {}
+        stack_state_(stack_state) {}

   // CancelableTask overrides.
   void RunInternal() override;
@@ -37,24 +36,18 @@ class IncrementalMarkingJob::Task : public CancelableTask {
   Isolate* const isolate_;
   IncrementalMarkingJob* const job_;
   const EmbedderHeapTracer::EmbedderStackState stack_state_;
-  const TaskType task_type_;
 };

-void IncrementalMarkingJob::Start(Heap* heap) {
-  DCHECK(!heap->incremental_marking()->IsStopped());
-  ScheduleTask(heap);
-}
-
-void IncrementalMarkingJob::ScheduleTask(Heap* heap, TaskType task_type) {
+void IncrementalMarkingJob::ScheduleTask() {
   base::MutexGuard guard(&mutex_);
-  if (IsTaskPending(task_type) || heap->IsTearingDown() ||
+  if (is_task_pending_ || heap_->IsTearingDown() ||
       !FLAG_incremental_marking_task) {
     return;
   }
-  v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(heap->isolate());
-  SetTaskPending(task_type, true);
+  v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(heap_->isolate());
+  is_task_pending_ = true;
   auto taskrunner = V8::GetCurrentPlatform()->GetForegroundTaskRunner(isolate);

   const EmbedderHeapTracer::EmbedderStackState stack_state =
@@ -62,23 +55,14 @@ void IncrementalMarkingJob::ScheduleTask(Heap* heap, TaskType task_type) {
           ? EmbedderHeapTracer::EmbedderStackState::kNoHeapPointers
           : EmbedderHeapTracer::EmbedderStackState::kMayContainHeapPointers;
-  auto task =
-      std::make_unique<Task>(heap->isolate(), this, stack_state, task_type);
-  if (task_type == TaskType::kNormal) {
-    scheduled_time_ = heap->MonotonicallyIncreasingTimeInMs();
-    if (taskrunner->NonNestableTasksEnabled()) {
-      taskrunner->PostNonNestableTask(std::move(task));
-    } else {
-      taskrunner->PostTask(std::move(task));
-    }
-  } else {
-    if (taskrunner->NonNestableDelayedTasksEnabled()) {
-      taskrunner->PostNonNestableDelayedTask(std::move(task), kDelayInSeconds);
-    } else {
-      taskrunner->PostDelayedTask(std::move(task), kDelayInSeconds);
-    }
-  }
+  auto task = std::make_unique<Task>(heap_->isolate(), this, stack_state);
+  scheduled_time_ = heap_->MonotonicallyIncreasingTimeInMs();
+  if (taskrunner->NonNestableTasksEnabled()) {
+    taskrunner->PostNonNestableTask(std::move(task));
+  } else {
+    taskrunner->PostTask(std::move(task));
+  }
 }
@@ -89,11 +73,10 @@ void IncrementalMarkingJob::Task::RunInternal() {
   Heap* heap = isolate()->heap();
   EmbedderStackStateScope scope(
       heap, EmbedderStackStateScope::kImplicitThroughTask, stack_state_);
-  if (task_type_ == TaskType::kNormal) {
-    heap->tracer()->RecordTimeToIncrementalMarkingTask(
-        heap->MonotonicallyIncreasingTimeInMs() - job_->scheduled_time_);
-    job_->scheduled_time_ = 0.0;
-  }
+  heap->tracer()->RecordTimeToIncrementalMarkingTask(
+      heap->MonotonicallyIncreasingTimeInMs() - job_->scheduled_time_);
+  job_->scheduled_time_ = 0.0;

   IncrementalMarking* incremental_marking = heap->incremental_marking();
   if (incremental_marking->IsStopped()) {
@@ -109,7 +92,7 @@ void IncrementalMarkingJob::Task::RunInternal() {
   // scheduling a new task when starting incremental marking.
   {
     base::MutexGuard guard(&job_->mutex_);
-    job_->SetTaskPending(task_type_, false);
+    job_->is_task_pending_ = false;
   }

   if (incremental_marking->IsRunning()) {
@@ -124,15 +107,15 @@ void IncrementalMarkingJob::Task::RunInternal() {
       // immediately here. This was introduced since delayed task were
       // unreliable at some point. Investigate whether this is still the case
      // and whether this could be improved.
-      job_->ScheduleTask(heap, TaskType::kNormal);
+      job_->ScheduleTask();
    }
  }
}

-double IncrementalMarkingJob::CurrentTimeToTask(Heap* heap) const {
+double IncrementalMarkingJob::CurrentTimeToTask() const {
   if (scheduled_time_ == 0.0) return 0.0;
-  return heap->MonotonicallyIncreasingTimeInMs() - scheduled_time_;
+  return heap_->MonotonicallyIncreasingTimeInMs() - scheduled_time_;
 }

 }  // namespace internal
...
@@ -18,37 +18,19 @@ class Isolate;
 // step and posts another task until the marking is completed.
 class IncrementalMarkingJob final {
  public:
-  enum class TaskType { kNormal, kDelayed };
-
-  IncrementalMarkingJob() V8_NOEXCEPT = default;
-
-  void Start(Heap* heap);
-
-  void ScheduleTask(Heap* heap, TaskType task_type = TaskType::kNormal);
-
-  double CurrentTimeToTask(Heap* heap) const;
+  explicit IncrementalMarkingJob(Heap* heap) V8_NOEXCEPT : heap_(heap) {}
+
+  void ScheduleTask();
+
+  double CurrentTimeToTask() const;

  private:
   class Task;
   static constexpr double kDelayInSeconds = 10.0 / 1000.0;

-  bool IsTaskPending(TaskType task_type) const {
-    return task_type == TaskType::kNormal ? normal_task_pending_
-                                          : delayed_task_pending_;
-  }
-
-  void SetTaskPending(TaskType task_type, bool value) {
-    if (task_type == TaskType::kNormal) {
-      normal_task_pending_ = value;
-    } else {
-      delayed_task_pending_ = value;
-    }
-  }
-
+  Heap* heap_;
   base::Mutex mutex_;
   double scheduled_time_ = 0.0;
-  bool normal_task_pending_ = false;
-  bool delayed_task_pending_ = false;
+  bool is_task_pending_ = false;
 };

 }  // namespace internal
 }  // namespace v8
...
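
The header change above also shows the second half of the cleanup: the job now captures its Heap* at construction, so ScheduleTask() and CurrentTimeToTask() no longer take a heap argument. Below is a minimal, self-contained sketch of that refactoring pattern, using illustrative names rather than the real V8 types:

#include <cstdio>

// Illustrative stand-in for the heap dependency.
struct Heap {
  double MonotonicallyIncreasingTimeInMs() const { return 42.0; }
};

// Before: every call site had to pass the heap explicitly.
class JobBefore {
 public:
  double CurrentTimeToTask(Heap* heap) const {
    return heap->MonotonicallyIncreasingTimeInMs() - scheduled_time_;
  }

 private:
  double scheduled_time_ = 0.0;
};

// After: the heap is captured once at construction, so call sites shrink.
class JobAfter {
 public:
  explicit JobAfter(Heap* heap) : heap_(heap) {}
  double CurrentTimeToTask() const {
    return heap_->MonotonicallyIncreasingTimeInMs() - scheduled_time_;
  }

 private:
  Heap* heap_;
  double scheduled_time_ = 0.0;
};

int main() {
  Heap heap;
  JobAfter job(&heap);
  std::printf("time to task: %f ms\n", job.CurrentTimeToTask());
}
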
@@ -15,6 +15,7 @@
 #include "src/heap/heap-inl.h"
 #include "src/heap/heap.h"
 #include "src/heap/incremental-marking-inl.h"
+#include "src/heap/incremental-marking-job.h"
 #include "src/heap/mark-compact-inl.h"
 #include "src/heap/mark-compact.h"
 #include "src/heap/marking-barrier.h"
@@ -53,6 +54,7 @@ IncrementalMarking::IncrementalMarking(Heap* heap, WeakObjects* weak_objects)
     : heap_(heap),
       collector_(heap->mark_compact_collector()),
       weak_objects_(weak_objects),
+      incremental_marking_job_(heap),
       new_generation_observer_(this, kYoungGenerationAllocatedThreshold),
       old_generation_observer_(this, kOldGenerationAllocatedThreshold),
       marking_state_(heap->isolate()),
@@ -175,7 +177,7 @@ void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
   heap_->AddAllocationObserversToAllSpaces(&old_generation_observer_,
                                            &new_generation_observer_);
-  incremental_marking_job()->Start(heap_);
+  incremental_marking_job()->ScheduleTask();
 }

 bool IncrementalMarking::WhiteToGreyAndPush(HeapObject obj) {
@@ -543,15 +545,14 @@ double IncrementalMarking::CurrentTimeToMarkingTask() const {
   const double recorded_time_to_marking_task =
       heap_->tracer()->AverageTimeToIncrementalMarkingTask();
   const double current_time_to_marking_task =
-      incremental_marking_job_.CurrentTimeToTask(heap_);
+      incremental_marking_job_.CurrentTimeToTask();
   if (recorded_time_to_marking_task == 0.0) return 0.0;
   return std::max(recorded_time_to_marking_task, current_time_to_marking_task);
 }

 bool IncrementalMarking::ShouldWaitForTask() {
   if (!completion_task_scheduled_) {
-    incremental_marking_job_.ScheduleTask(
-        heap(), IncrementalMarkingJob::TaskType::kNormal);
+    incremental_marking_job_.ScheduleTask();
     completion_task_scheduled_ = true;
   }
...