Commit d88e3a7a authored by Michael Lippautz, committed by Commit Bot

[heap] Pause Sweeper tasks on Scavenge

Pause sweeper tasks when entering the Scavenger. This enables removing the
page lock for the Scavenger in future CLs.

CQ_INCLUDE_TRYBOTS=master.tryserver.v8:v8_linux64_tsan_rel;master.tryserver.v8:v8_linux64_tsan_concurrent_marking_rel_ng;master.tryserver.blink:linux_trusty_blink_rel;master.tryserver.chromium.linux:linux_optional_gpu_tests_rel;master.tryserver.chromium.mac:mac_optional_gpu_tests_rel;master.tryserver.chromium.win:win_optional_gpu_tests_rel;master.tryserver.chromium.android:android_optional_gpu_tests_rel

Bug: v8:6923, chromium:774108
Change-Id: Idcb7e14b94704dead70c33a49e67047b88bb6c4c
Reviewed-on: https://chromium-review.googlesource.com/718200
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Cr-Commit-Position: refs/heads/master@{#48603}
parent 0738f0f6
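
To make the change easier to follow before reading the diff, here is a minimal, self-contained sketch of the pause/resume pattern the CL introduces: sweeper tasks poll an atomic stop flag and bail out early, while an RAII scope sets the flag, waits for the tasks, and restarts them when it goes out of scope. All names below (ToySweeper, PauseScope, SweepOneUnit) are illustrative placeholders, not the V8 API; the real Sweeper uses V8's cancelable task manager, a semaphore, and base::AtomicValue rather than std::thread.

#include <atomic>
#include <thread>
#include <vector>

class ToySweeper {
 public:
  // Worker loop: bail out as soon as the stop flag is set, mirroring how
  // SweepSpaceFromTask() checks stop_sweeper_tasks_ before taking each page.
  void RunTask() {
    while (!stop_requested_.load(std::memory_order_relaxed) && HasWork()) {
      SweepOneUnit();
    }
  }

  void StartTasks(int n) {
    for (int i = 0; i < n; i++) {
      tasks_.emplace_back([this] { RunTask(); });
    }
  }

  // Stand-in for AbortAndWaitForTasks(): joining is enough here because the
  // stop flag already makes every task return early.
  void WaitForTasks() {
    for (std::thread& t : tasks_) t.join();
    tasks_.clear();
  }

  // Stop and join everything; call before destroying the sweeper.
  void Shutdown() {
    stop_requested_.store(true);
    WaitForTasks();
  }

  bool HasWork() const { return remaining_units_.load() > 0; }

  // RAII scope analogous to Sweeper::PauseOrCompleteScope: request a pause on
  // construction, resume sweeping on destruction if work remains.
  class PauseScope {
   public:
    explicit PauseScope(ToySweeper* sweeper) : sweeper_(sweeper) {
      sweeper_->stop_requested_.store(true);
      sweeper_->WaitForTasks();
    }
    ~PauseScope() {
      sweeper_->stop_requested_.store(false);
      if (sweeper_->HasWork()) sweeper_->StartTasks(1);
    }

   private:
    ToySweeper* const sweeper_;
  };

 private:
  void SweepOneUnit() { remaining_units_.fetch_sub(1); }

  std::atomic<bool> stop_requested_{false};
  std::atomic<int> remaining_units_{1000};
  std::vector<std::thread> tasks_;
};

int main() {
  ToySweeper sweeper;
  sweeper.StartTasks(2);
  {
    // While this scope is alive no sweeper task runs, so a scavenge in this
    // region would not need to lock pages against concurrent sweeping.
    ToySweeper::PauseScope pause(&sweeper);
  }  // Destructor restarts a sweeper task if work is left.
  sweeper.Shutdown();
  return 0;
}

Unlike this sketch, the real PauseOrCompleteScope also completes sweeping outright in its constructor when no work is left, so after construction the sweeper is either done or still in progress but running no tasks.
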
@@ -1916,6 +1916,7 @@ void Heap::Scavenge() {
mark_compact_collector()->EnsureSweepingCompleted();
}
// TODO(mlippautz): Untangle the dependency of the unmapper from the sweeper.
mark_compact_collector()->sweeper().EnsureNewSpaceCompleted();
SetGCState(SCAVENGE);
@@ -1948,41 +1949,46 @@ void Heap::Scavenge() {
job.AddItem(new PageScavengingItem(this, chunk));
});
RootScavengeVisitor root_scavenge_visitor(this, scavengers[kMainThreadId]);
{
// Identify weak unmodified handles. Requires an unmodified graph.
TRACE_GC(tracer(),
GCTracer::Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_IDENTIFY);
isolate()->global_handles()->IdentifyWeakUnmodifiedObjects(
&JSObject::IsUnmodifiedApiObject);
}
{
// Copy roots.
TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_ROOTS);
IterateRoots(&root_scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
}
{
// Weak collections are held strongly by the Scavenger.
TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_WEAK);
IterateEncounteredWeakCollections(&root_scavenge_visitor);
}
{
// Parallel phase scavenging all copied and promoted objects.
TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_PARALLEL);
job.Run();
DCHECK(copied_list.IsGlobalEmpty());
DCHECK(promotion_list.IsGlobalEmpty());
}
{
// Scavenge weak global handles.
TRACE_GC(tracer(),
GCTracer::Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_PROCESS);
isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending(
&IsUnscavengedHeapObject);
isolate()->global_handles()->IterateNewSpaceWeakUnmodifiedRoots(
&root_scavenge_visitor);
scavengers[kMainThreadId]->Process();
MarkCompactCollector::Sweeper::PauseOrCompleteScope sweeper_scope(
&mark_compact_collector()->sweeper());
RootScavengeVisitor root_scavenge_visitor(this, scavengers[kMainThreadId]);
{
// Identify weak unmodified handles. Requires an unmodified graph.
TRACE_GC(
tracer(),
GCTracer::Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_IDENTIFY);
isolate()->global_handles()->IdentifyWeakUnmodifiedObjects(
&JSObject::IsUnmodifiedApiObject);
}
{
// Copy roots.
TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_ROOTS);
IterateRoots(&root_scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
}
{
// Weak collections are held strongly by the Scavenger.
TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_WEAK);
IterateEncounteredWeakCollections(&root_scavenge_visitor);
}
{
// Parallel phase scavenging all copied and promoted objects.
TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_PARALLEL);
job.Run();
DCHECK(copied_list.IsGlobalEmpty());
DCHECK(promotion_list.IsGlobalEmpty());
}
{
// Scavenge weak global handles.
TRACE_GC(tracer(),
GCTracer::Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_PROCESS);
isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending(
&IsUnscavengedHeapObject);
isolate()->global_handles()->IterateNewSpaceWeakUnmodifiedRoots(
&root_scavenge_visitor);
scavengers[kMainThreadId]->Process();
}
}
for (int i = 0; i < num_scavenge_tasks; i++) {
@@ -600,6 +600,32 @@ void MarkCompactCollector::ClearMarkbits() {
heap_->lo_space()->ClearMarkingStateOfLiveObjects();
}
MarkCompactCollector::Sweeper::PauseOrCompleteScope::PauseOrCompleteScope(
MarkCompactCollector::Sweeper* sweeper)
: sweeper_(sweeper) {
sweeper_->stop_sweeper_tasks_.SetValue(true);
if (!sweeper_->sweeping_in_progress()) return;
sweeper_->AbortAndWaitForTasks();
// Complete sweeping if there's nothing more to do.
if (sweeper_->IsDoneSweeping()) {
sweeper_->heap_->mark_compact_collector()->EnsureSweepingCompleted();
DCHECK(!sweeper_->sweeping_in_progress());
} else {
// Unless sweeping is complete the flag still indicates that the sweeper
// is enabled. It just cannot use tasks anymore.
DCHECK(sweeper_->sweeping_in_progress());
}
}
MarkCompactCollector::Sweeper::PauseOrCompleteScope::~PauseOrCompleteScope() {
sweeper_->stop_sweeper_tasks_.SetValue(false);
if (!sweeper_->sweeping_in_progress()) return;
sweeper_->StartSweeperTasks();
}
class MarkCompactCollector::Sweeper::SweeperTask final : public CancelableTask {
public:
SweeperTask(Isolate* isolate, Sweeper* sweeper,
@@ -624,7 +650,7 @@ class MarkCompactCollector::Sweeper::SweeperTask final : public CancelableTask {
const int space_id = FIRST_SPACE + ((i + offset) % num_spaces);
DCHECK_GE(space_id, FIRST_SPACE);
DCHECK_LE(space_id, LAST_PAGED_SPACE);
sweeper_->ParallelSweepSpace(static_cast<AllocationSpace>(space_id), 0);
sweeper_->SweepSpaceFromTask(static_cast<AllocationSpace>(space_id));
}
num_sweeping_tasks_->Decrement(1);
pending_sweeper_tasks_->Signal();
@@ -639,6 +665,7 @@ class MarkCompactCollector::Sweeper::SweeperTask final : public CancelableTask {
};
void MarkCompactCollector::Sweeper::StartSweeping() {
CHECK(!stop_sweeper_tasks_.Value());
sweeping_in_progress_ = true;
NonAtomicMarkingState* marking_state =
heap_->mark_compact_collector()->non_atomic_marking_state();
@@ -654,7 +681,8 @@ void MarkCompactCollector::Sweeper::StartSweeping() {
void MarkCompactCollector::Sweeper::StartSweeperTasks() {
DCHECK_EQ(0, num_tasks_);
DCHECK_EQ(0, num_sweeping_tasks_.Value());
if (FLAG_concurrent_sweeping && sweeping_in_progress_) {
if (FLAG_concurrent_sweeping && sweeping_in_progress_ &&
!heap_->delay_sweeper_tasks_for_testing_) {
ForAllSweepingSpaces([this](AllocationSpace space) {
if (space == NEW_SPACE) return;
num_sweeping_tasks_.Increment(1);
@@ -700,6 +728,22 @@ Page* MarkCompactCollector::Sweeper::GetSweptPageSafe(PagedSpace* space) {
return nullptr;
}
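// Aborts sweeper tasks that have not started yet and waits for the rest to
// finish. A task aborted before it ran never decrements num_sweeping_tasks_
// or signals the semaphore itself, hence the manual Decrement(1) in the
// aborted branch; in every other case the task will (or already did) call
// Signal(), so the main thread just waits on the semaphore.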
void MarkCompactCollector::Sweeper::AbortAndWaitForTasks() {
if (!FLAG_concurrent_sweeping) return;
for (int i = 0; i < num_tasks_; i++) {
if (heap_->isolate()->cancelable_task_manager()->TryAbort(task_ids_[i]) !=
CancelableTaskManager::kTaskAborted) {
pending_sweeper_tasks_semaphore_.Wait();
} else {
// Aborted case.
num_sweeping_tasks_.Decrement(1);
}
}
num_tasks_ = 0;
DCHECK_EQ(0, num_sweeping_tasks_.Value());
}
void MarkCompactCollector::Sweeper::EnsureCompleted() {
if (!sweeping_in_progress_) return;
@@ -708,16 +752,7 @@ void MarkCompactCollector::Sweeper::EnsureCompleted() {
ForAllSweepingSpaces(
[this](AllocationSpace space) { ParallelSweepSpace(space, 0); });
if (FLAG_concurrent_sweeping) {
for (int i = 0; i < num_tasks_; i++) {
if (heap_->isolate()->cancelable_task_manager()->TryAbort(task_ids_[i]) !=
CancelableTaskManager::kTaskAborted) {
pending_sweeper_tasks_semaphore_.Wait();
}
}
num_tasks_ = 0;
num_sweeping_tasks_.SetValue(0);
}
AbortAndWaitForTasks();
ForAllSweepingSpaces([this](AllocationSpace space) {
if (space == NEW_SPACE) {
@@ -1045,9 +1080,7 @@ void MarkCompactCollector::Finish() {
heap()->VerifyCountersBeforeConcurrentSweeping();
#endif
if (!heap()->delay_sweeper_tasks_for_testing_) {
sweeper().StartSweeperTasks();
}
sweeper().StartSweeperTasks();
// The hashing of weak_object_to_code_table is no longer valid.
heap()->weak_object_to_code_table()->Rehash();
@@ -4310,6 +4343,15 @@ void MarkCompactCollector::ReleaseEvacuationCandidates() {
compacting_ = false;
}
void MarkCompactCollector::Sweeper::SweepSpaceFromTask(
AllocationSpace identity) {
Page* page = nullptr;
while (!stop_sweeper_tasks_.Value() &&
((page = GetSweepingPageSafe(identity)) != nullptr)) {
ParallelSweepPage(page, identity);
}
}
int MarkCompactCollector::Sweeper::ParallelSweepSpace(AllocationSpace identity,
int required_freed_bytes,
int max_pages) {
@@ -593,6 +593,16 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
class Sweeper {
public:
// Pauses the sweeper tasks or completes sweeping.
class PauseOrCompleteScope {
public:
explicit PauseOrCompleteScope(Sweeper* sweeper);
~PauseOrCompleteScope();
private:
Sweeper* const sweeper_;
};
enum FreeListRebuildingMode { REBUILD_FREE_LIST, IGNORE_FREE_LIST };
enum ClearOldToNewSlotsMode {
DO_NOT_CLEAR,
@@ -613,9 +623,10 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
num_tasks_(0),
pending_sweeper_tasks_semaphore_(0),
sweeping_in_progress_(false),
num_sweeping_tasks_(0) {}
num_sweeping_tasks_(0),
stop_sweeper_tasks_(false) {}
bool sweeping_in_progress() { return sweeping_in_progress_; }
bool sweeping_in_progress() const { return sweeping_in_progress_; }
void AddPage(AllocationSpace space, Page* page);
@@ -649,6 +660,18 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
}
}
// Can only be called on the main thread when no tasks are running.
bool IsDoneSweeping() const {
for (int i = 0; i < kAllocationSpaces; i++) {
if (!sweeping_list_[i].empty()) return false;
}
return true;
}
void SweepSpaceFromTask(AllocationSpace identity);
void AbortAndWaitForTasks();
Page* GetSweepingPageSafe(AllocationSpace space);
void PrepareToBeSweptPage(AllocationSpace space, Page* page);
@@ -665,6 +688,8 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
// Counter is actively maintained by the concurrent tasks to avoid querying
// the semaphore for maintaining a task counter on the main thread.
base::AtomicNumber<intptr_t> num_sweeping_tasks_;
// Used by PauseOrCompleteScope to signal early bailout to tasks.
base::AtomicValue<bool> stop_sweeper_tasks_;
};
enum IterationMode {
@@ -191,6 +191,7 @@ UNINITIALIZED_HEAP_TEST(Regress658718) {
heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
heap->new_space()->Shrink();
heap->memory_allocator()->unmapper()->WaitUntilCompleted();
heap->delay_sweeper_tasks_for_testing_ = false;
heap->mark_compact_collector()->sweeper().StartSweeperTasks();
heap->mark_compact_collector()->EnsureSweepingCompleted();
}