Commit 2e4b4db0 authored by Michael Lippautz, committed by Commit Bot

[heap] Untangle iterability from regular sweeping

Separates restoring iterability from sweeping a page. Since the set
of pages where iterability needs to be restored is small, it is
possible to wait and block for the task to finish if necessary.

A follow up change can now remove the fragile logic for delaying
unmapping of pages since it is guaranteed that no background task
keeps a reference to a page for restoring iterability.

Bug: chromium:791043
Change-Id: Ifba45594cc586df3c99e1bbb20a13b44c18dd9a1
Reviewed-on: https://chromium-review.googlesource.com/796419
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#49826}
parent c74b7f3a
......@@ -1847,7 +1847,7 @@ void Heap::EvacuateYoungGeneration() {
DCHECK(CanExpandOldGeneration(new_space()->Size()));
}
mark_compact_collector()->sweeper()->EnsureNewSpaceCompleted();
mark_compact_collector()->sweeper()->EnsureIterabilityCompleted();
SetGCState(SCAVENGE);
LOG(isolate_, ResourceEvent("scavenge", "begin"));
......@@ -1969,8 +1969,7 @@ void Heap::Scavenge() {
mark_compact_collector()->EnsureSweepingCompleted();
}
// TODO(mlippautz): Untangle the dependency of the unmapper from the sweeper.
mark_compact_collector()->sweeper()->EnsureNewSpaceCompleted();
mark_compact_collector()->sweeper()->EnsureIterabilityCompleted();
SetGCState(SCAVENGE);
......@@ -6636,7 +6635,7 @@ Code* Heap::GcSafeFindCodeForInnerPointer(Address inner_pointer) {
// after the inner pointer.
Page* page = Page::FromAddress(inner_pointer);
DCHECK_EQ(page->owner(), code_space());
mark_compact_collector()->sweeper()->SweepOrWaitUntilSweepingCompleted(page);
mark_compact_collector()->sweeper()->EnsurePageIsIterable(page);
Address addr = page->skip_list()->StartFor(inner_pointer);
Address top = code_space()->top();
......
......@@ -925,6 +925,7 @@ void MarkCompactCollector::Finish() {
#endif
sweeper()->StartSweeperTasks();
sweeper()->StartIterabilityTasks();
// The hashing of weak_object_to_code_table is no longer valid.
heap()->weak_object_to_code_table()->Rehash();
......@@ -2169,7 +2170,7 @@ void MinorMarkCompactCollector::ProcessMarkingWorklist() {
void MinorMarkCompactCollector::CollectGarbage() {
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_SWEEPING);
heap()->mark_compact_collector()->sweeper()->EnsureNewSpaceCompleted();
heap()->mark_compact_collector()->sweeper()->EnsureIterabilityCompleted();
CleanupSweepToIteratePages();
}
......@@ -3509,12 +3510,11 @@ void MarkCompactCollector::Evacuate() {
for (Page* p : new_space_evacuation_pages_) {
if (p->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION)) {
p->ClearFlag(Page::PAGE_NEW_NEW_PROMOTION);
sweeper()->AddPage(p->owner()->identity(), p, Sweeper::REGULAR);
sweeper()->AddPageForIterability(p);
} else if (p->IsFlagSet(Page::PAGE_NEW_OLD_PROMOTION)) {
p->ClearFlag(Page::PAGE_NEW_OLD_PROMOTION);
p->ForAllFreeListCategories(
[](FreeListCategory* category) { DCHECK(!category->is_linked()); });
sweeper()->AddPage(p->owner()->identity(), p, Sweeper::REGULAR);
DCHECK_EQ(OLD_SPACE, p->owner()->identity());
sweeper()->AddPage(OLD_SPACE, p, Sweeper::REGULAR);
}
}
new_space_evacuation_pages_.clear();
......
......@@ -137,7 +137,7 @@ bool NewSpace::FromSpaceContainsSlow(Address a) {
bool NewSpace::ToSpaceContains(Object* o) { return to_space_.Contains(o); }
bool NewSpace::FromSpaceContains(Object* o) { return from_space_.Contains(o); }
void Page::InitializeFreeListCategories() {
void MemoryChunk::InitializeFreeListCategories() {
for (int i = kFirstCategory; i < kNumberOfCategories; i++) {
categories_[i].Initialize(static_cast<FreeListCategoryType>(i));
}
......
......@@ -57,8 +57,7 @@ bool HeapObjectIterator::AdvanceToNextPage() {
Page* cur_page = *(current_page_++);
Heap* heap = space_->heap();
heap->mark_compact_collector()->sweeper()->SweepOrWaitUntilSweepingCompleted(
cur_page);
heap->mark_compact_collector()->sweeper()->EnsurePageIsIterable(cur_page);
if (cur_page->IsFlagSet(Page::SWEEP_TO_ITERATE))
heap->minor_mark_compact_collector()->MakeIterable(
cur_page, MarkingTreatmentMode::CLEAR,
......@@ -602,6 +601,7 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
chunk->set_next_chunk(nullptr);
chunk->set_prev_chunk(nullptr);
chunk->local_tracker_ = nullptr;
chunk->InitializeFreeListCategories();
heap->incremental_marking()->non_atomic_marking_state()->ClearLiveness(chunk);
......@@ -632,7 +632,6 @@ Page* PagedSpace::InitializePage(MemoryChunk* chunk, Executability executable) {
Page* page = static_cast<Page*>(chunk);
DCHECK_GE(Page::kAllocatableMemory, page->area_size());
// Make sure that categories are initialized before freeing the area.
page->InitializeFreeListCategories();
page->ResetAllocatedBytes();
heap()->incremental_marking()->SetOldSpacePageFlags(page);
page->InitializationMemoryFence();
......
......@@ -640,6 +640,8 @@ class MemoryChunk {
void SetReadAndExecutable();
void SetReadAndWritable();
inline void InitializeFreeListCategories();
protected:
static MemoryChunk* Initialize(Heap* heap, Address base, size_t size,
Address area_start, Address area_end,
......@@ -845,8 +847,6 @@ class Page : public MemoryChunk {
return &categories_[type];
}
inline void InitializeFreeListCategories();
bool is_anchor() { return IsFlagSet(Page::ANCHOR); }
size_t wasted_memory() { return wasted_memory_; }
......
......@@ -74,15 +74,15 @@ class Sweeper::SweeperTask final : public CancelableTask {
private:
void RunInternal() final {
DCHECK_GE(space_to_start_, FIRST_SPACE);
DCHECK_GE(space_to_start_, FIRST_PAGED_SPACE);
DCHECK_LE(space_to_start_, LAST_PAGED_SPACE);
const int offset = space_to_start_ - FIRST_SPACE;
const int num_spaces = LAST_PAGED_SPACE - FIRST_SPACE + 1;
const int offset = space_to_start_ - FIRST_PAGED_SPACE;
const int num_spaces = LAST_PAGED_SPACE - FIRST_PAGED_SPACE + 1;
for (int i = 0; i < num_spaces; i++) {
const int space_id = FIRST_SPACE + ((i + offset) % num_spaces);
const int space_id = FIRST_PAGED_SPACE + ((i + offset) % num_spaces);
// Do not sweep code space concurrently.
if (static_cast<AllocationSpace>(space_id) == CODE_SPACE) continue;
DCHECK_GE(space_id, FIRST_SPACE);
DCHECK_GE(space_id, FIRST_PAGED_SPACE);
DCHECK_LE(space_id, LAST_PAGED_SPACE);
sweeper_->SweepSpaceFromTask(static_cast<AllocationSpace>(space_id));
}
......@@ -127,6 +127,7 @@ class Sweeper::IncrementalSweeperTask final : public CancelableTask {
void Sweeper::StartSweeping() {
CHECK(!stop_sweeper_tasks_.Value());
sweeping_in_progress_ = true;
iterability_in_progress_ = true;
MajorNonAtomicMarkingState* marking_state =
heap_->mark_compact_collector()->non_atomic_marking_state();
ForAllSweepingSpaces([this, marking_state](AllocationSpace space) {
......@@ -144,7 +145,7 @@ void Sweeper::StartSweeperTasks() {
if (FLAG_concurrent_sweeping && sweeping_in_progress_ &&
!heap_->delay_sweeper_tasks_for_testing_) {
ForAllSweepingSpaces([this](AllocationSpace space) {
if (space == NEW_SPACE) return;
DCHECK(IsValidSweepingSpace(space));
num_sweeping_tasks_.Increment(1);
SweeperTask* task = new SweeperTask(heap_->isolate(), this,
&pending_sweeper_tasks_semaphore_,
......@@ -200,6 +201,8 @@ void Sweeper::AbortAndWaitForTasks() {
void Sweeper::EnsureCompleted() {
if (!sweeping_in_progress_) return;
EnsureIterabilityCompleted();
// If sweeping is not completed or not running at all, we try to complete it
// here.
ForAllSweepingSpaces(
......@@ -207,24 +210,11 @@ void Sweeper::EnsureCompleted() {
AbortAndWaitForTasks();
ForAllSweepingSpaces([this](AllocationSpace space) {
if (space == NEW_SPACE) {
swept_list_[NEW_SPACE].clear();
}
DCHECK(sweeping_list_[space].empty());
});
ForAllSweepingSpaces(
[this](AllocationSpace space) { CHECK(sweeping_list_[space].empty()); });
sweeping_in_progress_ = false;
}
// Legacy path (removed by this change): ensures every new-space page has been
// swept so the space is iterable, by sweeping each page on the main thread or
// waiting for a concurrent sweeper to finish it.
// NOTE(review): the second guard looks suspicious — once the early return has
// passed, sweeping_in_progress() is necessarily true, so
// (!FLAG_concurrent_sweeping || sweeping_in_progress()) always holds and the
// condition is a no-op; confirm whether a different predicate was intended.
void Sweeper::EnsureNewSpaceCompleted() {
  // Nothing to do when no sweeping cycle is active.
  if (!sweeping_in_progress_) return;
  if (!FLAG_concurrent_sweeping || sweeping_in_progress()) {
    // Sweep (or wait on) each page of the new space.
    for (Page* p : *heap_->new_space()) {
      SweepOrWaitUntilSweepingCompleted(p);
    }
  }
}
bool Sweeper::AreSweeperTasksRunning() {
return num_sweeping_tasks_.Value() != 0;
}
......@@ -408,6 +398,7 @@ int Sweeper::ParallelSweepPage(Page* page, AllocationSpace identity) {
// path. This check here avoids taking the lock first, avoiding deadlocks.
if (page->SweepingDone()) return 0;
DCHECK(IsValidSweepingSpace(identity));
int max_freed = 0;
{
base::LockGuard<base::Mutex> guard(page->mutex());
......@@ -423,11 +414,7 @@ int Sweeper::ParallelSweepPage(Page* page, AllocationSpace identity) {
page->concurrent_sweeping_state().SetValue(Page::kSweepingInProgress);
const FreeSpaceTreatmentMode free_space_mode =
Heap::ShouldZapGarbage() ? ZAP_FREE_SPACE : IGNORE_FREE_SPACE;
if (identity == NEW_SPACE) {
RawSweep(page, IGNORE_FREE_LIST, free_space_mode);
} else {
max_freed = RawSweep(page, REBUILD_FREE_LIST, free_space_mode);
}
max_freed = RawSweep(page, REBUILD_FREE_LIST, free_space_mode);
DCHECK(page->SweepingDone());
// After finishing sweeping of a page we clean up its remembered set.
......@@ -461,9 +448,9 @@ void Sweeper::ScheduleIncrementalSweepingTask() {
void Sweeper::AddPage(AllocationSpace space, Page* page,
Sweeper::AddPageMode mode) {
base::LockGuard<base::Mutex> guard(&mutex_);
DCHECK(IsValidSweepingSpace(space));
DCHECK(!FLAG_concurrent_sweeping || !AreSweeperTasksRunning());
if (mode == Sweeper::REGULAR) {
DCHECK_EQ(Page::kSweepingDone, page->concurrent_sweeping_state().Value());
PrepareToBeSweptPage(space, page);
} else {
// Page has been temporarily removed from the sweeper. Accounting already
......@@ -475,17 +462,17 @@ void Sweeper::AddPage(AllocationSpace space, Page* page,
}
void Sweeper::PrepareToBeSweptPage(AllocationSpace space, Page* page) {
page->concurrent_sweeping_state().SetValue(Page::kSweepingPending);
DCHECK_GE(page->area_size(),
static_cast<size_t>(marking_state_->live_bytes(page)));
if (space != NEW_SPACE) {
heap_->paged_space(space)->IncreaseAllocatedBytes(
marking_state_->live_bytes(page), page);
}
DCHECK_EQ(Page::kSweepingDone, page->concurrent_sweeping_state().Value());
page->concurrent_sweeping_state().SetValue(Page::kSweepingPending);
heap_->paged_space(space)->IncreaseAllocatedBytes(
marking_state_->live_bytes(page), page);
}
Page* Sweeper::GetSweepingPageSafe(AllocationSpace space) {
base::LockGuard<base::Mutex> guard(&mutex_);
DCHECK(IsValidSweepingSpace(space));
Page* page = nullptr;
if (!sweeping_list_[space].empty()) {
page = sweeping_list_[space].front();
......@@ -494,5 +481,92 @@ Page* Sweeper::GetSweepingPageSafe(AllocationSpace space) {
return page;
}
// Makes |page| safe to iterate. Pages in regular sweeping spaces are swept
// directly (or waited on if a concurrent sweeper holds them); otherwise the
// page must belong to an iterability space, in which case the whole — small —
// set of iterability pages is completed.
void Sweeper::EnsurePageIsIterable(Page* page) {
  const AllocationSpace owner_identity = page->owner()->identity();
  if (!IsValidSweepingSpace(owner_identity)) {
    DCHECK(IsValidIterabilitySpace(owner_identity));
    EnsureIterabilityCompleted();
    return;
  }
  SweepOrWaitUntilSweepingCompleted(page);
}
// Blocks until all pages queued for iterability have been made iterable.
// If a concurrent iterability task was started, it is first aborted — or, if
// it is already running, waited for — before the main thread processes any
// remaining pages itself.
void Sweeper::EnsureIterabilityCompleted() {
  if (!iterability_in_progress_) return;

  if (FLAG_concurrent_sweeping && iterability_task_started_) {
    // TryAbort fails when the task already started running; in that case wait
    // for its completion signal on the semaphore.
    if (heap_->isolate()->cancelable_task_manager()->TryAbort(
            iterability_task_id_) != CancelableTaskManager::kTaskAborted) {
      iterability_task_semaphore_.Wait();
    }
    iterability_task_started_ = false;
  }

  // Process whatever is left in the list. If the task ran to completion it
  // cleared the list, making this loop a no-op; if it was aborted before
  // running, all pages are handled here.
  for (Page* page : iterability_list_) {
    MakeIterable(page);
  }
  iterability_list_.clear();
  iterability_in_progress_ = false;
}
// Background task that restores iterability for every page queued in the
// sweeper's iterability_list_ and then signals |pending_iterability_task| so
// that EnsureIterabilityCompleted() can wait for it.
class Sweeper::IterabilityTask final : public CancelableTask {
 public:
  IterabilityTask(Isolate* isolate, Sweeper* sweeper,
                  base::Semaphore* pending_iterability_task)
      : CancelableTask(isolate),
        sweeper_(sweeper),
        pending_iterability_task_(pending_iterability_task) {}

  virtual ~IterabilityTask() {}

 private:
  // v8::internal::CancelableTask overrides.
  void RunInternal() final {
    // NOTE(review): iterability_list_ is read and cleared here without a
    // lock; this relies on no other thread mutating the list while the task
    // runs (AddPageForIterability DCHECKs !iterability_task_started_) —
    // confirm this invariant holds for all callers.
    for (Page* page : sweeper_->iterability_list_) {
      sweeper_->MakeIterable(page);
    }
    sweeper_->iterability_list_.clear();
    // Wake a main thread potentially blocked in EnsureIterabilityCompleted().
    pending_iterability_task_->Signal();
  }

  Sweeper* const sweeper_;
  base::Semaphore* const pending_iterability_task_;

  DISALLOW_COPY_AND_ASSIGN(IterabilityTask);
};
// Spawns at most one background task to make the queued pages iterable.
// No-op when nothing is queued or concurrent sweeping is disabled; the queued
// pages are then handled later by EnsureIterabilityCompleted().
void Sweeper::StartIterabilityTasks() {
  if (!iterability_in_progress_) return;

  DCHECK(!iterability_task_started_);
  if (FLAG_concurrent_sweeping && !iterability_list_.empty()) {
    IterabilityTask* task = new IterabilityTask(heap_->isolate(), this,
                                                &iterability_task_semaphore_);
    // Remember the id so EnsureIterabilityCompleted() can abort or wait for
    // the task.
    iterability_task_id_ = task->id();
    iterability_task_started_ = true;
    V8::GetCurrentPlatform()->CallOnBackgroundThread(
        task, v8::Platform::kShortRunningTask);
  }
}
// Queues a page whose iterability must be restored. Must be called while a
// sweeping cycle is set up and before the iterability task has been started,
// so no locking of iterability_list_ is required.
void Sweeper::AddPageForIterability(Page* page) {
  DCHECK(sweeping_in_progress_);
  DCHECK(iterability_in_progress_);
  DCHECK(!iterability_task_started_);
  DCHECK(IsValidIterabilitySpace(page->owner()->identity()));
  DCHECK_EQ(Page::kSweepingDone, page->concurrent_sweeping_state().Value());
  // The page's free-list categories must not be linked into any free list:
  // iterability sweeping runs with IGNORE_FREE_LIST (see MakeIterable), so
  // the free lists are never rebuilt for these pages.
  page->ForAllFreeListCategories(
      [](FreeListCategory* category) { DCHECK(!category->is_linked()); });
  iterability_list_.push_back(page);
  page->concurrent_sweeping_state().SetValue(Page::kSweepingPending);
}
// Sweeps |page| only to the point of being iterable: free memory is filled
// with filler objects (and optionally zapped) but free lists are not rebuilt.
void Sweeper::MakeIterable(Page* page) {
  DCHECK(IsValidIterabilitySpace(page->owner()->identity()));
  FreeSpaceTreatmentMode treatment = IGNORE_FREE_SPACE;
  if (Heap::ShouldZapGarbage()) treatment = ZAP_FREE_SPACE;
  RawSweep(page, IGNORE_FREE_LIST, treatment);
}
} // namespace internal
} // namespace v8
......@@ -23,6 +23,7 @@ enum FreeSpaceTreatmentMode { IGNORE_FREE_SPACE, ZAP_FREE_SPACE };
class Sweeper {
public:
typedef std::vector<Page*> IterabilityList;
typedef std::deque<Page*> SweepingList;
typedef std::vector<Page*> SweptList;
......@@ -83,7 +84,10 @@ class Sweeper {
incremental_sweeper_pending_(false),
sweeping_in_progress_(false),
num_sweeping_tasks_(0),
stop_sweeper_tasks_(false) {}
stop_sweeper_tasks_(false),
iterability_task_semaphore_(0),
iterability_in_progress_(false),
iterability_task_started_(false) {}
bool sweeping_in_progress() const { return sweeping_in_progress_; }
......@@ -104,32 +108,38 @@ class Sweeper {
void StartSweeping();
void StartSweeperTasks();
void EnsureCompleted();
void EnsureNewSpaceCompleted();
bool AreSweeperTasksRunning();
void SweepOrWaitUntilSweepingCompleted(Page* page);
Page* GetSweptPageSafe(PagedSpace* space);
void EnsurePageIsIterable(Page* page);
void AddPageForIterability(Page* page);
void StartIterabilityTasks();
void EnsureIterabilityCompleted();
private:
class IncrementalSweeperTask;
class IterabilityTask;
class SweeperTask;
static const int kAllocationSpaces = LAST_PAGED_SPACE + 1;
static const int kMaxSweeperTasks = kAllocationSpaces;
static const int kNumberOfSweepingSpaces = LAST_PAGED_SPACE + 1;
static const int kMaxSweeperTasks = 3;
template <typename Callback>
void ForAllSweepingSpaces(Callback callback) {
for (int i = 0; i < kAllocationSpaces; i++) {
callback(static_cast<AllocationSpace>(i));
}
void ForAllSweepingSpaces(Callback callback) const {
callback(OLD_SPACE);
callback(CODE_SPACE);
callback(MAP_SPACE);
}
// Can only be called on the main thread when no tasks are running.
bool IsDoneSweeping() const {
for (int i = 0; i < kAllocationSpaces; i++) {
if (!sweeping_list_[i].empty()) return false;
}
return true;
bool is_done = true;
ForAllSweepingSpaces([this, &is_done](AllocationSpace space) {
if (!sweeping_list_[space].empty()) is_done = false;
});
return is_done;
}
void SweepSpaceFromTask(AllocationSpace identity);
......@@ -144,14 +154,26 @@ class Sweeper {
void PrepareToBeSweptPage(AllocationSpace space, Page* page);
void SweepOrWaitUntilSweepingCompleted(Page* page);
void MakeIterable(Page* page);
// Returns whether |space| holds pages that are only made iterable (free
// lists ignored); this applies exclusively to the new space. Pure predicate,
// hence const.
bool IsValidIterabilitySpace(AllocationSpace space) const {
  return space == NEW_SPACE;
}
// Returns whether |space| participates in regular (free-list rebuilding)
// sweeping, i.e. is one of the paged spaces. Pure predicate, hence const.
bool IsValidSweepingSpace(AllocationSpace space) const {
  return space >= FIRST_PAGED_SPACE && space <= LAST_PAGED_SPACE;
}
Heap* const heap_;
MajorNonAtomicMarkingState* marking_state_;
int num_tasks_;
CancelableTaskManager::Id task_ids_[kMaxSweeperTasks];
CancelableTaskManager::Id task_ids_[kNumberOfSweepingSpaces];
base::Semaphore pending_sweeper_tasks_semaphore_;
base::Mutex mutex_;
SweptList swept_list_[kAllocationSpaces];
SweepingList sweeping_list_[kAllocationSpaces];
SweptList swept_list_[kNumberOfSweepingSpaces];
SweepingList sweeping_list_[kNumberOfSweepingSpaces];
bool incremental_sweeper_pending_;
bool sweeping_in_progress_;
// Counter is actively maintained by the concurrent tasks to avoid querying
......@@ -159,6 +181,13 @@ class Sweeper {
base::AtomicNumber<intptr_t> num_sweeping_tasks_;
// Used by PauseOrCompleteScope to signal early bailout to tasks.
base::AtomicValue<bool> stop_sweeper_tasks_;
// Pages that are only made iterable but have their free lists ignored.
IterabilityList iterability_list_;
CancelableTaskManager::Id iterability_task_id_;
base::Semaphore iterability_task_semaphore_;
bool iterability_in_progress_;
bool iterability_task_started_;
};
} // namespace internal
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment