Commit 4822d3b2 authored by Etienne Pierre-Doray, committed by Commit Bot

Revert "Reland "[Heap] ScavengerCollector use Jobs.""

This reverts commit 92f815a8.

Reason for revert: broke tests; see https://ci.chromium.org/p/v8/builders/ci/V8%20Linux64%20TSAN/33395?

Original change's description:
> Reland "[Heap] ScavengerCollector use Jobs."
>
> This is a reland of 9e8c54f8
> Safe to reland as-is with fix in AcquireTaskId
> https://chromium-review.googlesource.com/c/v8/v8/+/2401964
>
> Additional changes are made in the reland:
> - TRACE_GC is split for background/foreground scope.
> - New IndexGenerator is used for dynamic work assignment.
>
> Original change's description:
> > [Heap] ScavengerCollector use Jobs.
> >
> > No yielding is necessary since the main thread Join()s.
> >
> > max concurrency is determined based on either
> > remaining_memory_chunks_ or global pool size
> > (copied_list_ + promotion_list_)
> >
> > Change-Id: Ie30fa86c44d3224b04df5d79569bce126ce7d96b
> > Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2354390
> > Commit-Queue: Etienne Pierre-Doray <etiennep@chromium.org>
> > Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
> > Cr-Commit-Position: refs/heads/master@{#69746}
>
> Change-Id: Id9d7a5bf3b2337ae4cf1e76770f4b14ebb8ca256
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2399041
> Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
> Commit-Queue: Etienne Pierre-Doray <etiennep@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#70135}

TBR=ulan@chromium.org,etiennep@chromium.org

Change-Id: I4823c642546b82a9a9c8955151cd8784e4b86bc8
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2431551
Commit-Queue: Francis McCabe <fgm@chromium.org>
Reviewed-by: Francis McCabe <fgm@chromium.org>
Cr-Commit-Position: refs/heads/master@{#70138}
parent 21b58516
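As the quoted description notes, the reverted change derived the job's maximum concurrency from either remaining_memory_chunks_ or the global pool size (copied_list_ + promotion_list_). A minimal standalone sketch of such a calculation, reusing the kMaxScavengerTasks = 8 cap declared further down but with an illustrative function name and formula (this is not the CL's actual code), could look like:

// Standalone sketch, not the CL's implementation: request enough workers to
// cover both unclaimed pages and queued copied/promoted objects, capped at
// the fixed task limit.
#include <algorithm>
#include <cstddef>

constexpr std::size_t kMaxScavengerTasks = 8;  // cap mirrored from ScavengerCollector

std::size_t ScavengeMaxConcurrency(std::size_t remaining_memory_chunks,
                                   std::size_t global_pool_size) {
  // One worker per unclaimed chunk is enough to drain the page list; once the
  // chunks are gone, size the job by how much work is still queued in the
  // shared copied/promotion worklists.
  return std::min(kMaxScavengerTasks,
                  std::max(remaining_memory_chunks, global_pool_size));
}

With, say, 10 unclaimed chunks this asks for the full 8 workers; once only a few objects remain queued, the requested concurrency drops with them, which is how a Jobs-based scavenge can shed workers as work runs out.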
@@ -2650,7 +2650,6 @@ v8_source_set("v8_base_without_compiler") {
    "src/heap/paged-spaces-inl.h",
    "src/heap/paged-spaces.cc",
    "src/heap/paged-spaces.h",
    "src/heap/parallel-work-item.h",
    "src/heap/read-only-heap-inl.h",
    "src/heap/read-only-heap.cc",
    "src/heap/read-only-heap.h",
......
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_PARALLEL_WORK_ITEM_H_
#define V8_HEAP_PARALLEL_WORK_ITEM_H_

#include <atomic>

namespace v8 {
namespace internal {

class ParallelWorkItem {
 public:
  ParallelWorkItem() = default;

  bool TryAcquire() {
    // memory_order_relaxed is sufficient as the work item's state itself
    // hasn't been modified since the beginning of its associated job. This is
    // only atomically acquiring the right to work on it.
    return reinterpret_cast<std::atomic<bool>*>(&acquire_)->exchange(
               true, std::memory_order_relaxed) == false;
  }

 private:
  bool acquire_{false};
};

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_PARALLEL_WORK_ITEM_H_
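To make the role of this primitive concrete, here is a standalone sketch (not part of the CL) of how TryAcquire() lets several workers each claim a chunk at most once, with every worker starting at a different offset in the spirit of the CL's IndexGenerator-based dynamic work assignment; the Chunk type and the thread setup are illustrative assumptions.

// Standalone sketch, not the CL's code: concurrent workers claim chunks via
// ParallelWorkItem::TryAcquire(), so each chunk is processed exactly once.
#include <atomic>
#include <cstddef>
#include <cstdio>
#include <thread>
#include <utility>
#include <vector>

class ParallelWorkItem {
 public:
  ParallelWorkItem() = default;
  bool TryAcquire() {
    // As in the header above: a relaxed exchange is enough, because only the
    // "claimed" flag needs to be atomic, not the data the work item guards.
    return reinterpret_cast<std::atomic<bool>*>(&acquire_)->exchange(
               true, std::memory_order_relaxed) == false;
  }

 private:
  bool acquire_{false};
};

struct Chunk { int id; };  // stand-in for MemoryChunk*

int main() {
  std::vector<std::pair<ParallelWorkItem, Chunk>> chunks(16);
  for (int i = 0; i < 16; ++i) chunks[i].second.id = i;

  std::atomic<int> claimed{0};
  auto worker = [&](std::size_t start) {
    // Scan every slot once, starting at `start`; TryAcquire() guarantees that
    // each chunk is handed to exactly one worker.
    for (std::size_t n = 0; n < chunks.size(); ++n) {
      auto& item = chunks[(start + n) % chunks.size()];
      if (!item.first.TryAcquire()) continue;  // already claimed elsewhere
      claimed.fetch_add(1, std::memory_order_relaxed);
    }
  };

  std::thread t1(worker, 0), t2(worker, 5), t3(worker, 11);
  t1.join();
  t2.join();
  t3.join();
  std::printf("claimed %d of %zu chunks exactly once\n", claimed.load(),
              chunks.size());
  return 0;
}

In the CL itself the claimed entry pairs the work item with a MemoryChunk*, and the relaxed exchange is safe for exactly the reason the header comment gives: the chunk was set up before the job started, so only the claim flag needs atomicity.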
@@ -38,10 +38,6 @@ bool Scavenger::PromotionList::View::Pop(struct PromotionListEntry* entry) {
  return promotion_list_->Pop(task_id_, entry);
}

void Scavenger::PromotionList::View::FlushToGlobal() {
  promotion_list_->FlushToGlobal(task_id_);
}

bool Scavenger::PromotionList::View::IsGlobalPoolEmpty() {
  return promotion_list_->IsGlobalPoolEmpty();
}
@@ -82,16 +78,6 @@ bool Scavenger::PromotionList::Pop(int task_id,
  return large_object_promotion_list_.Pop(task_id, entry);
}

void Scavenger::PromotionList::FlushToGlobal(int task_id) {
  regular_object_promotion_list_.FlushToGlobal(task_id);
  large_object_promotion_list_.FlushToGlobal(task_id);
}

size_t Scavenger::PromotionList::GlobalPoolSize() const {
  return regular_object_promotion_list_.GlobalPoolSize() +
         large_object_promotion_list_.GlobalPoolSize();
}

bool Scavenger::PromotionList::IsGlobalPoolEmpty() {
  return regular_object_promotion_list_.IsGlobalPoolEmpty() &&
         large_object_promotion_list_.IsGlobalPoolEmpty();
......
This diff is collapsed.
@@ -6,10 +6,8 @@
#define V8_HEAP_SCAVENGER_H_
#include "src/base/platform/condition-variable.h"
#include "src/heap/index-generator.h"
#include "src/heap/local-allocator.h"
#include "src/heap/objects-visiting.h"
#include "src/heap/parallel-work-item.h"
#include "src/heap/slot-set.h"
#include "src/heap/worklist.h"
@@ -35,7 +33,38 @@ constexpr int kEphemeronTableListSegmentSize = 128;
using EphemeronTableList =
    Worklist<EphemeronHashTable, kEphemeronTableListSegmentSize>;
class ScavengerCollector;

class ScavengerCollector {
 public:
  static const int kMaxScavengerTasks = 8;
  static const int kMaxWaitTimeMs = 2;

  explicit ScavengerCollector(Heap* heap);

  void CollectGarbage();

 private:
  void MergeSurvivingNewLargeObjects(
      const SurvivingNewLargeObjectsMap& objects);

  int NumberOfScavengeTasks();

  void ProcessWeakReferences(EphemeronTableList* ephemeron_table_list);
  void ClearYoungEphemerons(EphemeronTableList* ephemeron_table_list);
  void ClearOldEphemerons();
  void HandleSurvivingNewLargeObjects();

  void SweepArrayBufferExtensions();

  void IterateStackAndScavenge(RootScavengeVisitor* root_scavenge_visitor,
                               Scavenger** scavengers, int num_scavenge_tasks,
                               int main_thread_id);

  Isolate* const isolate_;
  Heap* const heap_;
  base::Semaphore parallel_scavenge_semaphore_;
  SurvivingNewLargeObjectsMap surviving_new_large_objects_;

  friend class Scavenger;
};

class Scavenger {
 public:
@@ -59,7 +88,6 @@ class Scavenger {
inline bool Pop(struct PromotionListEntry* entry);
inline bool IsGlobalPoolEmpty();
inline bool ShouldEagerlyProcessPromotionList();
inline void FlushToGlobal();
private:
PromotionList* promotion_list_;
@@ -74,12 +102,10 @@ class Scavenger {
inline void PushLargeObject(int task_id, HeapObject object, Map map,
int size);
inline bool IsEmpty();
inline size_t GlobalPoolSize() const;
inline size_t LocalPushSegmentSize(int task_id);
inline bool Pop(int task_id, struct PromotionListEntry* entry);
inline bool IsGlobalPoolEmpty();
inline bool ShouldEagerlyProcessPromotionList(int task_id);
inline void FlushToGlobal(int task_id);
private:
static const int kRegularObjectPromotionListSegmentSize = 256;
@@ -108,11 +134,10 @@ class Scavenger {
// Processes remaining work (=objects) after single objects have been
// manually scavenged using ScavengeObject or CheckAndScavengeObject.
void Process(JobDelegate* delegate = nullptr);
void Process(OneshotBarrier* barrier = nullptr);
// Finalize the Scavenger. Needs to be called from the main thread.
void Finalize();
void Flush();
void AddEphemeronHashTable(EphemeronHashTable table);
@@ -251,66 +276,6 @@ class ScavengeVisitor final : public NewSpaceVisitor<ScavengeVisitor> {
  Scavenger* const scavenger_;
};

class ScavengerCollector {
 public:
  static const int kMaxScavengerTasks = 8;
  static const int kMainThreadId = 0;

  explicit ScavengerCollector(Heap* heap);

  void CollectGarbage();

 private:
  class JobTask : public v8::JobTask {
   public:
    explicit JobTask(
        ScavengerCollector* outer,
        std::vector<std::unique_ptr<Scavenger>>* scavengers,
        std::vector<std::pair<ParallelWorkItem, MemoryChunk*>> memory_chunks,
        Scavenger::CopiedList* copied_list,
        Scavenger::PromotionList* promotion_list);

    void Run(JobDelegate* delegate) override;
    size_t GetMaxConcurrency(size_t worker_count) const override;

   private:
    void ProcessItems(JobDelegate* delegate, Scavenger* scavenger);
    void ConcurrentScavengePages(Scavenger* scavenger);

    ScavengerCollector* outer_;
    std::vector<std::unique_ptr<Scavenger>>* scavengers_;
    std::vector<std::pair<ParallelWorkItem, MemoryChunk*>> memory_chunks_;
    std::atomic<size_t> remaining_memory_chunks_{0};
    IndexGenerator generator_;
    Scavenger::CopiedList* copied_list_;
    Scavenger::PromotionList* promotion_list_;
  };

  void MergeSurvivingNewLargeObjects(
      const SurvivingNewLargeObjectsMap& objects);

  int NumberOfScavengeTasks();

  void ProcessWeakReferences(EphemeronTableList* ephemeron_table_list);
  void ClearYoungEphemerons(EphemeronTableList* ephemeron_table_list);
  void ClearOldEphemerons();
  void HandleSurvivingNewLargeObjects();

  void SweepArrayBufferExtensions();

  void IterateStackAndScavenge(
      RootScavengeVisitor* root_scavenge_visitor,
      std::vector<std::unique_ptr<Scavenger>>* scavengers, int main_thread_id);

  Isolate* const isolate_;
  Heap* const heap_;
  SurvivingNewLargeObjectsMap surviving_new_large_objects_;

  friend class Scavenger;
};
} // namespace internal
} // namespace v8
......
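Tying the pieces together: the removed JobTask above is presumably what CollectGarbage() posted and then joined, which is why the original description can say that "no yielding is necessary since the main thread Join()s". A standalone mock of that post / contribute / join shape (none of the types below are V8's; the real code goes through v8's platform Jobs API):

// Standalone mock, illustrative only: the main thread posts background work,
// contributes to it itself, and blocks in Join() until everything is done,
// so the workers never need to yield back to the caller.
#include <atomic>
#include <cstdio>
#include <functional>
#include <thread>
#include <utility>
#include <vector>

class MockJobHandle {
 public:
  MockJobHandle(std::function<void()> work, int workers) : work_(std::move(work)) {
    for (int i = 0; i < workers; ++i) threads_.emplace_back(work_);
  }
  void Join() {
    work_();                            // main thread contributes to the job...
    for (auto& t : threads_) t.join();  // ...then waits for the background workers
  }

 private:
  std::function<void()> work_;
  std::vector<std::thread> threads_;
};

int main() {
  std::atomic<int> pages{32};
  auto scavenge = [&pages] {
    // Claim pages until none remain; a stand-in for scavenging memory chunks.
    while (pages.fetch_sub(1, std::memory_order_relaxed) > 0) {
    }
  };
  MockJobHandle job(scavenge, /*workers=*/3);
  job.Join();  // when Join() returns, the scavenge phase is complete
  std::printf("scavenge complete\n");
  return 0;
}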