Commit 9fc0a77b authored by hpayer, committed by Commit bot

Add basic support for parallel compaction and flag.

BUG=524425
LOG=n

Review URL: https://codereview.chromium.org/1314903002

Cr-Commit-Position: refs/heads/master@{#30359}
parent d6f224f4
...@@ -664,6 +664,7 @@ DEFINE_INT(min_progress_during_object_groups_marking, 128,
DEFINE_INT(max_object_groups_marking_rounds, 3,
"at most try this many times to over approximate the weak closure")
DEFINE_BOOL(concurrent_sweeping, true, "use concurrent sweeping")
DEFINE_BOOL(parallel_compaction, false, "use parallel compaction")
DEFINE_BOOL(trace_incremental_marking, false,
"trace progress of the incremental marking")
DEFINE_BOOL(track_gc_object_stats, false,
...@@ -813,6 +814,7 @@ DEFINE_BOOL(predictable, false, "enable predictable mode")
DEFINE_NEG_IMPLICATION(predictable, concurrent_recompilation)
DEFINE_NEG_IMPLICATION(predictable, concurrent_osr)
DEFINE_NEG_IMPLICATION(predictable, concurrent_sweeping)
DEFINE_NEG_IMPLICATION(predictable, parallel_compaction)
// mark-compact.cc
DEFINE_BOOL(force_marking_deque_overflows, false,
...
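For illustration, a minimal standalone sketch (plain C++, not V8's actual flag machinery) of how the new flag and its negative implication behave: --parallel_compaction defaults to off, and --predictable forces it back off even if it was passed, so predictable mode never runs compaction on extra threads. All names below are hypothetical stand-ins for the DEFINE_BOOL/DEFINE_NEG_IMPLICATION entries above.

#include <cstdio>

// Hypothetical stand-ins for the DEFINE_BOOL defaults above.
struct Flags {
  bool predictable = false;
  bool concurrent_sweeping = true;
  bool parallel_compaction = false;
};

// Models DEFINE_NEG_IMPLICATION(predictable, ...): once the premise flag is
// set, the implied flags are forced to false so no background threads run.
void EnforceImplications(Flags* flags) {
  if (flags->predictable) {
    flags->concurrent_sweeping = false;
    flags->parallel_compaction = false;
  }
}

int main() {
  Flags flags;
  flags.parallel_compaction = true;  // as if --parallel_compaction were passed
  flags.predictable = true;          // as if --predictable were passed
  EnforceImplications(&flags);
  // Prints 0: predictable mode wins and keeps compaction on the main thread.
  std::printf("parallel_compaction = %d\n", flags.parallel_compaction);
  return 0;
}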
...@@ -48,6 +48,7 @@ MarkCompactCollector::MarkCompactCollector(Heap* heap)
was_marked_incrementally_(false),
sweeping_in_progress_(false),
pending_sweeper_jobs_semaphore_(0),
pending_compaction_jobs_semaphore_(0),
evacuation_(false),
migration_slots_buffer_(NULL),
heap_(heap),
...@@ -459,6 +460,28 @@ void MarkCompactCollector::ClearMarkbits() {
}
class MarkCompactCollector::CompactionTask : public v8::Task {
public:
explicit CompactionTask(Heap* heap) : heap_(heap) {}
virtual ~CompactionTask() {}
private:
// v8::Task overrides.
void Run() override {
// TODO(mlippautz, hpayer): EvacuatePages is not thread-safe and can just be
// called by one thread concurrently.
heap_->mark_compact_collector()->EvacuatePages();
heap_->mark_compact_collector()
->pending_compaction_jobs_semaphore_.Signal();
}
Heap* heap_;
DISALLOW_COPY_AND_ASSIGN(CompactionTask);
};
class MarkCompactCollector::SweeperTask : public v8::Task {
public:
SweeperTask(Heap* heap, PagedSpace* space) : heap_(heap), space_(space) {}
...@@ -3289,6 +3312,12 @@ void MarkCompactCollector::EvacuateLiveObjectsFromPage(Page* p) {
}
void MarkCompactCollector::EvacuatePagesInParallel() {
V8::GetCurrentPlatform()->CallOnBackgroundThread(
new CompactionTask(heap()), v8::Platform::kShortRunningTask);
}
void MarkCompactCollector::EvacuatePages() {
int npages = evacuation_candidates_.length();
int abandoned_pages = 0;
...@@ -3595,7 +3624,12 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
GCTracer::Scope gc_scope(heap()->tracer(),
GCTracer::Scope::MC_EVACUATE_PAGES);
EvacuationScope evacuation_scope(this);
if (FLAG_parallel_compaction) {
EvacuatePagesInParallel();
pending_compaction_jobs_semaphore_.Wait();
} else {
EvacuatePages();
}
}
// Second pass: find pointers to new space and update them.
...
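For readers following the control flow: a minimal standalone sketch (plain C++11 with std::thread, not the v8::Task/v8::Platform API) of the handshake the commit wires up. One background task runs the evacuation work and signals a semaphore; the main thread takes the parallel path when the flag is set and blocks on Wait() until that single task has finished. Semaphore, EvacuatePages, and parallel_compaction below are illustrative stand-ins, not V8 code.

#include <condition_variable>
#include <cstdio>
#include <mutex>
#include <thread>

// Tiny counting semaphore, standing in for base::Semaphore.
class Semaphore {
 public:
  explicit Semaphore(int count) : count_(count) {}
  void Signal() {
    std::lock_guard<std::mutex> lock(mutex_);
    ++count_;
    cv_.notify_one();
  }
  void Wait() {
    std::unique_lock<std::mutex> lock(mutex_);
    cv_.wait(lock, [this] { return count_ > 0; });
    --count_;
  }

 private:
  std::mutex mutex_;
  std::condition_variable cv_;
  int count_;
};

// Stand-in for MarkCompactCollector::EvacuatePages(): the work itself stays
// single-threaded; only the thread it runs on changes.
void EvacuatePages() { std::printf("evacuating candidate pages...\n"); }

int main() {
  Semaphore pending_compaction_jobs(0);  // like pending_compaction_jobs_semaphore_
  bool parallel_compaction = true;       // like FLAG_parallel_compaction

  if (parallel_compaction) {
    // "EvacuatePagesInParallel": hand the work to one background thread,
    // which signals the semaphore once it is done.
    std::thread background([&] {
      EvacuatePages();
      pending_compaction_jobs.Signal();
    });
    // The main thread joins via the semaphore, mirroring
    // pending_compaction_jobs_semaphore_.Wait() in EvacuateNewSpaceAndCandidates.
    pending_compaction_jobs.Wait();
    background.join();  // clean shutdown for this sketch only
  } else {
    EvacuatePages();    // original single-threaded path
  }
  std::printf("compaction finished\n");
  return 0;
}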
...@@ -669,6 +669,7 @@ class MarkCompactCollector {
void RemoveObjectSlots(Address start_slot, Address end_slot);
private:
class CompactionTask;
class SweeperTask;
explicit MarkCompactCollector(Heap* heap);
...@@ -706,8 +707,12 @@ class MarkCompactCollector {
// True if concurrent or parallel sweeping is currently in progress.
bool sweeping_in_progress_;
// Synchronize sweeper threads.
base::Semaphore pending_sweeper_jobs_semaphore_;
// Synchronize compaction threads.
base::Semaphore pending_compaction_jobs_semaphore_;
bool evacuation_;
SlotsBufferAllocator slots_buffer_allocator_;
...@@ -866,6 +871,8 @@ class MarkCompactCollector {
void EvacuatePages();
void EvacuatePagesInParallel();
void EvacuateNewSpaceAndCandidates();
void ReleaseEvacuationCandidates();
...