Commit ee2b095a authored by jochen@chromium.org

Introduce --job-based-sweeping flag and use individual jobs for sweeping if set

BUG=v8:3104
R=hpayer@chromium.org
LOG=y

Review URL: https://codereview.chromium.org/143803007

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@19357 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 4aabb8ae
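
The gist of the patch: when --job-based-sweeping is set, the sweeping work for the old data and old pointer spaces is wrapped in v8::Task objects, handed to the embedder's platform for execution on background threads, and joined by waiting on a semaphore once per posted task. Below is a minimal, self-contained sketch of that post-and-join pattern; it uses only the C++ standard library and placeholder names (Semaphore, SweepSpace) rather than V8's own types, so treat it as an illustration, not the actual V8 code.

    // Illustrative sketch only; not part of this change.
    #include <condition_variable>
    #include <cstdio>
    #include <mutex>
    #include <thread>

    // Tiny counting semaphore built from C++11 primitives (std::counting_semaphore
    // is C++20); V8 uses its own Semaphore class for the same purpose.
    class Semaphore {
     public:
      explicit Semaphore(int count) : count_(count) {}
      void Signal() {
        std::lock_guard<std::mutex> lock(mutex_);
        ++count_;
        cv_.notify_one();
      }
      void Wait() {
        std::unique_lock<std::mutex> lock(mutex_);
        cv_.wait(lock, [this] { return count_ > 0; });
        --count_;
      }
     private:
      std::mutex mutex_;
      std::condition_variable cv_;
      int count_;
    };

    // Stand-in for the per-space sweeping work (hypothetical helper).
    void SweepSpace(const char* space_name) {
      std::printf("sweeping %s\n", space_name);
    }

    int main() {
      Semaphore pending_jobs(0);

      // "Post" one job per space, as StartSweeperThreads does via
      // Platform::CallOnBackgroundThread; detached threads stand in for the
      // embedder's background thread pool here.
      const char* spaces[] = {"old data space", "old pointer space"};
      for (const char* space : spaces) {
        std::thread([&pending_jobs, space] {
          SweepSpace(space);
          pending_jobs.Signal();  // like pending_sweeper_jobs_semaphore_.Signal()
        }).detach();
      }

      // Join: wait once per posted job, as WaitUntilSweepingCompleted does.
      pending_jobs.Wait();
      pending_jobs.Wait();
      std::printf("sweeping completed\n");
      return 0;
    }
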
src/flag-definitions.h
@@ -530,6 +530,7 @@ DEFINE_bool(parallel_sweeping, true, "enable parallel sweeping")
 DEFINE_bool(concurrent_sweeping, false, "enable concurrent sweeping")
 DEFINE_int(sweeper_threads, 0,
            "number of parallel and concurrent sweeping threads")
+DEFINE_bool(job_based_sweeping, false, "enable job based sweeping")
 #ifdef VERIFY_HEAP
 DEFINE_bool(verify_heap, false, "verify heap pointers before and after GC")
 #endif
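
Not part of the diff: flags declared with DEFINE_bool default as shown (false here) and can be flipped from the d8 shell's command line or via an embedder call such as V8::SetFlagsFromString. Presumably something like

    d8 --job-based-sweeping script.js

would exercise the new path; V8's flag parser treats dashes and underscores in flag names interchangeably.
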
src/heap.cc
@@ -6360,7 +6360,7 @@ intptr_t Heap::PromotedSpaceSizeOfObjects() {
 bool Heap::AdvanceSweepers(int step_size) {
-  ASSERT(isolate()->num_sweeper_threads() == 0);
+  ASSERT(!mark_compact_collector()->AreSweeperThreadsActivated());
   bool sweeping_complete = old_data_space()->AdvanceSweeper(step_size);
   sweeping_complete &= old_pointer_space()->AdvanceSweeper(step_size);
   return sweeping_complete;
src/isolate.cc
@@ -1671,6 +1671,10 @@ void Isolate::Deinit() {
     delete[] sweeper_thread_;
     sweeper_thread_ = NULL;

+    if (FLAG_job_based_sweeping &&
+        heap_.mark_compact_collector()->IsConcurrentSweepingInProgress()) {
+      heap_.mark_compact_collector()->WaitUntilSweepingCompleted();
+    }

     if (FLAG_hydrogen_stats) GetHStatistics()->Print();
@@ -2012,7 +2016,10 @@ bool Isolate::Init(Deserializer* des) {
     max_available_threads_ = Max(Min(CPU::NumberOfProcessorsOnline(), 4), 1);
   }

-  num_sweeper_threads_ = SweeperThread::NumberOfThreads(max_available_threads_);
+  if (!FLAG_job_based_sweeping) {
+    num_sweeper_threads_ =
+        SweeperThread::NumberOfThreads(max_available_threads_);
+  }

   if (FLAG_trace_hydrogen || FLAG_trace_hydrogen_stubs) {
     PrintF("Concurrent recompilation has been disabled for tracing.\n");
src/mark-compact.cc
@@ -67,6 +67,7 @@ MarkCompactCollector::MarkCompactCollector(Heap* heap) : // NOLINT
       compacting_(false),
       was_marked_incrementally_(false),
       sweeping_pending_(false),
+      pending_sweeper_jobs_semaphore_(0),
       sequential_sweeping_(false),
       tracer_(NULL),
       migration_slots_buffer_(NULL),
@@ -569,6 +570,27 @@ void MarkCompactCollector::ClearMarkbits() {
 }

+class MarkCompactCollector::SweeperTask : public v8::Task {
+ public:
+  SweeperTask(Heap* heap, PagedSpace* space)
+      : heap_(heap), space_(space) {}
+
+  virtual ~SweeperTask() {}
+
+ private:
+  // v8::Task overrides.
+  virtual void Run() V8_OVERRIDE {
+    heap_->mark_compact_collector()->SweepInParallel(space_);
+    heap_->mark_compact_collector()->pending_sweeper_jobs_semaphore_.Signal();
+  }
+
+  Heap* heap_;
+  PagedSpace* space_;
+
+  DISALLOW_COPY_AND_ASSIGN(SweeperTask);
+};
+
 void MarkCompactCollector::StartSweeperThreads() {
   // TODO(hpayer): This check is just used for debugging purpose and
   // should be removed or turned into an assert after investigating the
@@ -579,6 +601,14 @@ void MarkCompactCollector::StartSweeperThreads() {
   for (int i = 0; i < isolate()->num_sweeper_threads(); i++) {
     isolate()->sweeper_threads()[i]->StartSweeping();
   }
+  if (FLAG_job_based_sweeping) {
+    V8::GetCurrentPlatform()->CallOnBackgroundThread(
+        new SweeperTask(heap(), heap()->old_data_space()),
+        v8::Platform::kShortRunningTask);
+    V8::GetCurrentPlatform()->CallOnBackgroundThread(
+        new SweeperTask(heap(), heap()->old_pointer_space()),
+        v8::Platform::kShortRunningTask);
+  }
 }
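
Posting via V8::GetCurrentPlatform()->CallOnBackgroundThread() only hands the SweeperTasks to the embedder; they run concurrently only if the embedder's v8::Platform actually schedules them. As a rough sketch of what that might look like (assuming the two-method Platform interface from include/v8-platform.h around this revision; the header name, signatures, and registration step are assumptions, not part of this change):

    // Hypothetical embedder-side platform sketch; not part of this patch.
    #include "v8-platform.h"
    #include <thread>

    class SimplePlatform : public v8::Platform {
     public:
      virtual void CallOnBackgroundThread(v8::Task* task,
                                          ExpectedRuntime expected_runtime) {
        // Run each posted task (e.g. a SweeperTask) on its own detached
        // thread; the platform takes ownership and deletes the task.
        std::thread([task]() {
          task->Run();
          delete task;
        }).detach();
      }
      virtual void CallOnForegroundThread(v8::Isolate* isolate, v8::Task* task) {
        // A real embedder would queue this onto the isolate's main thread;
        // running it inline is a simplification for the sketch.
        task->Run();
        delete task;
      }
    };
    // An instance would be registered at startup through the embedder
    // initialization API of this era; the exact call is assumed, not shown here.
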
@@ -587,6 +617,11 @@ void MarkCompactCollector::WaitUntilSweepingCompleted() {
   for (int i = 0; i < isolate()->num_sweeper_threads(); i++) {
     isolate()->sweeper_threads()[i]->WaitForSweeperThread();
   }
+  if (FLAG_job_based_sweeping) {
+    // Wait twice for both jobs.
+    pending_sweeper_jobs_semaphore_.Wait();
+    pending_sweeper_jobs_semaphore_.Wait();
+  }
   sweeping_pending_ = false;
   RefillFreeLists(heap()->paged_space(OLD_DATA_SPACE));
   RefillFreeLists(heap()->paged_space(OLD_POINTER_SPACE));
@@ -616,7 +651,7 @@ intptr_t MarkCompactCollector::RefillFreeLists(PagedSpace* space) {
 bool MarkCompactCollector::AreSweeperThreadsActivated() {
-  return isolate()->sweeper_threads() != NULL;
+  return isolate()->sweeper_threads() != NULL || FLAG_job_based_sweeping;
 }
@@ -4138,7 +4173,7 @@ void MarkCompactCollector::SweepSpaces() {
 #endif
   SweeperType how_to_sweep =
       FLAG_lazy_sweeping ? LAZY_CONSERVATIVE : CONSERVATIVE;
-  if (isolate()->num_sweeper_threads() > 0) {
+  if (AreSweeperThreadsActivated()) {
     if (FLAG_parallel_sweeping) how_to_sweep = PARALLEL_CONSERVATIVE;
     if (FLAG_concurrent_sweeping) how_to_sweep = CONCURRENT_CONSERVATIVE;
   }
src/mark-compact.h
@@ -744,6 +744,8 @@ class MarkCompactCollector {
   void MarkAllocationSite(AllocationSite* site);

  private:
+  class SweeperTask;
+
   explicit MarkCompactCollector(Heap* heap);
   ~MarkCompactCollector();
@@ -791,6 +793,8 @@ class MarkCompactCollector {
   // True if concurrent or parallel sweeping is currently in progress.
   bool sweeping_pending_;

+  Semaphore pending_sweeper_jobs_semaphore_;
+
   bool sequential_sweeping_;

   // A pointer to the current stack-allocated GC tracer object during a full
src/sweeper-thread.cc
@@ -45,6 +45,7 @@ SweeperThread::SweeperThread(Isolate* isolate)
       start_sweeping_semaphore_(0),
       end_sweeping_semaphore_(0),
       stop_semaphore_(0) {
+  ASSERT(!FLAG_job_based_sweeping);
   NoBarrier_Store(&stop_thread_, static_cast<AtomicWord>(false));
 }