Commit f46a92c2 authored by Michael Lippautz, committed by Commit Bot

[heap] MC: Parallel marking for the atomic pause

Reuse existing concurrent marking infrastructure for marking in the
atomic pause.

Details:
- Do not stop concurrent marking when entering the atomic pause.
- Start concurrent marking when it was off. This is required for
  non-incremental MC GCs.
- Use atomic marking state in MC.
- Transition through grey and avoid white->black.

CQ_INCLUDE_TRYBOTS=master.tryserver.v8:v8_linux64_tsan_rel;master.tryserver.v8:v8_linux64_tsan_concurrent_marking_rel_ng;master.tryserver.blink:linux_trusty_blink_rel;master.tryserver.chromium.linux:linux_optional_gpu_tests_rel;master.tryserver.chromium.mac:mac_optional_gpu_tests_rel;master.tryserver.chromium.win:win_optional_gpu_tests_rel;master.tryserver.chromium.android:android_optional_gpu_tests_rel

Bug: chromium:750084
Change-Id: I9f84661e69beac6254494907bdfe94e865b1e3ab
Reviewed-on: https://chromium-review.googlesource.com/690002
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#48281}
parent 94388568
...@@ -310,6 +310,7 @@ ...@@ -310,6 +310,7 @@
F(MC_FINISH) \ F(MC_FINISH) \
F(MC_MARK) \ F(MC_MARK) \
F(MC_MARK_FINISH_INCREMENTAL) \ F(MC_MARK_FINISH_INCREMENTAL) \
F(MC_MARK_MAIN) \
F(MC_MARK_ROOTS) \ F(MC_MARK_ROOTS) \
F(MC_MARK_WEAK_CLOSURE) \ F(MC_MARK_WEAK_CLOSURE) \
F(MC_MARK_WEAK_CLOSURE_EPHEMERAL) \ F(MC_MARK_WEAK_CLOSURE_EPHEMERAL) \
......
...@@ -603,6 +603,7 @@ void GCTracer::PrintNVP() const { ...@@ -603,6 +603,7 @@ void GCTracer::PrintNVP() const {
"mark=%.1f " "mark=%.1f "
"mark.finish_incremental=%.1f " "mark.finish_incremental=%.1f "
"mark.roots=%.1f " "mark.roots=%.1f "
"mark.main=%.1f "
"mark.weak_closure=%.1f " "mark.weak_closure=%.1f "
"mark.weak_closure.ephemeral=%.1f " "mark.weak_closure.ephemeral=%.1f "
"mark.weak_closure.weak_handles=%.1f " "mark.weak_closure.weak_handles=%.1f "
...@@ -686,6 +687,7 @@ void GCTracer::PrintNVP() const { ...@@ -686,6 +687,7 @@ void GCTracer::PrintNVP() const {
current_.scopes[Scope::MC_FINISH], current_.scopes[Scope::MC_MARK], current_.scopes[Scope::MC_FINISH], current_.scopes[Scope::MC_MARK],
current_.scopes[Scope::MC_MARK_FINISH_INCREMENTAL], current_.scopes[Scope::MC_MARK_FINISH_INCREMENTAL],
current_.scopes[Scope::MC_MARK_ROOTS], current_.scopes[Scope::MC_MARK_ROOTS],
current_.scopes[Scope::MC_MARK_MAIN],
current_.scopes[Scope::MC_MARK_WEAK_CLOSURE], current_.scopes[Scope::MC_MARK_WEAK_CLOSURE],
current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_EPHEMERAL], current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_EPHEMERAL],
current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_WEAK_HANDLES], current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_WEAK_HANDLES],
......
...@@ -425,7 +425,6 @@ void IncrementalMarking::ActivateGeneratedStub(Code* stub) { ...@@ -425,7 +425,6 @@ void IncrementalMarking::ActivateGeneratedStub(Code* stub) {
} }
} }
static void PatchIncrementalMarkingRecordWriteStubs( static void PatchIncrementalMarkingRecordWriteStubs(
Heap* heap, RecordWriteStub::Mode mode) { Heap* heap, RecordWriteStub::Mode mode) {
UnseededNumberDictionary* stubs = heap->code_stubs(); UnseededNumberDictionary* stubs = heap->code_stubs();
...@@ -447,6 +446,12 @@ static void PatchIncrementalMarkingRecordWriteStubs( ...@@ -447,6 +446,12 @@ static void PatchIncrementalMarkingRecordWriteStubs(
} }
} }
// Tears down the incremental-marking write-barrier machinery.
// Split out of Stop() so the mark-compact collector can keep the barrier
// active through the atomic pause and deactivate it only once marking has
// fully finished (see MarkCompactCollector::MarkLiveObjects).
void IncrementalMarking::Deactivate() {
  // Order matters: first stop routing writes through the incremental
  // barrier, then repatch the record-write stubs back to the plain
  // store-buffer-only variant.
  DeactivateIncrementalWriteBarrier();
  PatchIncrementalMarkingRecordWriteStubs(heap_,
                                          RecordWriteStub::STORE_BUFFER_ONLY);
}
void IncrementalMarking::Start(GarbageCollectionReason gc_reason) { void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
if (FLAG_trace_incremental_marking) { if (FLAG_trace_incremental_marking) {
int old_generation_size_mb = int old_generation_size_mb =
...@@ -933,11 +938,6 @@ void IncrementalMarking::Stop() { ...@@ -933,11 +938,6 @@ void IncrementalMarking::Stop() {
} }
IncrementalMarking::set_should_hurry(false); IncrementalMarking::set_should_hurry(false);
if (IsMarking()) {
PatchIncrementalMarkingRecordWriteStubs(heap_,
RecordWriteStub::STORE_BUFFER_ONLY);
DeactivateIncrementalWriteBarrier();
}
heap_->isolate()->stack_guard()->ClearGC(); heap_->isolate()->stack_guard()->ClearGC();
SetState(STOPPED); SetState(STOPPED);
is_compacting_ = false; is_compacting_ = false;
......
...@@ -292,6 +292,8 @@ class V8_EXPORT_PRIVATE IncrementalMarking { ...@@ -292,6 +292,8 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
marking_worklist_ = marking_worklist; marking_worklist_ = marking_worklist;
} }
void Deactivate();
private: private:
class Observer : public AllocationObserver { class Observer : public AllocationObserver {
public: public:
......
...@@ -13,7 +13,7 @@ namespace v8 { ...@@ -13,7 +13,7 @@ namespace v8 {
namespace internal { namespace internal {
void MarkCompactCollector::MarkObject(HeapObject* host, HeapObject* obj) { void MarkCompactCollector::MarkObject(HeapObject* host, HeapObject* obj) {
if (non_atomic_marking_state()->WhiteToBlack(obj)) { if (atomic_marking_state()->WhiteToGrey(obj)) {
marking_worklist()->Push(obj); marking_worklist()->Push(obj);
if (V8_UNLIKELY(FLAG_track_retaining_path)) { if (V8_UNLIKELY(FLAG_track_retaining_path)) {
heap_->AddRetainer(host, obj); heap_->AddRetainer(host, obj);
...@@ -22,7 +22,7 @@ void MarkCompactCollector::MarkObject(HeapObject* host, HeapObject* obj) { ...@@ -22,7 +22,7 @@ void MarkCompactCollector::MarkObject(HeapObject* host, HeapObject* obj) {
} }
void MarkCompactCollector::MarkRootObject(Root root, HeapObject* obj) { void MarkCompactCollector::MarkRootObject(Root root, HeapObject* obj) {
if (non_atomic_marking_state()->WhiteToBlack(obj)) { if (atomic_marking_state()->WhiteToGrey(obj)) {
marking_worklist()->Push(obj); marking_worklist()->Push(obj);
if (V8_UNLIKELY(FLAG_track_retaining_path)) { if (V8_UNLIKELY(FLAG_track_retaining_path)) {
heap_->AddRetainingRoot(root, obj); heap_->AddRetainingRoot(root, obj);
...@@ -31,7 +31,7 @@ void MarkCompactCollector::MarkRootObject(Root root, HeapObject* obj) { ...@@ -31,7 +31,7 @@ void MarkCompactCollector::MarkRootObject(Root root, HeapObject* obj) {
} }
void MarkCompactCollector::MarkExternallyReferencedObject(HeapObject* obj) { void MarkCompactCollector::MarkExternallyReferencedObject(HeapObject* obj) {
if (non_atomic_marking_state()->WhiteToBlack(obj)) { if (atomic_marking_state()->WhiteToGrey(obj)) {
marking_worklist()->Push(obj); marking_worklist()->Push(obj);
if (V8_UNLIKELY(FLAG_track_retaining_path)) { if (V8_UNLIKELY(FLAG_track_retaining_path)) {
heap_->AddRetainingRoot(Root::kWrapperTracing, obj); heap_->AddRetainingRoot(Root::kWrapperTracing, obj);
......
...@@ -523,20 +523,11 @@ void MarkCompactCollector::CollectGarbage() { ...@@ -523,20 +523,11 @@ void MarkCompactCollector::CollectGarbage() {
heap()->minor_mark_compact_collector()->CleanupSweepToIteratePages(); heap()->minor_mark_compact_collector()->CleanupSweepToIteratePages();
MarkLiveObjects(); MarkLiveObjects();
DCHECK(heap_->incremental_marking()->IsStopped());
ClearNonLiveReferences(); ClearNonLiveReferences();
VerifyMarking();
RecordObjectStats(); RecordObjectStats();
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
FullMarkingVerifier verifier(heap());
verifier.Run();
}
#endif
StartSweepSpaces(); StartSweepSpaces();
Evacuate(); Evacuate();
...@@ -984,19 +975,12 @@ void MarkCompactCollector::Prepare() { ...@@ -984,19 +975,12 @@ void MarkCompactCollector::Prepare() {
// them here. // them here.
heap()->memory_allocator()->unmapper()->WaitUntilCompleted(); heap()->memory_allocator()->unmapper()->WaitUntilCompleted();
heap()->concurrent_marking()->EnsureCompleted();
heap()->concurrent_marking()->FlushLiveBytes(non_atomic_marking_state());
#ifdef VERIFY_HEAP
heap()->old_space()->VerifyLiveBytes();
heap()->map_space()->VerifyLiveBytes();
heap()->code_space()->VerifyLiveBytes();
#endif
// Clear marking bits if incremental marking is aborted. // Clear marking bits if incremental marking is aborted.
if (was_marked_incrementally_ && heap_->ShouldAbortIncrementalMarking()) { if (was_marked_incrementally_ && heap_->ShouldAbortIncrementalMarking()) {
heap()->incremental_marking()->Stop(); heap()->incremental_marking()->Stop();
heap()->incremental_marking()->AbortBlackAllocation(); heap()->incremental_marking()->AbortBlackAllocation();
FinishConcurrentMarking();
heap()->incremental_marking()->Deactivate();
ClearMarkbits(); ClearMarkbits();
AbortWeakCollections(); AbortWeakCollections();
AbortWeakObjects(); AbortWeakObjects();
...@@ -1031,6 +1015,28 @@ void MarkCompactCollector::Prepare() { ...@@ -1031,6 +1015,28 @@ void MarkCompactCollector::Prepare() {
#endif #endif
} }
// Joins all concurrent marking tasks and folds their per-task live-byte
// counts into the main thread's (non-atomic) marking state. No-op when
// concurrent marking is disabled by flag. Must run before marking results
// are verified or consumed (see MarkLiveObjects / Prepare).
void MarkCompactCollector::FinishConcurrentMarking() {
  if (FLAG_concurrent_marking) {
    heap()->concurrent_marking()->EnsureCompleted();
    // Concurrent markers accumulate live bytes locally; flush them into the
    // collector-wide state so sweeping sees correct per-page live bytes.
    heap()->concurrent_marking()->FlushLiveBytes(non_atomic_marking_state());
  }
}
// Sanity-checks the completed marking phase: the worklist must be drained,
// incremental marking must be stopped, and (in VERIFY_HEAP builds) the
// marking verifier and per-space live-byte accounting must agree with the
// mark bits. Called after MarkLiveObjects(), before clearing references.
void MarkCompactCollector::VerifyMarking() {
  CHECK(marking_worklist()->IsEmpty());
  DCHECK(heap_->incremental_marking()->IsStopped());
// Merged the two adjacent #ifdef VERIFY_HEAP regions into a single guard;
// behavior is unchanged (same statements, same order).
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    FullMarkingVerifier verifier(heap());
    verifier.Run();
  }
  // Live-byte verification runs unconditionally in VERIFY_HEAP builds,
  // independent of FLAG_verify_heap, matching the pre-refactor behavior.
  heap()->old_space()->VerifyLiveBytes();
  heap()->map_space()->VerifyLiveBytes();
  heap()->code_space()->VerifyLiveBytes();
#endif
}
void MarkCompactCollector::Finish() { void MarkCompactCollector::Finish() {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_FINISH); TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_FINISH);
...@@ -1126,7 +1132,7 @@ class MarkCompactMarkingVisitor final ...@@ -1126,7 +1132,7 @@ class MarkCompactMarkingVisitor final
// Marks the object black without pushing it on the marking stack. Returns // Marks the object black without pushing it on the marking stack. Returns
// true if object needed marking and false otherwise. // true if object needed marking and false otherwise.
V8_INLINE bool MarkObjectWithoutPush(HeapObject* host, HeapObject* object) { V8_INLINE bool MarkObjectWithoutPush(HeapObject* host, HeapObject* object) {
if (collector_->non_atomic_marking_state()->WhiteToBlack(object)) { if (collector_->atomic_marking_state()->WhiteToBlack(object)) {
if (V8_UNLIKELY(FLAG_track_retaining_path)) { if (V8_UNLIKELY(FLAG_track_retaining_path)) {
heap_->AddRetainer(host, object); heap_->AddRetainer(host, object);
} }
...@@ -1171,7 +1177,6 @@ class MarkCompactCollector::RootMarkingVisitor final : public RootVisitor { ...@@ -1171,7 +1177,6 @@ class MarkCompactCollector::RootMarkingVisitor final : public RootVisitor {
if (!(*p)->IsHeapObject()) return; if (!(*p)->IsHeapObject()) return;
collector_->MarkRootObject(root, HeapObject::cast(*p)); collector_->MarkRootObject(root, HeapObject::cast(*p));
collector_->EmptyMarkingWorklist();
} }
MarkCompactCollector* const collector_; MarkCompactCollector* const collector_;
...@@ -1795,10 +1800,9 @@ void MarkCompactCollector::MarkStringTable( ...@@ -1795,10 +1800,9 @@ void MarkCompactCollector::MarkStringTable(
ObjectVisitor* custom_root_body_visitor) { ObjectVisitor* custom_root_body_visitor) {
StringTable* string_table = heap()->string_table(); StringTable* string_table = heap()->string_table();
// Mark the string table itself. // Mark the string table itself.
if (non_atomic_marking_state()->WhiteToBlack(string_table)) { if (atomic_marking_state()->WhiteToBlack(string_table)) {
// Explicitly mark the prefix. // Explicitly mark the prefix.
string_table->IteratePrefix(custom_root_body_visitor); string_table->IteratePrefix(custom_root_body_visitor);
EmptyMarkingWorklist();
} }
} }
...@@ -1813,24 +1817,20 @@ void MarkCompactCollector::MarkRoots(RootVisitor* root_visitor, ...@@ -1813,24 +1817,20 @@ void MarkCompactCollector::MarkRoots(RootVisitor* root_visitor,
ProcessTopOptimizedFrame(custom_root_body_visitor); ProcessTopOptimizedFrame(custom_root_body_visitor);
} }
// Mark all objects reachable from the objects on the marking work list. void MarkCompactCollector::ProcessMarkingWorklist() {
// Before: the marking work list contains zero or more heap object pointers.
// After: the marking work list is empty, and all objects reachable from the
// marking work list have been marked.
void MarkCompactCollector::EmptyMarkingWorklist() {
HeapObject* object; HeapObject* object;
MarkCompactMarkingVisitor visitor(this); MarkCompactMarkingVisitor visitor(this);
while ((object = marking_worklist()->Pop()) != nullptr) { while ((object = marking_worklist()->Pop()) != nullptr) {
DCHECK(!object->IsFiller()); DCHECK(!object->IsFiller());
DCHECK(object->IsHeapObject()); DCHECK(object->IsHeapObject());
DCHECK(heap()->Contains(object)); DCHECK(heap()->Contains(object));
DCHECK(!(non_atomic_marking_state()->IsWhite(object))); DCHECK(!(atomic_marking_state()->IsWhite(object)));
atomic_marking_state()->GreyToBlack(object);
Map* map = object->map(); Map* map = object->map();
MarkObject(object, map); MarkObject(object, map);
visitor.Visit(map, object); visitor.Visit(map, object);
} }
DCHECK(marking_worklist()->IsEmpty()); DCHECK(marking_worklist()->IsBailoutEmpty());
} }
// Mark all objects reachable (transitively) from objects on the marking // Mark all objects reachable (transitively) from objects on the marking
...@@ -1858,7 +1858,7 @@ void MarkCompactCollector::ProcessEphemeralMarking( ...@@ -1858,7 +1858,7 @@ void MarkCompactCollector::ProcessEphemeralMarking(
} }
ProcessWeakCollections(); ProcessWeakCollections();
work_to_do = !marking_worklist()->IsEmpty(); work_to_do = !marking_worklist()->IsEmpty();
EmptyMarkingWorklist(); ProcessMarkingWorklist();
} }
CHECK(marking_worklist()->IsEmpty()); CHECK(marking_worklist()->IsEmpty());
CHECK_EQ(0, heap()->local_embedder_heap_tracer()->NumberOfWrappersToTrace()); CHECK_EQ(0, heap()->local_embedder_heap_tracer()->NumberOfWrappersToTrace());
...@@ -1875,7 +1875,6 @@ void MarkCompactCollector::ProcessTopOptimizedFrame(ObjectVisitor* visitor) { ...@@ -1875,7 +1875,6 @@ void MarkCompactCollector::ProcessTopOptimizedFrame(ObjectVisitor* visitor) {
if (!code->CanDeoptAt(it.frame()->pc())) { if (!code->CanDeoptAt(it.frame()->pc())) {
Code::BodyDescriptor::IterateBody(code, visitor); Code::BodyDescriptor::IterateBody(code, visitor);
} }
EmptyMarkingWorklist();
return; return;
} }
} }
...@@ -2010,7 +2009,7 @@ class MinorMarkCompactCollector::RootMarkingVisitor : public RootVisitor { ...@@ -2010,7 +2009,7 @@ class MinorMarkCompactCollector::RootMarkingVisitor : public RootVisitor {
if (marking_state_->WhiteToGrey(object)) { if (marking_state_->WhiteToGrey(object)) {
collector_->main_marking_visitor()->Visit(object); collector_->main_marking_visitor()->Visit(object);
collector_->EmptyMarkingWorklist(); collector_->ProcessMarkingWorklist();
} }
} }
...@@ -2313,7 +2312,7 @@ void MinorMarkCompactCollector::MarkLiveObjects() { ...@@ -2313,7 +2312,7 @@ void MinorMarkCompactCollector::MarkLiveObjects() {
{ {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_WEAK); TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_WEAK);
heap()->IterateEncounteredWeakCollections(&root_visitor); heap()->IterateEncounteredWeakCollections(&root_visitor);
EmptyMarkingWorklist(); ProcessMarkingWorklist();
} }
{ {
...@@ -2322,11 +2321,11 @@ void MinorMarkCompactCollector::MarkLiveObjects() { ...@@ -2322,11 +2321,11 @@ void MinorMarkCompactCollector::MarkLiveObjects() {
&IsUnmarkedObjectForYoungGeneration); &IsUnmarkedObjectForYoungGeneration);
isolate()->global_handles()->IterateNewSpaceWeakUnmodifiedRoots( isolate()->global_handles()->IterateNewSpaceWeakUnmodifiedRoots(
&root_visitor); &root_visitor);
EmptyMarkingWorklist(); ProcessMarkingWorklist();
} }
} }
void MinorMarkCompactCollector::EmptyMarkingWorklist() { void MinorMarkCompactCollector::ProcessMarkingWorklist() {
MarkingWorklist::View marking_worklist(worklist(), kMainMarker); MarkingWorklist::View marking_worklist(worklist(), kMainMarker);
HeapObject* object = nullptr; HeapObject* object = nullptr;
while (marking_worklist.Pop(&object)) { while (marking_worklist.Pop(&object)) {
...@@ -2546,9 +2545,22 @@ void MarkCompactCollector::MarkLiveObjects() { ...@@ -2546,9 +2545,22 @@ void MarkCompactCollector::MarkLiveObjects() {
MarkRoots(&root_visitor, &custom_root_body_visitor); MarkRoots(&root_visitor, &custom_root_body_visitor);
} }
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_MAIN);
if (FLAG_concurrent_marking) {
heap_->concurrent_marking()->RescheduleTasksIfNeeded();
}
ProcessMarkingWorklist();
FinishConcurrentMarking();
ProcessMarkingWorklist();
}
{ {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WEAK_CLOSURE); TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WEAK_CLOSURE);
DCHECK(marking_worklist()->IsEmpty());
// The objects reachable from the roots are marked, yet unreachable // The objects reachable from the roots are marked, yet unreachable
// objects are unmarked. Mark objects reachable due to host // objects are unmarked. Mark objects reachable due to host
// application specific logic or through Harmony weak maps. // application specific logic or through Harmony weak maps.
...@@ -2570,7 +2582,7 @@ void MarkCompactCollector::MarkLiveObjects() { ...@@ -2570,7 +2582,7 @@ void MarkCompactCollector::MarkLiveObjects() {
GCTracer::Scope::MC_MARK_WEAK_CLOSURE_WEAK_HANDLES); GCTracer::Scope::MC_MARK_WEAK_CLOSURE_WEAK_HANDLES);
heap()->isolate()->global_handles()->IdentifyWeakHandles( heap()->isolate()->global_handles()->IdentifyWeakHandles(
&IsUnmarkedHeapObject); &IsUnmarkedHeapObject);
EmptyMarkingWorklist(); ProcessMarkingWorklist();
} }
// Then we mark the objects. // Then we mark the objects.
...@@ -2578,7 +2590,7 @@ void MarkCompactCollector::MarkLiveObjects() { ...@@ -2578,7 +2590,7 @@ void MarkCompactCollector::MarkLiveObjects() {
TRACE_GC(heap()->tracer(), TRACE_GC(heap()->tracer(),
GCTracer::Scope::MC_MARK_WEAK_CLOSURE_WEAK_ROOTS); GCTracer::Scope::MC_MARK_WEAK_CLOSURE_WEAK_ROOTS);
heap()->isolate()->global_handles()->IterateWeakRoots(&root_visitor); heap()->isolate()->global_handles()->IterateWeakRoots(&root_visitor);
EmptyMarkingWorklist(); ProcessMarkingWorklist();
} }
// Repeat Harmony weak maps marking to mark unmarked objects reachable from // Repeat Harmony weak maps marking to mark unmarked objects reachable from
...@@ -2595,6 +2607,10 @@ void MarkCompactCollector::MarkLiveObjects() { ...@@ -2595,6 +2607,10 @@ void MarkCompactCollector::MarkLiveObjects() {
} }
} }
} }
if (was_marked_incrementally_) {
heap()->incremental_marking()->Deactivate();
}
} }
......
...@@ -265,8 +265,8 @@ class MarkCompactCollectorBase { ...@@ -265,8 +265,8 @@ class MarkCompactCollectorBase {
// Marking operations for objects reachable from roots. // Marking operations for objects reachable from roots.
virtual void MarkLiveObjects() = 0; virtual void MarkLiveObjects() = 0;
// Mark objects reachable (transitively) from objects in the marking // Mark objects reachable (transitively) from objects in the marking
// stack. // work list.
virtual void EmptyMarkingWorklist() = 0; virtual void ProcessMarkingWorklist() = 0;
// Clear non-live references held in side data structures. // Clear non-live references held in side data structures.
virtual void ClearNonLiveReferences() = 0; virtual void ClearNonLiveReferences() = 0;
virtual void EvacuatePrologue() = 0; virtual void EvacuatePrologue() = 0;
...@@ -388,7 +388,7 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase { ...@@ -388,7 +388,7 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
void MarkLiveObjects() override; void MarkLiveObjects() override;
void MarkRootSetInParallel(); void MarkRootSetInParallel();
void EmptyMarkingWorklist() override; void ProcessMarkingWorklist() override;
void ClearNonLiveReferences() override; void ClearNonLiveReferences() override;
void EvacuatePrologue() override; void EvacuatePrologue() override;
...@@ -474,6 +474,7 @@ struct WeakObjects { ...@@ -474,6 +474,7 @@ struct WeakObjects {
// Collector for young and old generation. // Collector for young and old generation.
class MarkCompactCollector final : public MarkCompactCollectorBase { class MarkCompactCollector final : public MarkCompactCollectorBase {
public: public:
using AtomicMarkingState = MajorAtomicMarkingState;
using NonAtomicMarkingState = MajorNonAtomicMarkingState; using NonAtomicMarkingState = MajorNonAtomicMarkingState;
static const int kMainThread = 0; static const int kMainThread = 0;
...@@ -655,6 +656,8 @@ class MarkCompactCollector final : public MarkCompactCollectorBase { ...@@ -655,6 +656,8 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
kClearMarkbits, kClearMarkbits,
}; };
AtomicMarkingState* atomic_marking_state() { return &atomic_marking_state_; }
NonAtomicMarkingState* non_atomic_marking_state() { NonAtomicMarkingState* non_atomic_marking_state() {
return &non_atomic_marking_state_; return &non_atomic_marking_state_;
} }
...@@ -672,6 +675,8 @@ class MarkCompactCollector final : public MarkCompactCollectorBase { ...@@ -672,6 +675,8 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
// choosing spaces to compact. // choosing spaces to compact.
void Prepare(); void Prepare();
void FinishConcurrentMarking();
bool StartCompaction(); bool StartCompaction();
void AbortCompaction(); void AbortCompaction();
...@@ -731,6 +736,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase { ...@@ -731,6 +736,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
bool are_map_pointers_encoded() { return state_ == UPDATE_POINTERS; } bool are_map_pointers_encoded() { return state_ == UPDATE_POINTERS; }
#endif #endif
void VerifyMarking();
#ifdef VERIFY_HEAP #ifdef VERIFY_HEAP
void VerifyValidStoreAndSlotsBufferEntries(); void VerifyValidStoreAndSlotsBufferEntries();
void VerifyMarkbitsAreClean(); void VerifyMarkbitsAreClean();
...@@ -792,9 +798,9 @@ class MarkCompactCollector final : public MarkCompactCollectorBase { ...@@ -792,9 +798,9 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
// Collects a list of dependent code from maps embedded in optimize code. // Collects a list of dependent code from maps embedded in optimize code.
DependentCode* DependentCodeListFromNonLiveMaps(); DependentCode* DependentCodeListFromNonLiveMaps();
// This function empties the marking stack, but may leave overflowed objects // Drains the main thread marking work list. Will mark all pending objects
// in the heap, in which case the marking stack's overflow flag will be set. // if no concurrent threads are running.
void EmptyMarkingWorklist() override; void ProcessMarkingWorklist() override;
// Callback function for telling whether the object *p is an unmarked // Callback function for telling whether the object *p is an unmarked
// heap object. // heap object.
...@@ -908,6 +914,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase { ...@@ -908,6 +914,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
Sweeper sweeper_; Sweeper sweeper_;
AtomicMarkingState atomic_marking_state_;
NonAtomicMarkingState non_atomic_marking_state_; NonAtomicMarkingState non_atomic_marking_state_;
friend class FullEvacuator; friend class FullEvacuator;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment