Commit f46a92c2 authored by Michael Lippautz, committed by Commit Bot

[heap] MC: Parallel marking for the atomic pause

Reuse existing concurrent marking infrastructure for marking in the
atomic pause.

Details:
- Do not stop concurrent marking when entering the atomic pause.
- Start concurrent marking when it was off. This is required for
  non-incremental MC GCs.
- Use atomic marking state in MC.
- Transition through grey and avoid white->black.

CQ_INCLUDE_TRYBOTS=master.tryserver.v8:v8_linux64_tsan_rel;master.tryserver.v8:v8_linux64_tsan_concurrent_marking_rel_ng;master.tryserver.blink:linux_trusty_blink_rel;master.tryserver.chromium.linux:linux_optional_gpu_tests_rel;master.tryserver.chromium.mac:mac_optional_gpu_tests_rel;master.tryserver.chromium.win:win_optional_gpu_tests_rel;master.tryserver.chromium.android:android_optional_gpu_tests_rel

Bug: chromium:750084
Change-Id: I9f84661e69beac6254494907bdfe94e865b1e3ab
Reviewed-on: https://chromium-review.googlesource.com/690002
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#48281}
parent 94388568
......@@ -310,6 +310,7 @@
F(MC_FINISH) \
F(MC_MARK) \
F(MC_MARK_FINISH_INCREMENTAL) \
F(MC_MARK_MAIN) \
F(MC_MARK_ROOTS) \
F(MC_MARK_WEAK_CLOSURE) \
F(MC_MARK_WEAK_CLOSURE_EPHEMERAL) \
......
......@@ -603,6 +603,7 @@ void GCTracer::PrintNVP() const {
"mark=%.1f "
"mark.finish_incremental=%.1f "
"mark.roots=%.1f "
"mark.main=%.1f "
"mark.weak_closure=%.1f "
"mark.weak_closure.ephemeral=%.1f "
"mark.weak_closure.weak_handles=%.1f "
......@@ -686,6 +687,7 @@ void GCTracer::PrintNVP() const {
current_.scopes[Scope::MC_FINISH], current_.scopes[Scope::MC_MARK],
current_.scopes[Scope::MC_MARK_FINISH_INCREMENTAL],
current_.scopes[Scope::MC_MARK_ROOTS],
current_.scopes[Scope::MC_MARK_MAIN],
current_.scopes[Scope::MC_MARK_WEAK_CLOSURE],
current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_EPHEMERAL],
current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_WEAK_HANDLES],
......
......@@ -425,7 +425,6 @@ void IncrementalMarking::ActivateGeneratedStub(Code* stub) {
}
}
static void PatchIncrementalMarkingRecordWriteStubs(
Heap* heap, RecordWriteStub::Mode mode) {
UnseededNumberDictionary* stubs = heap->code_stubs();
......@@ -447,6 +446,12 @@ static void PatchIncrementalMarkingRecordWriteStubs(
}
}
// Tears down incremental-marking machinery without resetting marking state:
// removes the incremental write barrier and repatches the record-write stubs
// back to store-buffer-only mode. Factored out of Stop() so the mark-compact
// collector can defer it until after the atomic marking pause (see
// MarkCompactCollector::MarkLiveObjects in this commit).
void IncrementalMarking::Deactivate() {
DeactivateIncrementalWriteBarrier();
PatchIncrementalMarkingRecordWriteStubs(heap_,
RecordWriteStub::STORE_BUFFER_ONLY);
}
void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
if (FLAG_trace_incremental_marking) {
int old_generation_size_mb =
......@@ -933,11 +938,6 @@ void IncrementalMarking::Stop() {
}
IncrementalMarking::set_should_hurry(false);
if (IsMarking()) {
PatchIncrementalMarkingRecordWriteStubs(heap_,
RecordWriteStub::STORE_BUFFER_ONLY);
DeactivateIncrementalWriteBarrier();
}
heap_->isolate()->stack_guard()->ClearGC();
SetState(STOPPED);
is_compacting_ = false;
......
......@@ -292,6 +292,8 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
marking_worklist_ = marking_worklist;
}
void Deactivate();
private:
class Observer : public AllocationObserver {
public:
......
......@@ -13,7 +13,7 @@ namespace v8 {
namespace internal {
void MarkCompactCollector::MarkObject(HeapObject* host, HeapObject* obj) {
if (non_atomic_marking_state()->WhiteToBlack(obj)) {
if (atomic_marking_state()->WhiteToGrey(obj)) {
marking_worklist()->Push(obj);
if (V8_UNLIKELY(FLAG_track_retaining_path)) {
heap_->AddRetainer(host, obj);
......@@ -22,7 +22,7 @@ void MarkCompactCollector::MarkObject(HeapObject* host, HeapObject* obj) {
}
void MarkCompactCollector::MarkRootObject(Root root, HeapObject* obj) {
if (non_atomic_marking_state()->WhiteToBlack(obj)) {
if (atomic_marking_state()->WhiteToGrey(obj)) {
marking_worklist()->Push(obj);
if (V8_UNLIKELY(FLAG_track_retaining_path)) {
heap_->AddRetainingRoot(root, obj);
......@@ -31,7 +31,7 @@ void MarkCompactCollector::MarkRootObject(Root root, HeapObject* obj) {
}
void MarkCompactCollector::MarkExternallyReferencedObject(HeapObject* obj) {
if (non_atomic_marking_state()->WhiteToBlack(obj)) {
if (atomic_marking_state()->WhiteToGrey(obj)) {
marking_worklist()->Push(obj);
if (V8_UNLIKELY(FLAG_track_retaining_path)) {
heap_->AddRetainingRoot(Root::kWrapperTracing, obj);
......
......@@ -523,20 +523,11 @@ void MarkCompactCollector::CollectGarbage() {
heap()->minor_mark_compact_collector()->CleanupSweepToIteratePages();
MarkLiveObjects();
DCHECK(heap_->incremental_marking()->IsStopped());
ClearNonLiveReferences();
VerifyMarking();
RecordObjectStats();
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
FullMarkingVerifier verifier(heap());
verifier.Run();
}
#endif
StartSweepSpaces();
Evacuate();
......@@ -984,19 +975,12 @@ void MarkCompactCollector::Prepare() {
// them here.
heap()->memory_allocator()->unmapper()->WaitUntilCompleted();
heap()->concurrent_marking()->EnsureCompleted();
heap()->concurrent_marking()->FlushLiveBytes(non_atomic_marking_state());
#ifdef VERIFY_HEAP
heap()->old_space()->VerifyLiveBytes();
heap()->map_space()->VerifyLiveBytes();
heap()->code_space()->VerifyLiveBytes();
#endif
// Clear marking bits if incremental marking is aborted.
if (was_marked_incrementally_ && heap_->ShouldAbortIncrementalMarking()) {
heap()->incremental_marking()->Stop();
heap()->incremental_marking()->AbortBlackAllocation();
FinishConcurrentMarking();
heap()->incremental_marking()->Deactivate();
ClearMarkbits();
AbortWeakCollections();
AbortWeakObjects();
......@@ -1031,6 +1015,28 @@ void MarkCompactCollector::Prepare() {
#endif
}
// Waits for all concurrent marking tasks to complete and folds their
// per-task live-byte counts into the main thread's (non-atomic) marking
// state. No-op unless --concurrent-marking is enabled. Called from the
// atomic pause once the main thread has drained its own worklist.
void MarkCompactCollector::FinishConcurrentMarking() {
if (FLAG_concurrent_marking) {
heap()->concurrent_marking()->EnsureCompleted();
// After EnsureCompleted() no marker is running, so flushing into the
// non-atomic state is safe.
heap()->concurrent_marking()->FlushLiveBytes(non_atomic_marking_state());
}
}
// Verifies that marking has fully converged: the worklist must be empty and
// incremental marking stopped. In VERIFY_HEAP builds additionally runs the
// full marking verifier (behind --verify-heap) and checks the per-space
// live-byte accounting.
void MarkCompactCollector::VerifyMarking() {
  CHECK(marking_worklist()->IsEmpty());
  DCHECK(heap_->incremental_marking()->IsStopped());
// Merged the two adjacent #ifdef VERIFY_HEAP blocks into one; note that the
// live-byte checks intentionally run regardless of --verify-heap.
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    FullMarkingVerifier verifier(heap());
    verifier.Run();
  }
  heap()->old_space()->VerifyLiveBytes();
  heap()->map_space()->VerifyLiveBytes();
  heap()->code_space()->VerifyLiveBytes();
#endif
}
void MarkCompactCollector::Finish() {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_FINISH);
......@@ -1126,7 +1132,7 @@ class MarkCompactMarkingVisitor final
// Marks the object black without pushing it on the marking stack. Returns
// true if object needed marking and false otherwise.
V8_INLINE bool MarkObjectWithoutPush(HeapObject* host, HeapObject* object) {
if (collector_->non_atomic_marking_state()->WhiteToBlack(object)) {
if (collector_->atomic_marking_state()->WhiteToBlack(object)) {
if (V8_UNLIKELY(FLAG_track_retaining_path)) {
heap_->AddRetainer(host, object);
}
......@@ -1171,7 +1177,6 @@ class MarkCompactCollector::RootMarkingVisitor final : public RootVisitor {
if (!(*p)->IsHeapObject()) return;
collector_->MarkRootObject(root, HeapObject::cast(*p));
collector_->EmptyMarkingWorklist();
}
MarkCompactCollector* const collector_;
......@@ -1795,10 +1800,9 @@ void MarkCompactCollector::MarkStringTable(
ObjectVisitor* custom_root_body_visitor) {
StringTable* string_table = heap()->string_table();
// Mark the string table itself.
if (non_atomic_marking_state()->WhiteToBlack(string_table)) {
if (atomic_marking_state()->WhiteToBlack(string_table)) {
// Explicitly mark the prefix.
string_table->IteratePrefix(custom_root_body_visitor);
EmptyMarkingWorklist();
}
}
......@@ -1813,24 +1817,20 @@ void MarkCompactCollector::MarkRoots(RootVisitor* root_visitor,
ProcessTopOptimizedFrame(custom_root_body_visitor);
}
// Mark all objects reachable from the objects on the marking work list.
// Before: the marking work list contains zero or more heap object pointers.
// After: the marking work list is empty, and all objects reachable from the
// marking work list have been marked.
void MarkCompactCollector::EmptyMarkingWorklist() {
void MarkCompactCollector::ProcessMarkingWorklist() {
HeapObject* object;
MarkCompactMarkingVisitor visitor(this);
while ((object = marking_worklist()->Pop()) != nullptr) {
DCHECK(!object->IsFiller());
DCHECK(object->IsHeapObject());
DCHECK(heap()->Contains(object));
DCHECK(!(non_atomic_marking_state()->IsWhite(object)));
DCHECK(!(atomic_marking_state()->IsWhite(object)));
atomic_marking_state()->GreyToBlack(object);
Map* map = object->map();
MarkObject(object, map);
visitor.Visit(map, object);
}
DCHECK(marking_worklist()->IsEmpty());
DCHECK(marking_worklist()->IsBailoutEmpty());
}
// Mark all objects reachable (transitively) from objects on the marking
......@@ -1858,7 +1858,7 @@ void MarkCompactCollector::ProcessEphemeralMarking(
}
ProcessWeakCollections();
work_to_do = !marking_worklist()->IsEmpty();
EmptyMarkingWorklist();
ProcessMarkingWorklist();
}
CHECK(marking_worklist()->IsEmpty());
CHECK_EQ(0, heap()->local_embedder_heap_tracer()->NumberOfWrappersToTrace());
......@@ -1875,7 +1875,6 @@ void MarkCompactCollector::ProcessTopOptimizedFrame(ObjectVisitor* visitor) {
if (!code->CanDeoptAt(it.frame()->pc())) {
Code::BodyDescriptor::IterateBody(code, visitor);
}
EmptyMarkingWorklist();
return;
}
}
......@@ -2010,7 +2009,7 @@ class MinorMarkCompactCollector::RootMarkingVisitor : public RootVisitor {
if (marking_state_->WhiteToGrey(object)) {
collector_->main_marking_visitor()->Visit(object);
collector_->EmptyMarkingWorklist();
collector_->ProcessMarkingWorklist();
}
}
......@@ -2313,7 +2312,7 @@ void MinorMarkCompactCollector::MarkLiveObjects() {
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_WEAK);
heap()->IterateEncounteredWeakCollections(&root_visitor);
EmptyMarkingWorklist();
ProcessMarkingWorklist();
}
{
......@@ -2322,11 +2321,11 @@ void MinorMarkCompactCollector::MarkLiveObjects() {
&IsUnmarkedObjectForYoungGeneration);
isolate()->global_handles()->IterateNewSpaceWeakUnmodifiedRoots(
&root_visitor);
EmptyMarkingWorklist();
ProcessMarkingWorklist();
}
}
void MinorMarkCompactCollector::EmptyMarkingWorklist() {
void MinorMarkCompactCollector::ProcessMarkingWorklist() {
MarkingWorklist::View marking_worklist(worklist(), kMainMarker);
HeapObject* object = nullptr;
while (marking_worklist.Pop(&object)) {
......@@ -2546,9 +2545,22 @@ void MarkCompactCollector::MarkLiveObjects() {
MarkRoots(&root_visitor, &custom_root_body_visitor);
}
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_MAIN);
if (FLAG_concurrent_marking) {
heap_->concurrent_marking()->RescheduleTasksIfNeeded();
}
ProcessMarkingWorklist();
FinishConcurrentMarking();
ProcessMarkingWorklist();
}
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WEAK_CLOSURE);
DCHECK(marking_worklist()->IsEmpty());
// The objects reachable from the roots are marked, yet unreachable
// objects are unmarked. Mark objects reachable due to host
// application specific logic or through Harmony weak maps.
......@@ -2570,7 +2582,7 @@ void MarkCompactCollector::MarkLiveObjects() {
GCTracer::Scope::MC_MARK_WEAK_CLOSURE_WEAK_HANDLES);
heap()->isolate()->global_handles()->IdentifyWeakHandles(
&IsUnmarkedHeapObject);
EmptyMarkingWorklist();
ProcessMarkingWorklist();
}
// Then we mark the objects.
......@@ -2578,7 +2590,7 @@ void MarkCompactCollector::MarkLiveObjects() {
TRACE_GC(heap()->tracer(),
GCTracer::Scope::MC_MARK_WEAK_CLOSURE_WEAK_ROOTS);
heap()->isolate()->global_handles()->IterateWeakRoots(&root_visitor);
EmptyMarkingWorklist();
ProcessMarkingWorklist();
}
// Repeat Harmony weak maps marking to mark unmarked objects reachable from
......@@ -2595,6 +2607,10 @@ void MarkCompactCollector::MarkLiveObjects() {
}
}
}
if (was_marked_incrementally_) {
heap()->incremental_marking()->Deactivate();
}
}
......
......@@ -265,8 +265,8 @@ class MarkCompactCollectorBase {
// Marking operations for objects reachable from roots.
virtual void MarkLiveObjects() = 0;
// Mark objects reachable (transitively) from objects in the marking
// stack.
virtual void EmptyMarkingWorklist() = 0;
// work list.
virtual void ProcessMarkingWorklist() = 0;
// Clear non-live references held in side data structures.
virtual void ClearNonLiveReferences() = 0;
virtual void EvacuatePrologue() = 0;
......@@ -388,7 +388,7 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
void MarkLiveObjects() override;
void MarkRootSetInParallel();
void EmptyMarkingWorklist() override;
void ProcessMarkingWorklist() override;
void ClearNonLiveReferences() override;
void EvacuatePrologue() override;
......@@ -474,6 +474,7 @@ struct WeakObjects {
// Collector for young and old generation.
class MarkCompactCollector final : public MarkCompactCollectorBase {
public:
using AtomicMarkingState = MajorAtomicMarkingState;
using NonAtomicMarkingState = MajorNonAtomicMarkingState;
static const int kMainThread = 0;
......@@ -655,6 +656,8 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
kClearMarkbits,
};
AtomicMarkingState* atomic_marking_state() { return &atomic_marking_state_; }
NonAtomicMarkingState* non_atomic_marking_state() {
return &non_atomic_marking_state_;
}
......@@ -672,6 +675,8 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
// choosing spaces to compact.
void Prepare();
void FinishConcurrentMarking();
bool StartCompaction();
void AbortCompaction();
......@@ -731,6 +736,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
bool are_map_pointers_encoded() { return state_ == UPDATE_POINTERS; }
#endif
void VerifyMarking();
#ifdef VERIFY_HEAP
void VerifyValidStoreAndSlotsBufferEntries();
void VerifyMarkbitsAreClean();
......@@ -792,9 +798,9 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
// Collects a list of dependent code from maps embedded in optimize code.
DependentCode* DependentCodeListFromNonLiveMaps();
// This function empties the marking stack, but may leave overflowed objects
// in the heap, in which case the marking stack's overflow flag will be set.
void EmptyMarkingWorklist() override;
// Drains the main thread marking work list. Will mark all pending objects
// if no concurrent threads are running.
void ProcessMarkingWorklist() override;
// Callback function for telling whether the object *p is an unmarked
// heap object.
......@@ -908,6 +914,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
Sweeper sweeper_;
AtomicMarkingState atomic_marking_state_;
NonAtomicMarkingState non_atomic_marking_state_;
friend class FullEvacuator;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment