Commit d29299f9 authored by Ulan Degenbaev, committed by Commit Bot

[heap] Consolidate visiting of objects in MarkCompactCollector

This removes object visiting logic from IncrementalMarking and makes it
call the corresponding methods of MarkCompactCollector. As a result
we have one place where objects are visited (on the main thread), which
is necessary for implementing per-context visitation.

Bug: chromium:973627
Change-Id: Ibdfbb9a910b592307bdba2bd73eada35c80a0d61
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1940154
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#65278}
parent 83fc8559
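
For orientation before the diff: a minimal standalone sketch of the ownership change, using simplified stand-in classes and signatures rather than V8's real API. After this commit, MarkCompactCollector owns the single main-thread MarkingVisitor, and IncrementalMarking routes every object visit through the collector instead of constructing its own visitor.

// Minimal sketch, not V8 code: names below are simplified stand-ins that
// mirror the shape of this change.
#include <memory>

struct HeapObject {};  // stand-in for the real heap object handle

class MarkingVisitor {
 public:
  // In V8 this marks the object and iterates its slots; elided here.
  void Visit(const HeapObject& obj) {}
};

class MarkCompactCollector {
 public:
  // The collector now creates the one main-thread visitor up front...
  void StartMarking() { marking_visitor_ = std::make_unique<MarkingVisitor>(); }
  // ...and exposes entry points that incremental marking calls into.
  void VisitObject(const HeapObject& obj) { marking_visitor_->Visit(obj); }
  void RevisitObject(const HeapObject& obj) { marking_visitor_->Visit(obj); }

 private:
  std::unique_ptr<MarkingVisitor> marking_visitor_;
};

class IncrementalMarking {
 public:
  explicit IncrementalMarking(MarkCompactCollector* collector)
      : collector_(collector) {}
  // Before this commit IncrementalMarking built and used its own visitor;
  // now every visit is delegated to the collector.
  void StartMarking() { collector_->StartMarking(); }
  void MarkBlackAndVisitObjectDueToLayoutChange(const HeapObject& obj) {
    collector_->VisitObject(obj);
  }

 private:
  MarkCompactCollector* collector_;  // not owned
};

int main() {
  MarkCompactCollector collector;
  IncrementalMarking incremental(&collector);
  incremental.StartMarking();
  incremental.MarkBlackAndVisitObjectDueToLayoutChange(HeapObject{});
}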
@@ -6399,7 +6399,7 @@ void Heap::MarkingBarrierForDescriptorArraySlow(Heap* heap, HeapObject host,
   if (NumberOfMarkedDescriptors::decode(heap->mark_compact_collector()->epoch(),
                                         raw_marked) <
       number_of_own_descriptors) {
-    heap->incremental_marking()->MarkDescriptorArrayFromWriteBarrier(
+    heap->mark_compact_collector()->MarkDescriptorArrayFromWriteBarrier(
         host, descriptor_array, number_of_own_descriptors);
   }
 }

@@ -68,11 +68,6 @@ IncrementalMarking::IncrementalMarking(
   SetState(STOPPED);
 }

-IncrementalMarking::~IncrementalMarking() {
-  // Avoid the default destructor, which would be inlined in the header file
-  // and cause compile errors due to marking_visitor_ not being fully defined.
-}
-
 void IncrementalMarking::RecordWriteSlow(HeapObject obj, HeapObjectSlot slot,
                                          HeapObject value) {
   if (BaseRecordWrite(obj, value) && slot.address() != kNullAddress) {
@@ -105,7 +100,7 @@ void IncrementalMarking::MarkBlackAndVisitObjectDueToLayoutChange(
   TRACE_EVENT0("v8", "V8.GCIncrementalMarkingLayoutChange");
   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_LAYOUT_CHANGE);
   marking_state()->WhiteToGrey(obj);
-  marking_visitor_->Visit(obj.map(), obj);
+  collector_->VisitObject(obj);
 }

 void IncrementalMarking::NotifyLeftTrimming(HeapObject from, HeapObject to) {
@@ -334,25 +329,12 @@ void IncrementalMarking::StartMarking() {
   }

   is_compacting_ = !FLAG_never_compact && collector_->StartCompaction();
+  collector_->StartMarking();

   SetState(MARKING);
   ActivateIncrementalWriteBarrier();

-  marking_visitor_ = std::make_unique<MarkCompactCollector::MarkingVisitor>(
-      collector_->marking_state(), collector_->marking_worklist()->shared(),
-      collector_->marking_worklist()->embedder(), collector_->weak_objects(),
-      heap_, collector_->epoch(), Heap::GetBytecodeFlushMode(),
-      heap_->local_embedder_heap_tracer()->InUse(),
-      heap_->is_current_gc_forced());
-
-  // Marking bits are cleared by the sweeper.
-#ifdef VERIFY_HEAP
-  if (FLAG_verify_heap) {
-    collector_->VerifyMarkbitsAreClean();
-  }
-#endif
-
   heap_->isolate()->compilation_cache()->MarkCompactPrologue();
   StartBlackAllocation();
@@ -689,28 +671,10 @@ void IncrementalMarking::UpdateMarkedBytesAfterScavenge(
 void IncrementalMarking::ProcessBlackAllocatedObject(HeapObject obj) {
   if (IsMarking() && marking_state()->IsBlack(obj)) {
-    RevisitObject(obj);
+    collector_->RevisitObject(obj);
   }
 }

-void IncrementalMarking::RevisitObject(HeapObject obj) {
-  DCHECK(IsMarking());
-  DCHECK(marking_state()->IsBlack(obj));
-  DCHECK_IMPLIES(MemoryChunk::FromHeapObject(obj)->IsFlagSet(
-                     MemoryChunk::HAS_PROGRESS_BAR),
-                 0u == MemoryChunk::FromHeapObject(obj)->ProgressBar());
-  MarkCompactCollector::MarkingVisitor::RevisitScope revisit(
-      marking_visitor_.get());
-  marking_visitor_->Visit(obj.map(), obj);
-}
-
-void IncrementalMarking::MarkDescriptorArrayFromWriteBarrier(
-    HeapObject host, DescriptorArray descriptors,
-    int number_of_own_descriptors) {
-  marking_visitor_->MarkDescriptorArrayFromWriteBarrier(
-      host, descriptors, number_of_own_descriptors);
-}
-
 StepResult IncrementalMarking::EmbedderStep(double duration_ms) {
   if (!ShouldDoEmbedderStep()) return StepResult::kNoImmediateWork;

@@ -89,7 +89,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
   IncrementalMarking(Heap* heap,
                      MarkCompactCollector::MarkingWorklist* marking_worklist,
                      WeakObjects* weak_objects);
-  ~IncrementalMarking();

   MarkingState* marking_state() { return &marking_state_; }
@@ -199,13 +198,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
   template <typename TSlot>
   V8_INLINE void RecordWrite(HeapObject obj, TSlot slot,
                              typename TSlot::TObject value);

-  void RevisitObject(HeapObject obj);
-  // Ensures that all descriptors in range [0, number_of_own_descriptors)
-  // are visited.
-  void MarkDescriptorArrayFromWriteBarrier(HeapObject host,
-                                           DescriptorArray array,
-                                           int number_of_own_descriptors);
-
   void RecordWriteSlow(HeapObject obj, HeapObjectSlot slot, HeapObject value);
   void RecordWriteIntoCode(Code host, RelocInfo* rinfo, HeapObject value);
@@ -316,8 +308,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
   MarkCompactCollector::MarkingWorklist* const marking_worklist_;
   WeakObjects* weak_objects_;

-  std::unique_ptr<MarkCompactCollector::MarkingVisitor> marking_visitor_;
-
   double start_time_ms_;
   size_t initial_old_generation_size_;
   size_t old_generation_allocation_counter_;

@@ -498,6 +498,21 @@ bool MarkCompactCollector::StartCompaction() {
   return compacting_;
 }

+void MarkCompactCollector::StartMarking() {
+  marking_visitor_ = std::make_unique<MarkingVisitor>(
+      marking_state(), marking_worklist()->shared(),
+      marking_worklist()->embedder(), weak_objects(), heap_, epoch(),
+      Heap::GetBytecodeFlushMode(),
+      heap_->local_embedder_heap_tracer()->InUse(),
+      heap_->is_current_gc_forced());
+  // Marking bits are cleared by the sweeper.
+#ifdef VERIFY_HEAP
+  if (FLAG_verify_heap) {
+    VerifyMarkbitsAreClean();
+  }
+#endif
+}
+
 void MarkCompactCollector::CollectGarbage() {
   // Make sure that Prepare() has been called. The individual steps below will
   // update the state as they proceed.
@@ -811,15 +826,15 @@ void MarkCompactCollector::Prepare() {
   }

   if (!was_marked_incrementally_) {
-    TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_EMBEDDER_PROLOGUE);
-    heap_->local_embedder_heap_tracer()->TracePrologue(
-        heap_->flags_for_embedder_tracer());
-  }
-
-  // Don't start compaction if we are in the middle of incremental
-  // marking cycle. We did not collect any slots.
-  if (!FLAG_never_compact && !was_marked_incrementally_) {
-    StartCompaction();
+    {
+      TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_EMBEDDER_PROLOGUE);
+      heap_->local_embedder_heap_tracer()->TracePrologue(
+          heap_->flags_for_embedder_tracer());
+    }
+    if (!FLAG_never_compact) {
+      StartCompaction();
+    }
+    StartMarking();
   }

   PagedSpaceIterator spaces(heap());
@@ -828,12 +843,6 @@
     space->PrepareForMarkCompact();
   }
   heap()->account_external_memory_concurrently_freed();
-
-#ifdef VERIFY_HEAP
-  if (!was_marked_incrementally_ && FLAG_verify_heap) {
-    VerifyMarkbitsAreClean();
-  }
-#endif
 }

 void MarkCompactCollector::FinishConcurrentMarking(
@@ -1550,6 +1559,26 @@ void MarkCompactCollector::MarkRoots(RootVisitor* root_visitor,
   ProcessTopOptimizedFrame(custom_root_body_visitor);
 }

+void MarkCompactCollector::VisitObject(HeapObject obj) {
+  marking_visitor_->Visit(obj.map(), obj);
+}
+
+void MarkCompactCollector::RevisitObject(HeapObject obj) {
+  DCHECK(marking_state()->IsBlack(obj));
+  DCHECK_IMPLIES(MemoryChunk::FromHeapObject(obj)->IsFlagSet(
+                     MemoryChunk::HAS_PROGRESS_BAR),
+                 0u == MemoryChunk::FromHeapObject(obj)->ProgressBar());
+  MarkingVisitor::RevisitScope revisit(marking_visitor_.get());
+  marking_visitor_->Visit(obj.map(), obj);
+}
+
+void MarkCompactCollector::MarkDescriptorArrayFromWriteBarrier(
+    HeapObject host, DescriptorArray descriptors,
+    int number_of_own_descriptors) {
+  marking_visitor_->MarkDescriptorArrayFromWriteBarrier(
+      host, descriptors, number_of_own_descriptors);
+}
+
 void MarkCompactCollector::ProcessEphemeronsUntilFixpoint() {
   bool work_to_do = true;
   int iterations = 0;
@@ -1735,11 +1764,6 @@ void MarkCompactCollector::DrainMarkingWorklist() { ProcessMarkingWorklist(0); }
 template <MarkCompactCollector::MarkingWorklistProcessingMode mode>
 size_t MarkCompactCollector::ProcessMarkingWorklist(size_t bytes_to_process) {
   HeapObject object;
-  MarkingVisitor visitor(marking_state(), marking_worklist()->shared(),
-                         marking_worklist()->embedder(), weak_objects(), heap_,
-                         epoch(), Heap::GetBytecodeFlushMode(),
-                         heap_->local_embedder_heap_tracer()->InUse(),
-                         heap_->is_current_gc_forced());
   size_t bytes_processed = 0;
   while (!(object = marking_worklist()->Pop()).is_null()) {
     // Left trimming may result in grey or black filler objects on the marking
@@ -1764,7 +1788,7 @@ size_t MarkCompactCollector::ProcessMarkingWorklist(size_t bytes_to_process) {
                     kTrackNewlyDiscoveredObjects) {
       AddNewlyDiscovered(object);
     }
-    bytes_processed += visitor.Visit(object.map(), object);
+    bytes_processed += marking_visitor_->Visit(object.map(), object);
     if (bytes_to_process && bytes_processed >= bytes_to_process) {
       break;
     }

@@ -586,6 +586,8 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
   void AbortCompaction();

+  void StartMarking();
+
   static inline bool IsOnEvacuationCandidate(Object obj) {
     return Page::FromAddress(obj.ptr())->IsEvacuationCandidate();
   }
@@ -673,6 +675,15 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
   // Used by wrapper tracing.
   V8_INLINE void MarkExternallyReferencedObject(HeapObject obj);

+  // Used by incremental marking for objects that change their layout.
+  void VisitObject(HeapObject obj);
+  // Used by incremental marking for black-allocated objects.
+  void RevisitObject(HeapObject obj);
+  // Ensures that all descriptors in range [0, number_of_own_descriptors)
+  // are visited.
+  void MarkDescriptorArrayFromWriteBarrier(HeapObject host,
+                                           DescriptorArray array,
+                                           int number_of_own_descriptors);

   // Drains the main thread marking worklist until the specified number of
   // bytes are processed. If the number of bytes is zero, then the worklist
@@ -851,6 +862,8 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
   WeakObjects weak_objects_;
   EphemeronMarking ephemeron_marking_;

+  std::unique_ptr<MarkingVisitor> marking_visitor_;
+
   // Candidates for pages that should be evacuated.
   std::vector<Page*> evacuation_candidates_;

   // Pages that are actually processed during evacuation.