Commit 4a5a1daa authored by Ulan Degenbaev, committed by Commit Bot

[heap] Unify draining of the main thread marking worklist

This removes the marking worklist draining loop from IncrementalMarking
and makes it use MarkCompactCollector's loop instead.
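
The unified entry point takes a byte budget, where a budget of zero
means "drain until the worklist is empty". Below is a minimal
standalone sketch of that contract; FakeObject, Sketch, and the
std::deque worklist are illustrative stand-ins, not the real V8
classes:

  #include <cstddef>
  #include <deque>

  // Illustrative stand-in for a heap object; |size_in_bytes| plays the
  // role of the byte count returned by the marking visitor's Visit().
  struct FakeObject {
    size_t size_in_bytes;
  };

  class Sketch {
   public:
    // Processes objects until at least |bytes_to_process| bytes have
    // been visited; a budget of zero disables the limit, so the
    // worklist is drained until it is empty.
    size_t ProcessMarkingWorklist(size_t bytes_to_process) {
      size_t bytes_processed = 0;
      while (!worklist_.empty()) {
        FakeObject object = worklist_.front();
        worklist_.pop_front();
        bytes_processed += object.size_in_bytes;
        if (bytes_to_process && bytes_processed >= bytes_to_process) break;
      }
      return bytes_processed;
    }

    // Full-drain wrapper, as used by the atomic pause and by Hurry().
    void DrainMarkingWorklist() { ProcessMarkingWorklist(0); }

    std::deque<FakeObject> worklist_;
  };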

Bug: chromium:973627
Change-Id: I226b4b45be7d542a82bba20162ad210dfb419c39
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1940250
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Cr-Commit-Position: refs/heads/master@{#65212}
parent 1bde17ce
--- a/src/heap/incremental-marking.cc
+++ b/src/heap/incremental-marking.cc
@@ -48,6 +48,7 @@ IncrementalMarking::IncrementalMarking(
     Heap* heap, MarkCompactCollector::MarkingWorklist* marking_worklist,
     WeakObjects* weak_objects)
     : heap_(heap),
+      collector_(heap->mark_compact_collector()),
       marking_worklist_(marking_worklist),
       weak_objects_(weak_objects),
       initial_old_generation_size_(0),
@@ -76,7 +77,7 @@ void IncrementalMarking::RecordWriteSlow(HeapObject obj, HeapObjectSlot slot,
                                          HeapObject value) {
   if (BaseRecordWrite(obj, value) && slot.address() != kNullAddress) {
     // Object is not going to be rescanned. We need to record the slot.
-    heap_->mark_compact_collector()->RecordSlot(obj, slot, value);
+    collector_->RecordSlot(obj, slot, value);
   }
 }
@@ -95,7 +96,7 @@ void IncrementalMarking::RecordWriteIntoCode(Code host, RelocInfo* rinfo,
   DCHECK(IsMarking());
   if (BaseRecordWrite(host, value)) {
     // Object is not going to be rescanned. We need to record the slot.
-    heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value);
+    collector_->RecordRelocSlot(host, rinfo, value);
   }
 }
@@ -300,7 +301,7 @@ void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
   should_hurry_ = false;
   was_activated_ = true;
-  if (!heap_->mark_compact_collector()->sweeping_in_progress()) {
+  if (!collector_->sweeping_in_progress()) {
     StartMarking();
   } else {
     if (FLAG_trace_incremental_marking) {
@@ -332,26 +333,23 @@ void IncrementalMarking::StartMarking() {
         "[IncrementalMarking] Start marking\n");
   }
-  is_compacting_ =
-      !FLAG_never_compact && heap_->mark_compact_collector()->StartCompaction();
+  is_compacting_ = !FLAG_never_compact && collector_->StartCompaction();
   SetState(MARKING);
   ActivateIncrementalWriteBarrier();
-  MarkCompactCollector* collector = heap_->mark_compact_collector();
   marking_visitor_ = std::make_unique<MarkCompactCollector::MarkingVisitor>(
-      collector->marking_state(), collector->marking_worklist()->shared(),
-      collector->marking_worklist()->embedder(), collector->weak_objects(),
-      heap_, collector->epoch(), Heap::GetBytecodeFlushMode(),
+      collector_->marking_state(), collector_->marking_worklist()->shared(),
+      collector_->marking_worklist()->embedder(), collector_->weak_objects(),
+      heap_, collector_->epoch(), Heap::GetBytecodeFlushMode(),
       heap_->local_embedder_heap_tracer()->InUse(),
       heap_->is_current_gc_forced());
   // Marking bits are cleared by the sweeper.
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
-    heap_->mark_compact_collector()->VerifyMarkbitsAreClean();
+    collector_->VerifyMarkbitsAreClean();
   }
 #endif
@@ -713,32 +711,6 @@ void IncrementalMarking::MarkDescriptorArrayFromWriteBarrier(
       host, descriptors, number_of_own_descriptors);
 }
-intptr_t IncrementalMarking::ProcessMarkingWorklist(
-    intptr_t bytes_to_process, ForceCompletionAction completion) {
-  intptr_t bytes_processed = 0;
-  while (bytes_processed < bytes_to_process || completion == FORCE_COMPLETION) {
-    HeapObject obj = marking_worklist()->Pop();
-    if (obj.is_null()) break;
-    // Left trimming may result in grey or black filler objects on the marking
-    // worklist. Ignore these objects.
-    if (obj.IsFreeSpaceOrFiller()) {
-      // Due to copying mark bits and the fact that grey and black have their
-      // first bit set, one word fillers are always black.
-      DCHECK_IMPLIES(
-          obj.map() == ReadOnlyRoots(heap()).one_pointer_filler_map(),
-          marking_state()->IsBlack(obj));
-      // Other fillers may be black or grey depending on the color of the object
-      // that was trimmed.
-      DCHECK_IMPLIES(
-          obj.map() != ReadOnlyRoots(heap()).one_pointer_filler_map(),
-          marking_state()->IsBlackOrGrey(obj));
-      continue;
-    }
-    bytes_processed += marking_visitor_->Visit(obj.map(), obj);
-  }
-  return bytes_processed;
-}
 StepResult IncrementalMarking::EmbedderStep(double duration_ms) {
   if (!ShouldDoEmbedderStep()) return StepResult::kNoImmediateWork;
@@ -785,9 +757,7 @@ void IncrementalMarking::Hurry() {
       heap()->isolate()->PrintWithTimestamp("[IncrementalMarking] Hurry\n");
     }
   }
-  // TODO(gc) hurry can mark objects it encounters black as mutator
-  // was stopped.
-  ProcessMarkingWorklist(0, FORCE_COMPLETION);
+  collector_->ProcessMarkingWorklist(0);
   SetState(COMPLETE);
   if (FLAG_trace_incremental_marking) {
     double end = heap_->MonotonicallyIncreasingTimeInMs();
@@ -965,12 +935,12 @@ StepResult IncrementalMarking::AdvanceWithDeadline(
 void IncrementalMarking::FinalizeSweeping() {
   DCHECK(state_ == SWEEPING);
-  if (heap_->mark_compact_collector()->sweeping_in_progress() &&
+  if (collector_->sweeping_in_progress() &&
       (!FLAG_concurrent_sweeping ||
-       !heap_->mark_compact_collector()->sweeper()->AreSweeperTasksRunning())) {
-    heap_->mark_compact_collector()->EnsureSweepingCompleted();
+       !collector_->sweeper()->AreSweeperTasksRunning())) {
+    collector_->EnsureSweepingCompleted();
   }
-  if (!heap_->mark_compact_collector()->sweeping_in_progress()) {
+  if (!collector_->sweeping_in_progress()) {
 #ifdef DEBUG
     heap_->VerifyCountersAfterSweeping();
 #endif
@@ -1124,8 +1094,8 @@ StepResult IncrementalMarking::V8Step(double max_step_size_in_ms,
       result = StepResult::kNoImmediateWork;
     }
-    bytes_processed =
-        ProcessMarkingWorklist(Max(bytes_to_process, kMinStepSizeInBytes));
+    bytes_processed = collector_->ProcessMarkingWorklist(
+        Max(bytes_to_process, kMinStepSizeInBytes));
     bytes_marked_ += bytes_processed;
--- a/src/heap/incremental-marking.h
+++ b/src/heap/incremental-marking.h
@@ -32,8 +32,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
   enum CompletionAction { GC_VIA_STACK_GUARD, NO_GC_VIA_STACK_GUARD };
-  enum ForceCompletionAction { FORCE_COMPLETION, DO_NOT_FORCE_COMPLETION };
   enum GCRequestType { NONE, COMPLETE_MARKING, FINALIZATION };
   using MarkingState = MarkCompactCollector::MarkingState;
@@ -283,10 +281,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
   void DeactivateIncrementalWriteBarrierForSpace(NewSpace* space);
   void DeactivateIncrementalWriteBarrier();
-  V8_INLINE intptr_t ProcessMarkingWorklist(
-      intptr_t bytes_to_process,
-      ForceCompletionAction completion = DO_NOT_FORCE_COMPLETION);
   // Updates scheduled_bytes_to_mark_ to ensure marking progress based on
   // time.
   void ScheduleBytesToMarkBasedOnTime(double time_ms);
@@ -318,6 +312,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
   }
   Heap* const heap_;
+  MarkCompactCollector* const collector_;
   MarkCompactCollector::MarkingWorklist* const marking_worklist_;
   WeakObjects* weak_objects_;
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -1611,7 +1611,7 @@ bool MarkCompactCollector::ProcessEphemerons() {
   // Drain marking worklist and push discovered ephemerons into
   // discovered_ephemerons.
-  ProcessMarkingWorklist();
+  DrainMarkingWorklist();
   // Drain discovered_ephemerons (filled in the drain MarkingWorklist-phase
   // before) and push ephemerons where key and value are still unreachable into
@@ -1661,9 +1661,9 @@ void MarkCompactCollector::ProcessEphemeronsLinear() {
           GCTracer::Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON_MARKING);
       // Drain marking worklist and push all discovered objects into
       // newly_discovered.
-      ProcessMarkingWorklistInternal<
+      ProcessMarkingWorklist<
           MarkCompactCollector::MarkingWorklistProcessingMode::
-              kTrackNewlyDiscoveredObjects>();
+              kTrackNewlyDiscoveredObjects>(0);
     }
     while (
@@ -1730,19 +1730,17 @@ void MarkCompactCollector::PerformWrapperTracing() {
   }
 }
-void MarkCompactCollector::ProcessMarkingWorklist() {
-  ProcessMarkingWorklistInternal<
-      MarkCompactCollector::MarkingWorklistProcessingMode::kDefault>();
-}
+void MarkCompactCollector::DrainMarkingWorklist() { ProcessMarkingWorklist(0); }
 template <MarkCompactCollector::MarkingWorklistProcessingMode mode>
-void MarkCompactCollector::ProcessMarkingWorklistInternal() {
+size_t MarkCompactCollector::ProcessMarkingWorklist(size_t bytes_to_process) {
   HeapObject object;
   MarkingVisitor visitor(marking_state(), marking_worklist()->shared(),
                          marking_worklist()->embedder(), weak_objects(), heap_,
                          epoch(), Heap::GetBytecodeFlushMode(),
                          heap_->local_embedder_heap_tracer()->InUse(),
                          heap_->is_current_gc_forced());
+  size_t bytes_processed = 0;
   while (!(object = marking_worklist()->Pop()).is_null()) {
     // Left trimming may result in grey or black filler objects on the marking
     // worklist. Ignore these objects.
@@ -1766,8 +1764,12 @@ void MarkCompactCollector::ProcessMarkingWorklistInternal() {
                     kTrackNewlyDiscoveredObjects) {
       AddNewlyDiscovered(object);
     }
-    visitor.Visit(object.map(), object);
+    bytes_processed += visitor.Visit(object.map(), object);
+    if (bytes_to_process && bytes_processed >= bytes_to_process) {
+      break;
+    }
   }
+  return bytes_processed;
 }
 bool MarkCompactCollector::ProcessEphemeron(HeapObject key, HeapObject value) {
@@ -1875,11 +1877,11 @@ void MarkCompactCollector::MarkLiveObjects() {
     if (FLAG_parallel_marking) {
       heap_->concurrent_marking()->RescheduleTasksIfNeeded();
     }
-    ProcessMarkingWorklist();
+    DrainMarkingWorklist();
     FinishConcurrentMarking(
         ConcurrentMarking::StopRequest::COMPLETE_ONGOING_TASKS);
-    ProcessMarkingWorklist();
+    DrainMarkingWorklist();
   }
   {
@@ -1898,7 +1900,7 @@ void MarkCompactCollector::MarkLiveObjects() {
       // concurrent markers. As a result this call needs to happen at least
       // once.
       PerformWrapperTracing();
-      ProcessMarkingWorklist();
+      DrainMarkingWorklist();
     } while (!heap_->local_embedder_heap_tracer()->IsRemoteTracingDone() ||
              !marking_worklist()->IsEmbedderEmpty());
     DCHECK(marking_worklist()->IsEmbedderEmpty());
@@ -1927,7 +1929,7 @@ void MarkCompactCollector::MarkLiveObjects() {
           GCTracer::Scope::MC_MARK_WEAK_CLOSURE_WEAK_HANDLES);
       heap()->isolate()->global_handles()->IterateWeakRootsIdentifyFinalizers(
           &IsUnmarkedHeapObject);
-      ProcessMarkingWorklist();
+      DrainMarkingWorklist();
     }
     // Process finalizers, effectively keeping them alive until the next
@@ -1937,7 +1939,7 @@ void MarkCompactCollector::MarkLiveObjects() {
           GCTracer::Scope::MC_MARK_WEAK_CLOSURE_WEAK_ROOTS);
       heap()->isolate()->global_handles()->IterateWeakRootsForFinalizers(
           &root_visitor);
-      ProcessMarkingWorklist();
+      DrainMarkingWorklist();
     }
     // Repeat ephemeron processing from the newly marked objects.
@@ -4829,7 +4831,7 @@ void MinorMarkCompactCollector::MarkLiveObjects() {
   // Mark rest on the main thread.
   {
     TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_WEAK);
-    ProcessMarkingWorklist();
+    DrainMarkingWorklist();
   }
   {
@@ -4842,11 +4844,11 @@ void MinorMarkCompactCollector::MarkLiveObjects() {
         ->global_handles()
         ->IterateYoungWeakUnmodifiedRootsForPhantomHandles(
             &root_visitor, &IsUnmarkedObjectForYoungGeneration);
-    ProcessMarkingWorklist();
+    DrainMarkingWorklist();
   }
 }
-void MinorMarkCompactCollector::ProcessMarkingWorklist() {
+void MinorMarkCompactCollector::DrainMarkingWorklist() {
   MarkingWorklist::View marking_worklist(worklist(), kMainMarker);
   HeapObject object;
   while (marking_worklist.Pop(&object)) {
--- a/src/heap/mark-compact.h
+++ b/src/heap/mark-compact.h
@@ -202,7 +202,7 @@ class MarkCompactCollectorBase {
   virtual void MarkLiveObjects() = 0;
   // Mark objects reachable (transitively) from objects in the marking
   // work list.
-  virtual void ProcessMarkingWorklist() = 0;
+  virtual void DrainMarkingWorklist() = 0;
   // Clear non-live references held in side data structures.
   virtual void ClearNonLiveReferences() = 0;
   virtual void EvacuatePrologue() = 0;
@@ -554,6 +554,11 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
     kClearMarkbits,
   };
+  enum class MarkingWorklistProcessingMode {
+    kDefault,
+    kTrackNewlyDiscoveredObjects
+  };
   MarkingState* marking_state() { return &marking_state_; }
   NonAtomicMarkingState* non_atomic_marking_state() {
@@ -669,6 +674,13 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
   // Used by wrapper tracing.
   V8_INLINE void MarkExternallyReferencedObject(HeapObject obj);
+  // Drains the main thread marking worklist until the specified number of
+  // bytes are processed. If the number of bytes is zero, then the worklist
+  // is drained until it is empty.
+  template <MarkingWorklistProcessingMode mode =
+                MarkingWorklistProcessingMode::kDefault>
+  size_t ProcessMarkingWorklist(size_t bytes_to_process);
  private:
   void ComputeEvacuationHeuristics(size_t area_size,
                                    int* target_fragmentation_percent,
@@ -707,15 +719,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
   // Drains the main thread marking work list. Will mark all pending objects
   // if no concurrent threads are running.
-  void ProcessMarkingWorklist() override;
-  enum class MarkingWorklistProcessingMode {
-    kDefault,
-    kTrackNewlyDiscoveredObjects
-  };
-  template <MarkingWorklistProcessingMode mode>
-  void ProcessMarkingWorklistInternal();
+  void DrainMarkingWorklist() override;
   // Implements ephemeron semantics: Marks value if key is already reachable.
   // Returns true if value was actually marked.
@@ -924,7 +928,7 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
   void MarkLiveObjects() override;
   void MarkRootSetInParallel(RootMarkingVisitor* root_visitor);
   V8_INLINE void MarkRootObject(HeapObject obj);
-  void ProcessMarkingWorklist() override;
+  void DrainMarkingWorklist() override;
   void ClearNonLiveReferences() override;
   void EvacuatePrologue() override;