Commit fb767676 authored by Ulan Degenbaev, committed by Commit Bot

[heap] Switch main thread marking visitors to MarkingVisitorBase

Now the incremental marker and the stop-the-world marker use the same
visitor, which is derived from MarkingVisitorBase. This removes code
duplication and should also reduce binary size.

The marking worklist processing code also changes: it no longer colors
an object black before visiting it. Instead, the visitor colors the
object black in its ShouldVisit method, as sketched below.
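
For illustration, a minimal standalone model of the new protocol (the
types here are simplified stand-ins, not the actual V8 classes): the
worklist loop just calls Visit, and the grey-to-black transition happens
inside ShouldVisit, so each object is visited at most once.

// Minimal model of the new marking protocol; not real V8 code.
#include <cstdio>

enum class Color { kWhite, kGrey, kBlack };

struct HeapObject {
  Color color = Color::kGrey;  // objects on the worklist are grey
};

struct MarkingState {
  // Succeeds only once per object, so repeated (or racing) pops of the
  // same object result in a single visit.
  bool GreyToBlack(HeapObject* object) {
    if (object->color != Color::kGrey) return false;
    object->color = Color::kBlack;
    return true;
  }
};

struct MarkingVisitor {
  MarkingState* state;
  bool ShouldVisit(HeapObject* object) { return state->GreyToBlack(object); }
  int Visit(HeapObject* object) {
    if (!ShouldVisit(object)) return 0;  // already black: nothing to do
    // ... a real visitor iterates the object body here, marking
    // referenced objects grey and pushing them onto the worklist ...
    return 8;  // pretend visited size in bytes
  }
};

int main() {
  MarkingState state;
  MarkingVisitor visitor{&state};
  HeapObject object;  // as if popped from the marking worklist
  std::printf("%d\n", visitor.Visit(&object));  // 8: first visit
  std::printf("%d\n", visitor.Visit(&object));  // 0: already black
}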

Bug: chromium:1019218
Change-Id: I57971122f3c77ad2770b6754d696b79d802ef1a4
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1901271
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#64848}
parent f3b9d9e5
......@@ -1615,6 +1615,8 @@ V8_INLINE bool operator!(ExceptionStatus status) {
return !static_cast<bool>(status);
}
enum class TraceRetainingPathMode { kEnabled, kDisabled };
} // namespace internal
} // namespace v8
......
......@@ -81,13 +81,13 @@ class ConcurrentMarkingVisitor final
public:
ConcurrentMarkingVisitor(int task_id, MarkingWorklist* marking_worklist,
EmbedderTracingWorklist* embedder_worklist,
WeakObjects* weak_objects,
WeakObjects* weak_objects, Heap* heap,
unsigned mark_compact_epoch,
BytecodeFlushMode bytecode_flush_mode,
bool embedder_tracing_enabled, bool is_forced_gc,
MemoryChunkDataMap* memory_chunk_data)
: MarkingVisitorBase(task_id, marking_worklist, embedder_worklist,
weak_objects, mark_compact_epoch,
weak_objects, heap, mark_compact_epoch,
bytecode_flush_mode, embedder_tracing_enabled,
is_forced_gc),
marking_state_(memory_chunk_data),
......@@ -157,6 +157,11 @@ class ConcurrentMarkingVisitor final
return false;
}
// HeapVisitor override.
bool ShouldVisit(HeapObject object) {
return marking_state_.GreyToBlack(object);
}
private:
// Helper class for collecting in-object slot addresses and values.
class SlotSnapshottingVisitor final : public ObjectVisitor {
......@@ -298,6 +303,10 @@ class ConcurrentMarkingVisitor final
ConcurrentMarkingState* marking_state() { return &marking_state_; }
TraceRetainingPathMode retaining_path_mode() {
return TraceRetainingPathMode::kDisabled;
}
ConcurrentMarkingState marking_state_;
MemoryChunkDataMap* memory_chunk_data_;
SlotSnapshot slot_snapshot_;
......@@ -384,7 +393,7 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
size_t kBytesUntilInterruptCheck = 64 * KB;
int kObjectsUntilInterrupCheck = 1000;
ConcurrentMarkingVisitor visitor(
task_id, marking_worklist_, embedder_worklist_, weak_objects_,
task_id, marking_worklist_, embedder_worklist_, weak_objects_, heap_,
task_state->mark_compact_epoch, Heap::GetBytecodeFlushMode(),
heap_->local_embedder_heap_tracer()->InUse(), task_state->is_forced_gc,
&task_state->memory_chunk_data);
......
......@@ -6351,8 +6351,8 @@ void Heap::MarkingBarrierForDescriptorArraySlow(Heap* heap, HeapObject host,
if (NumberOfMarkedDescriptors::decode(heap->mark_compact_collector()->epoch(),
raw_marked) <
number_of_own_descriptors) {
heap->incremental_marking()->VisitDescriptors(host, descriptor_array,
number_of_own_descriptors);
heap->incremental_marking()->MarkDescriptorArrayFromWriteBarrier(
host, descriptor_array, number_of_own_descriptors);
}
}
......
......@@ -97,10 +97,6 @@ enum class ClearFreedMemoryMode { kClearFreedMemory, kDontClearFreedMemory };
enum ExternalBackingStoreType { kArrayBuffer, kExternalString, kNumTypes };
enum class FixedArrayVisitationMode { kRegular, kIncremental };
enum class TraceRetainingPathMode { kEnabled, kDisabled };
enum class RetainingPathOption { kDefault, kTrackEphemeronPath };
enum class AllocationOrigin {
......@@ -2099,9 +2095,8 @@ class Heap {
friend class IncrementalMarking;
friend class IncrementalMarkingJob;
friend class OldLargeObjectSpace;
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
friend class MarkingVisitor;
template <typename ConcreteVisitor, typename MarkingState>
friend class MarkingVisitorBase;
friend class MarkCompactCollector;
friend class MarkCompactCollectorBase;
friend class MinorMarkCompactCollector;
......
......@@ -13,6 +13,8 @@
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking-inl.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/marking-visitor-inl.h"
#include "src/heap/marking-visitor.h"
#include "src/heap/object-stats.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/objects-visiting.h"
......@@ -30,11 +32,6 @@
namespace v8 {
namespace internal {
using IncrementalMarkingMarkingVisitor =
MarkingVisitor<FixedArrayVisitationMode::kIncremental,
TraceRetainingPathMode::kDisabled,
IncrementalMarking::MarkingState>;
void IncrementalMarking::Observer::Step(int bytes_allocated, Address addr,
size_t size) {
Heap* heap = incremental_marking_->heap();
......@@ -70,6 +67,11 @@ IncrementalMarking::IncrementalMarking(
SetState(STOPPED);
}
IncrementalMarking::~IncrementalMarking() {
// Avoid the default destructor, which would be inlined in the header file
// and cause compile errors due to marking_visitor_ not being fully defined.
}
void IncrementalMarking::RecordWriteSlow(HeapObject obj, HeapObjectSlot slot,
HeapObject value) {
if (BaseRecordWrite(obj, value) && slot.address() != kNullAddress) {
......@@ -102,9 +104,7 @@ void IncrementalMarking::MarkBlackAndVisitObjectDueToLayoutChange(
TRACE_EVENT0("v8", "V8.GCIncrementalMarkingLayoutChange");
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_LAYOUT_CHANGE);
marking_state()->WhiteToGrey(obj);
if (marking_state()->GreyToBlack(obj)) {
RevisitObject(obj);
}
marking_visitor_->Visit(obj.map(), obj);
}
void IncrementalMarking::NotifyLeftTrimming(HeapObject from, HeapObject to) {
......@@ -339,6 +339,15 @@ void IncrementalMarking::StartMarking() {
ActivateIncrementalWriteBarrier();
MarkCompactCollector* collector = heap_->mark_compact_collector();
marking_visitor_ = std::make_unique<MarkCompactCollector::MarkingVisitor>(
collector->marking_state(), collector->marking_worklist()->shared(),
collector->marking_worklist()->embedder(), collector->weak_objects(),
heap_, collector->epoch(), Heap::GetBytecodeFlushMode(),
heap_->local_embedder_heap_tracer()->InUse(),
heap_->is_current_gc_forced());
// Marking bits are cleared by the sweeper.
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
......@@ -680,29 +689,6 @@ void IncrementalMarking::UpdateMarkedBytesAfterScavenge(
bytes_marked_ -= Min(bytes_marked_, dead_bytes_in_new_space);
}
int IncrementalMarking::VisitObject(Map map, HeapObject obj) {
DCHECK(marking_state()->IsGrey(obj) || marking_state()->IsBlack(obj));
if (!marking_state()->GreyToBlack(obj)) {
// The object can already be black in these cases:
// 1. The object is a fixed array with the progress bar.
// 2. The object is a JSObject that was colored black before
// unsafe layout change.
// 3. The object is a string that was colored black before
// unsafe layout change.
// 4. The object is materialized by the deoptimizer.
// 5. The object is a descriptor array marked black by
// the descriptor array marking barrier.
DCHECK(obj.IsHashTable() || obj.IsPropertyArray() || obj.IsFixedArray() ||
obj.IsContext() || obj.IsJSObject() || obj.IsString() ||
obj.IsDescriptorArray());
}
DCHECK(marking_state()->IsBlack(obj));
WhiteToGreyAndPush(map);
IncrementalMarkingMarkingVisitor visitor(heap()->mark_compact_collector(),
marking_state());
return visitor.Visit(map, obj);
}
void IncrementalMarking::ProcessBlackAllocatedObject(HeapObject obj) {
if (IsMarking() && marking_state()->IsBlack(obj)) {
RevisitObject(obj);
......@@ -715,23 +701,16 @@ void IncrementalMarking::RevisitObject(HeapObject obj) {
DCHECK_IMPLIES(MemoryChunk::FromHeapObject(obj)->IsFlagSet(
MemoryChunk::HAS_PROGRESS_BAR),
0u == MemoryChunk::FromHeapObject(obj)->ProgressBar());
Map map = obj.map();
WhiteToGreyAndPush(map);
IncrementalMarkingMarkingVisitor visitor(heap()->mark_compact_collector(),
marking_state());
visitor.Visit(map, obj);
MarkCompactCollector::MarkingVisitor::RevisitScope revisit(
marking_visitor_.get());
marking_visitor_->Visit(obj.map(), obj);
}
void IncrementalMarking::VisitDescriptors(HeapObject host,
DescriptorArray descriptors,
void IncrementalMarking::MarkDescriptorArrayFromWriteBarrier(
HeapObject host, DescriptorArray descriptors,
int number_of_own_descriptors) {
IncrementalMarkingMarkingVisitor visitor(heap()->mark_compact_collector(),
marking_state());
// This is necessary because the Scavenger records slots only for the
// promoted black objects and the marking visitor of DescriptorArray skips
// the descriptors marked by the visitor.VisitDescriptors() below.
visitor.MarkDescriptorArrayBlack(host, descriptors);
visitor.VisitDescriptors(descriptors, number_of_own_descriptors);
marking_visitor_->MarkDescriptorArrayFromWriteBarrier(
host, descriptors, number_of_own_descriptors);
}
intptr_t IncrementalMarking::ProcessMarkingWorklist(
......@@ -755,7 +734,7 @@ intptr_t IncrementalMarking::ProcessMarkingWorklist(
marking_state()->IsBlackOrGrey(obj));
continue;
}
bytes_processed += VisitObject(obj.map(), obj);
bytes_processed += marking_visitor_->Visit(obj.map(), obj);
}
return bytes_processed;
}
......
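The RevisitObject path above wraps the visit in a RevisitScope. A
minimal sketch of such a scope guard (simplified toy types, assuming a
revisiting_object_ flag consulted by ShouldVisit, which is how the
change appears to work; not the real implementation):

// While a RevisitScope is alive, ShouldVisit accepts objects that are
// already black, so RevisitObject can re-walk them.
class Visitor {
 public:
  class RevisitScope {
   public:
    explicit RevisitScope(Visitor* visitor) : visitor_(visitor) {
      visitor_->revisiting_object_ = true;
    }
    ~RevisitScope() { visitor_->revisiting_object_ = false; }

   private:
    Visitor* const visitor_;
  };

  bool ShouldVisit(bool grey_to_black_succeeded) {
    // Normally only the grey-to-black winner visits; a revisit bypasses that.
    return grey_to_black_succeeded || revisiting_object_;
  }

 private:
  bool revisiting_object_ = false;
};

int main() {
  Visitor visitor;
  bool skipped = !visitor.ShouldVisit(false);  // black object is skipped
  Visitor::RevisitScope scope(&visitor);
  bool revisited = visitor.ShouldVisit(false);  // now it is visited again
  return (skipped && revisited) ? 0 : 1;
}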
......@@ -36,13 +36,9 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
enum GCRequestType { NONE, COMPLETE_MARKING, FINALIZATION };
#ifdef V8_CONCURRENT_MARKING
using MarkingState = IncrementalMarkingState;
#else
using MarkingState = MajorNonAtomicMarkingState;
#endif // V8_CONCURRENT_MARKING
using AtomicMarkingState = MajorAtomicMarkingState;
using NonAtomicMarkingState = MajorNonAtomicMarkingState;
using MarkingState = MarkCompactCollector::MarkingState;
using AtomicMarkingState = MarkCompactCollector::AtomicMarkingState;
using NonAtomicMarkingState = MarkCompactCollector::NonAtomicMarkingState;
class PauseBlackAllocationScope {
public:
......@@ -95,6 +91,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
IncrementalMarking(Heap* heap,
MarkCompactCollector::MarkingWorklist* marking_worklist,
WeakObjects* weak_objects);
~IncrementalMarking();
MarkingState* marking_state() { return &marking_state_; }
......@@ -207,7 +204,8 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
void RevisitObject(HeapObject obj);
// Ensures that all descriptors in the range [0, number_of_own_descriptors)
// are visited.
void VisitDescriptors(HeapObject host, DescriptorArray array,
void MarkDescriptorArrayFromWriteBarrier(HeapObject host,
DescriptorArray array,
int number_of_own_descriptors);
void RecordWriteSlow(HeapObject obj, HeapObjectSlot slot, HeapObject value);
......@@ -289,9 +287,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
intptr_t bytes_to_process,
ForceCompletionAction completion = DO_NOT_FORCE_COMPLETION);
// Visits the object and returns its size.
V8_INLINE int VisitObject(Map map, HeapObject obj);
// Updates scheduled_bytes_to_mark_ to ensure marking progress based on
// time.
void ScheduleBytesToMarkBasedOnTime(double time_ms);
......@@ -326,6 +321,8 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
MarkCompactCollector::MarkingWorklist* const marking_worklist_;
WeakObjects* weak_objects_;
std::unique_ptr<MarkCompactCollector::MarkingVisitor> marking_visitor_;
double start_time_ms_;
size_t initial_old_generation_size_;
size_t old_generation_allocation_counter_;
......
......@@ -21,6 +21,8 @@
#include "src/heap/item-parallel-job.h"
#include "src/heap/local-allocator-inl.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/marking-visitor-inl.h"
#include "src/heap/marking-visitor.h"
#include "src/heap/object-stats.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/read-only-heap.h"
......@@ -368,11 +370,6 @@ class FullEvacuationVerifier : public EvacuationVerifier {
// MarkCompactCollectorBase, MinorMarkCompactCollector, MarkCompactCollector
// =============================================================================
using MarkCompactMarkingVisitor =
MarkingVisitor<FixedArrayVisitationMode::kRegular,
TraceRetainingPathMode::kEnabled,
MarkCompactCollector::MarkingState>;
namespace {
int NumberOfAvailableCores() {
......@@ -1606,7 +1603,7 @@ bool MarkCompactCollector::ProcessEphemerons() {
// Drain current_ephemerons and push ephemerons where key and value are still
// unreachable into next_ephemerons.
while (weak_objects_.current_ephemerons.Pop(kMainThread, &ephemeron)) {
while (weak_objects_.current_ephemerons.Pop(kMainThreadTask, &ephemeron)) {
if (ProcessEphemeron(ephemeron.key, ephemeron.value)) {
ephemeron_marked = true;
}
......@@ -1619,15 +1616,15 @@ bool MarkCompactCollector::ProcessEphemerons() {
// Drain discovered_ephemerons (filled in the drain MarkingWorklist-phase
// before) and push ephemerons where key and value are still unreachable into
// next_ephemerons.
while (weak_objects_.discovered_ephemerons.Pop(kMainThread, &ephemeron)) {
while (weak_objects_.discovered_ephemerons.Pop(kMainThreadTask, &ephemeron)) {
if (ProcessEphemeron(ephemeron.key, ephemeron.value)) {
ephemeron_marked = true;
}
}
// Flush local ephemerons for main task to global pool.
weak_objects_.ephemeron_hash_tables.FlushToGlobal(kMainThread);
weak_objects_.next_ephemerons.FlushToGlobal(kMainThread);
weak_objects_.ephemeron_hash_tables.FlushToGlobal(kMainThreadTask);
weak_objects_.next_ephemerons.FlushToGlobal(kMainThreadTask);
return ephemeron_marked;
}
......@@ -1642,7 +1639,7 @@ void MarkCompactCollector::ProcessEphemeronsLinear() {
DCHECK(weak_objects_.current_ephemerons.IsEmpty());
weak_objects_.current_ephemerons.Swap(weak_objects_.next_ephemerons);
while (weak_objects_.current_ephemerons.Pop(kMainThread, &ephemeron)) {
while (weak_objects_.current_ephemerons.Pop(kMainThreadTask, &ephemeron)) {
ProcessEphemeron(ephemeron.key, ephemeron.value);
if (non_atomic_marking_state()->IsWhite(ephemeron.value)) {
......@@ -1669,7 +1666,8 @@ void MarkCompactCollector::ProcessEphemeronsLinear() {
kTrackNewlyDiscoveredObjects>();
}
while (weak_objects_.discovered_ephemerons.Pop(kMainThread, &ephemeron)) {
while (
weak_objects_.discovered_ephemerons.Pop(kMainThreadTask, &ephemeron)) {
ProcessEphemeron(ephemeron.key, ephemeron.value);
if (non_atomic_marking_state()->IsWhite(ephemeron.value)) {
......@@ -1723,7 +1721,7 @@ void MarkCompactCollector::PerformWrapperTracing() {
LocalEmbedderHeapTracer::ProcessingScope scope(
heap_->local_embedder_heap_tracer());
HeapObject object;
while (marking_worklist()->embedder()->Pop(kMainThread, &object)) {
while (marking_worklist()->embedder()->Pop(kMainThreadTask, &object)) {
scope.TracePossibleWrapper(JSObject::cast(object));
}
}
......@@ -1740,7 +1738,11 @@ void MarkCompactCollector::ProcessMarkingWorklist() {
template <MarkCompactCollector::MarkingWorklistProcessingMode mode>
void MarkCompactCollector::ProcessMarkingWorklistInternal() {
HeapObject object;
MarkCompactMarkingVisitor visitor(this, marking_state());
MarkingVisitor visitor(marking_state(), marking_worklist()->shared(),
marking_worklist()->embedder(), weak_objects(), heap_,
epoch(), Heap::GetBytecodeFlushMode(),
heap_->local_embedder_heap_tracer()->InUse(),
heap_->is_current_gc_forced());
while (!(object = marking_worklist()->Pop()).is_null()) {
// Left trimming may result in grey or black filler objects on the marking
// worklist. Ignore these objects.
......@@ -1760,14 +1762,11 @@ void MarkCompactCollector::ProcessMarkingWorklistInternal() {
DCHECK(object.IsHeapObject());
DCHECK(heap()->Contains(object));
DCHECK(!(marking_state()->IsWhite(object)));
marking_state()->GreyToBlack(object);
if (mode == MarkCompactCollector::MarkingWorklistProcessingMode::
kTrackNewlyDiscoveredObjects) {
AddNewlyDiscovered(object);
}
Map map = object.map();
MarkObject(object, map);
visitor.Visit(map, object);
visitor.Visit(object.map(), object);
}
}
......@@ -1779,7 +1778,7 @@ bool MarkCompactCollector::ProcessEphemeron(HeapObject key, HeapObject value) {
}
} else if (marking_state()->IsWhite(value)) {
weak_objects_.next_ephemerons.Push(kMainThread, Ephemeron{key, value});
weak_objects_.next_ephemerons.Push(kMainThreadTask, Ephemeron{key, value});
}
return false;
......@@ -1790,7 +1789,7 @@ void MarkCompactCollector::ProcessEphemeronMarking() {
// Incremental marking might leave ephemerons in main task's local
// buffer, flush it into global pool.
weak_objects_.next_ephemerons.FlushToGlobal(kMainThread);
weak_objects_.next_ephemerons.FlushToGlobal(kMainThreadTask);
ProcessEphemeronsUntilFixpoint();
......@@ -2025,7 +2024,7 @@ void MarkCompactCollector::ClearNonLiveReferences() {
void MarkCompactCollector::MarkDependentCodeForDeoptimization() {
std::pair<HeapObject, Code> weak_object_in_code;
while (weak_objects_.weak_objects_in_code.Pop(kMainThread,
while (weak_objects_.weak_objects_in_code.Pop(kMainThreadTask,
&weak_object_in_code)) {
HeapObject object = weak_object_in_code.first;
Code code = weak_object_in_code.second;
......@@ -2141,7 +2140,7 @@ void MarkCompactCollector::ClearOldBytecodeCandidates() {
DCHECK(FLAG_flush_bytecode ||
weak_objects_.bytecode_flushing_candidates.IsEmpty());
SharedFunctionInfo flushing_candidate;
while (weak_objects_.bytecode_flushing_candidates.Pop(kMainThread,
while (weak_objects_.bytecode_flushing_candidates.Pop(kMainThreadTask,
&flushing_candidate)) {
// If the BytecodeArray is dead, flush it, which will replace the field with
// an uncompiled data object.
......@@ -2161,7 +2160,7 @@ void MarkCompactCollector::ClearOldBytecodeCandidates() {
void MarkCompactCollector::ClearFlushedJsFunctions() {
DCHECK(FLAG_flush_bytecode || weak_objects_.flushed_js_functions.IsEmpty());
JSFunction flushed_js_function;
while (weak_objects_.flushed_js_functions.Pop(kMainThread,
while (weak_objects_.flushed_js_functions.Pop(kMainThreadTask,
&flushed_js_function)) {
flushed_js_function.ResetIfBytecodeFlushed();
}
......@@ -2169,7 +2168,7 @@ void MarkCompactCollector::ClearFlushedJsFunctions() {
void MarkCompactCollector::ClearFullMapTransitions() {
TransitionArray array;
while (weak_objects_.transition_arrays.Pop(kMainThread, &array)) {
while (weak_objects_.transition_arrays.Pop(kMainThreadTask, &array)) {
int num_transitions = array.number_of_entries();
if (num_transitions > 0) {
Map map;
......@@ -2312,7 +2311,7 @@ void MarkCompactCollector::ClearWeakCollections() {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_WEAK_COLLECTIONS);
EphemeronHashTable table;
while (weak_objects_.ephemeron_hash_tables.Pop(kMainThread, &table)) {
while (weak_objects_.ephemeron_hash_tables.Pop(kMainThreadTask, &table)) {
for (InternalIndex i : table.IterateEntries()) {
HeapObject key = HeapObject::cast(table.KeyAt(i));
#ifdef VERIFY_HEAP
......@@ -2344,7 +2343,7 @@ void MarkCompactCollector::ClearWeakReferences() {
std::pair<HeapObject, HeapObjectSlot> slot;
HeapObjectReference cleared_weak_ref =
HeapObjectReference::ClearedValue(isolate());
while (weak_objects_.weak_references.Pop(kMainThread, &slot)) {
while (weak_objects_.weak_references.Pop(kMainThreadTask, &slot)) {
HeapObject value;
// The slot could have been overwritten, so we have to treat it
// as MaybeObjectSlot.
......@@ -2370,7 +2369,7 @@ void MarkCompactCollector::ClearJSWeakRefs() {
return;
}
JSWeakRef weak_ref;
while (weak_objects_.js_weak_refs.Pop(kMainThread, &weak_ref)) {
while (weak_objects_.js_weak_refs.Pop(kMainThreadTask, &weak_ref)) {
HeapObject target = HeapObject::cast(weak_ref.target());
if (!non_atomic_marking_state()->IsBlackOrGrey(target)) {
weak_ref.set_target(ReadOnlyRoots(isolate()).undefined_value());
......@@ -2381,7 +2380,7 @@ void MarkCompactCollector::ClearJSWeakRefs() {
}
}
WeakCell weak_cell;
while (weak_objects_.weak_cells.Pop(kMainThread, &weak_cell)) {
while (weak_objects_.weak_cells.Pop(kMainThreadTask, &weak_cell)) {
HeapObject target = HeapObject::cast(weak_cell.target());
if (!non_atomic_marking_state()->IsBlackOrGrey(target)) {
DCHECK(!target.IsUndefined());
......
......@@ -23,6 +23,10 @@ void MarkingVisitorBase<ConcreteVisitor, MarkingState>::MarkObject(
concrete_visitor()->SynchronizePageAccess(object);
if (concrete_visitor()->marking_state()->WhiteToGrey(object)) {
marking_worklist_->Push(task_id_, object);
if (V8_UNLIKELY(concrete_visitor()->retaining_path_mode() ==
TraceRetainingPathMode::kEnabled)) {
heap_->AddRetainer(host, object);
}
}
}
......@@ -110,7 +114,7 @@ void MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitCodeTarget(
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitBytecodeArray(
Map map, BytecodeArray object) {
if (!ShouldVisit(object)) return 0;
if (!concrete_visitor()->ShouldVisit(object)) return 0;
int size = BytecodeArray::BodyDescriptor::SizeOf(map, object);
this->VisitMapPointer(object);
BytecodeArray::BodyDescriptor::IterateBody(map, object, size, this);
......@@ -135,7 +139,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSFunction(
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitSharedFunctionInfo(
Map map, SharedFunctionInfo shared_info) {
if (!ShouldVisit(shared_info)) return 0;
if (!concrete_visitor()->ShouldVisit(shared_info)) return 0;
int size = SharedFunctionInfo::BodyDescriptor::SizeOf(map, shared_info);
this->VisitMapPointer(shared_info);
......@@ -252,7 +256,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSTypedArray(
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEphemeronHashTable(
Map map, EphemeronHashTable table) {
if (!ShouldVisit(table)) return 0;
if (!concrete_visitor()->ShouldVisit(table)) return 0;
weak_objects_->ephemeron_hash_tables.Push(task_id_, table);
for (InternalIndex i : table.IterateEntries()) {
......@@ -313,7 +317,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSWeakRef(
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitWeakCell(
Map map, WeakCell weak_cell) {
if (!ShouldVisit(weak_cell)) return 0;
if (!concrete_visitor()->ShouldVisit(weak_cell)) return 0;
int size = WeakCell::BodyDescriptor::SizeOf(map, weak_cell);
this->VisitMapPointer(weak_cell);
......@@ -367,7 +371,7 @@ void MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitDescriptors(
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitDescriptorArray(
Map map, DescriptorArray array) {
if (!ShouldVisit(array)) return 0;
if (!concrete_visitor()->ShouldVisit(array)) return 0;
this->VisitMapPointer(array);
int size = DescriptorArray::BodyDescriptor::SizeOf(map, array);
VisitPointers(array, array.GetFirstPointerSlot(), array.GetDescriptorSlot(0));
......@@ -378,7 +382,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitDescriptorArray(
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitMap(Map meta_map,
Map map) {
if (!ShouldVisit(map)) return 0;
if (!concrete_visitor()->ShouldVisit(map)) return 0;
int size = Map::BodyDescriptor::SizeOf(meta_map, map);
if (map.CanTransition()) {
// Maps that can transition share their descriptor arrays and require
......@@ -413,7 +417,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitMap(Map meta_map,
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitTransitionArray(
Map map, TransitionArray array) {
if (!ShouldVisit(array)) return 0;
if (!concrete_visitor()->ShouldVisit(array)) return 0;
this->VisitMapPointer(array);
int size = TransitionArray::BodyDescriptor::SizeOf(map, array);
TransitionArray::BodyDescriptor::IterateBody(map, array, size, this);
......
......@@ -5,6 +5,7 @@
#ifndef V8_HEAP_MARKING_VISITOR_H_
#define V8_HEAP_MARKING_VISITOR_H_
#include "src/common/globals.h"
#include "src/heap/marking.h"
#include "src/heap/objects-visiting.h"
#include "src/heap/spaces.h"
......@@ -136,33 +137,35 @@ class MarkingStateBase {
//
// Derived classes are expected to provide the following:
// - ConcreteVisitor::marking_state method,
// - ConcreteVisitor::VisitJSObjectSubclass method,
// - ConcreteVisitor::VisitLeftTrimmableArray method,
// - ConcreteVisitor::retaining_path_mode method,
// - ConcreteVisitor::RecordSlot method,
// - ConcreteVisitor::RecordRelocSlot method,
// - ConcreteVisitor::SynchronizePageAccess method.
// - ConcreteVisitor::SynchronizePageAccess method,
// - ConcreteVisitor::VisitJSObjectSubclass method,
// - ConcreteVisitor::VisitLeftTrimmableArray method.
// These methods capture the difference between the concurrent and main thread
// marking visitors. For example, the concurrent visitor has to use the
// snapshotting protocol to visit JSObject and left-trimmable FixedArrays.
template <typename ConcreteVisitor, typename MarkingState>
class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
public:
MarkingVisitorBase(int task_id, MarkingWorklist* marking_worklist,
EmbedderTracingWorklist* embedder_worklist,
WeakObjects* weak_objects, unsigned mark_compact_epoch,
WeakObjects* weak_objects, Heap* heap,
unsigned mark_compact_epoch,
BytecodeFlushMode bytecode_flush_mode,
bool is_embedder_tracing_enabled, bool is_forced_gc)
: marking_worklist_(marking_worklist),
embedder_worklist_(embedder_worklist),
weak_objects_(weak_objects),
heap_(heap),
task_id_(task_id),
mark_compact_epoch_(mark_compact_epoch),
bytecode_flush_mode_(bytecode_flush_mode),
is_embedder_tracing_enabled_(is_embedder_tracing_enabled),
is_forced_gc_(is_forced_gc) {}
// HeapVisitor overrides for objects that require custom visitation.
V8_INLINE bool ShouldVisit(HeapObject object) {
return concrete_visitor()->marking_state()->GreyToBlack(object);
}
V8_INLINE int VisitBytecodeArray(Map map, BytecodeArray object);
V8_INLINE int VisitDescriptorArray(Map map, DescriptorArray object);
V8_INLINE int VisitEphemeronHashTable(Map map, EphemeronHashTable object);
......@@ -235,6 +238,7 @@ class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
MarkingWorklist* const marking_worklist_;
EmbedderTracingWorklist* const embedder_worklist_;
WeakObjects* const weak_objects_;
Heap* const heap_;
const int task_id_;
const unsigned mark_compact_epoch_;
const BytecodeFlushMode bytecode_flush_mode_;
......
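As background for the contract listed in the header comment above, here
is a toy sketch of the CRTP dispatch it relies on (hypothetical types,
not the V8 hierarchy): the base visitor statically casts itself to
ConcreteVisitor, so the main-thread and concurrent visitors can share
the traversal code while supplying their own hooks such as
marking_state() and ShouldVisit(), with no virtual-call overhead.

// Toy CRTP example, not V8 code: the base class calls hooks defined
// only on the concrete visitor, resolved at compile time.
#include <cstdio>

template <typename ConcreteVisitor>
class VisitorBase {
 public:
  int VisitObject(int size) {
    if (!concrete_visitor()->ShouldVisit()) return 0;
    return size;  // a real visitor would iterate the object body here
  }

 protected:
  ConcreteVisitor* concrete_visitor() {
    return static_cast<ConcreteVisitor*>(this);
  }
};

class MainThreadVisitor : public VisitorBase<MainThreadVisitor> {
 public:
  bool ShouldVisit() { return true; }  // e.g. GreyToBlack succeeded
};

int main() {
  MainThreadVisitor visitor;
  std::printf("%d\n", visitor.VisitObject(16));  // 16
}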
......@@ -968,7 +968,7 @@ class MemoryChunk : public BasicMemoryChunk {
void InitializeReservedMemory() { reservation_.Reset(); }
friend class ConcurrentMarkingState;
friend class IncrementalMarkingState;
friend class MajorMarkingState;
friend class MajorAtomicMarkingState;
friend class MajorNonAtomicMarkingState;
friend class MemoryAllocator;
......