Commit 6407a3c0 authored by Michael Lippautz, committed by Commit Bot

[heap] Switch incremental marker to instance-based visitor

This was the last marker to be switched over to the instance-based visitors. Delete StaticMarkingVisitor, which is now unused.

Bug: chromium:738368
Change-Id: I7b5345805268aab277f2961c8598536dfa1a4eeb
Reviewed-on: https://chromium-review.googlesource.com/556037
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#46344}
parent 1520a851
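
For readers skimming the diff below: the change replaces V8's table-driven static marking visitor with an instance-based CRTP visitor. The following is an illustrative sketch only; the names (`g_dispatch_table`, `VisitorBase`, `InstanceMarkingVisitor`, `state_`) are simplified stand-ins and not the real V8 classes, which dispatch on `Map::visitor_id` and carry much more state.

```cpp
// visitor_styles_sketch.cc -- illustrative only; simplified stand-in types.
#include <cstdio>

struct HeapObject { int size; };

// Old pattern (roughly what StaticMarkingVisitor did): static visit functions
// registered in a global dispatch table that must be filled in by an explicit
// Initialize() call at startup.
using Callback = int (*)(HeapObject*);
static Callback g_dispatch_table[1];
static int StaticVisitFixedArray(HeapObject* obj) { return obj->size; }
static void InitializeStaticVisitor() {
  g_dispatch_table[0] = &StaticVisitFixedArray;  // table_.Register(...) analogue
}

// New pattern: a CRTP visitor object. Per-GC state (state_, standing in for
// the MarkCompactCollector*/IncrementalMarking* the real visitor holds) lives
// in the instance, so visit methods no longer need a Heap* threaded through
// them, and there is no global table to initialize.
template <typename ConcreteVisitor>
class VisitorBase {
 public:
  int Visit(HeapObject* obj) {
    // Real V8 dispatches on Map::visitor_id; one case is enough here.
    return static_cast<ConcreteVisitor*>(this)->VisitFixedArray(obj);
  }
};

class InstanceMarkingVisitor final
    : public VisitorBase<InstanceMarkingVisitor> {
 public:
  explicit InstanceMarkingVisitor(int state) : state_(state) {}
  int VisitFixedArray(HeapObject* obj) { return obj->size + state_; }
 private:
  const int state_;  // per-instance state instead of globals
};

int main() {
  HeapObject obj{32};
  InitializeStaticVisitor();
  std::printf("static:   %d\n", g_dispatch_table[0](&obj));  // static:   32
  InstanceMarkingVisitor visitor(/*state=*/1);
  std::printf("instance: %d\n", visitor.Visit(&obj));        // instance: 33
}
```

The practical win is that visitor state no longer has to be re-derived from the Heap on every call, and there is no global table that must be set up before first use, which is why the whole `Initialize()` chain disappears across the files below.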
src/heap/heap.cc

@@ -5739,10 +5739,8 @@ V8_DECLARE_ONCE(initialize_gc_once);
 static void InitializeGCOnce() {
   Scavenger::Initialize();
-  MarkCompactCollector::Initialize();
 }
 
 bool Heap::SetUp() {
 #ifdef DEBUG
   allocation_timeout_ = FLAG_gc_interval;
src/heap/incremental-marking.cc

@@ -213,41 +213,41 @@ void IncrementalMarking::NotifyLeftTrimming(HeapObject* from, HeapObject* to) {
   }
 }
 
-class IncrementalMarkingMarkingVisitor
-    : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> {
+class IncrementalMarkingMarkingVisitor final
+    : public MarkingVisitor<IncrementalMarkingMarkingVisitor> {
  public:
-  static void Initialize() {
-    StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize();
-    table_.Register(kVisitFixedArray, &VisitFixedArrayIncremental);
-    table_.Register(kVisitNativeContext, &VisitNativeContextIncremental);
-  }
+  typedef MarkingVisitor<IncrementalMarkingMarkingVisitor> Parent;
 
   static const int kProgressBarScanningChunk = 32 * 1024;
 
-  static void VisitFixedArrayIncremental(Map* map, HeapObject* object) {
+  explicit IncrementalMarkingMarkingVisitor(MarkCompactCollector* collector)
+      : MarkingVisitor<IncrementalMarkingMarkingVisitor>(collector->heap(),
+                                                         collector),
+        incremental_marking_(collector->heap()->incremental_marking()) {}
+
+  V8_INLINE int VisitFixedArray(Map* map, FixedArray* object) {
     MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
+    int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
     if (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {
       DCHECK(!FLAG_use_marking_progress_bar ||
              chunk->owner()->identity() == LO_SPACE);
-      Heap* heap = map->GetHeap();
       // When using a progress bar for large fixed arrays, scan only a chunk of
       // the array and try to push it onto the marking deque again until it is
       // fully scanned. Fall back to scanning it through to the end in case this
       // fails because of a full deque.
-      int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
       int start_offset =
           Max(FixedArray::BodyDescriptor::kStartOffset, chunk->progress_bar());
       if (start_offset < object_size) {
 #ifdef CONCURRENT_MARKING
-        heap->incremental_marking()->marking_worklist()->PushBailout(object);
+        incremental_marking_->marking_worklist()->PushBailout(object);
 #else
         if (ObjectMarking::IsGrey<IncrementalMarking::kAtomicity>(
-                object, heap->incremental_marking()->marking_state(object))) {
-          heap->incremental_marking()->marking_worklist()->Push(object);
+                object, incremental_marking_->marking_state(object))) {
+          incremental_marking_->marking_worklist()->Push(object);
         } else {
           DCHECK(ObjectMarking::IsBlack<IncrementalMarking::kAtomicity>(
-              object, heap->incremental_marking()->marking_state(object)));
-          heap->mark_compact_collector()->PushBlack(object);
+              object, incremental_marking_->marking_state(object)));
+          collector_->PushBlack(object);
         }
 #endif
         int end_offset =
@@ -255,28 +255,25 @@ class IncrementalMarkingMarkingVisitor
         int already_scanned_offset = start_offset;
         bool scan_until_end = false;
         do {
-          VisitPointers(heap, object,
-                        HeapObject::RawField(object, start_offset),
+          VisitPointers(object, HeapObject::RawField(object, start_offset),
                         HeapObject::RawField(object, end_offset));
           start_offset = end_offset;
           end_offset = Min(object_size, end_offset + kProgressBarScanningChunk);
-          scan_until_end =
-              heap->incremental_marking()->marking_worklist()->IsFull();
+          scan_until_end = incremental_marking_->marking_worklist()->IsFull();
         } while (scan_until_end && start_offset < object_size);
         chunk->set_progress_bar(start_offset);
         if (start_offset < object_size) {
-          heap->incremental_marking()->NotifyIncompleteScanOfObject(
+          incremental_marking_->NotifyIncompleteScanOfObject(
               object_size - (start_offset - already_scanned_offset));
         }
       }
     } else {
-      FixedArrayVisitor::Visit(map, object);
+      FixedArray::BodyDescriptor::IterateBody(object, object_size, this);
     }
+    return object_size;
   }
 
-  static void VisitNativeContextIncremental(Map* map, HeapObject* object) {
-    Context* context = Context::cast(object);
+  V8_INLINE int VisitNativeContext(Map* map, Context* context) {
     // We will mark cache black with a separate pass when we finish marking.
     // Note that GC can happen when the context is not fully initialized,
     // so the cache can be undefined.
@@ -286,47 +283,47 @@ class IncrementalMarkingMarkingVisitor
       HeapObject* heap_obj = HeapObject::cast(cache);
       // Mark the object grey if it is white, do not enqueue it into the
       // marking deque.
-      Heap* heap = map->GetHeap();
-      bool ignored =
-          ObjectMarking::WhiteToGrey<IncrementalMarking::kAtomicity>(
-              heap_obj, heap->incremental_marking()->marking_state(heap_obj));
-      USE(ignored);
+      ObjectMarking::WhiteToGrey<IncrementalMarking::kAtomicity>(
+          heap_obj, incremental_marking_->marking_state(heap_obj));
       }
     }
-    VisitNativeContext(map, context);
+    return Parent::VisitNativeContext(map, context);
   }
 
-  INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) {
+  inline void VisitPointer(HeapObject* host, Object** p) final {
     Object* target = *p;
     if (target->IsHeapObject()) {
-      heap->mark_compact_collector()->RecordSlot(object, p, target);
-      MarkObject(heap, target);
+      collector_->RecordSlot(host, p, target);
+      MarkObject(target);
     }
   }
 
-  INLINE(static void VisitPointers(Heap* heap, HeapObject* object,
-                                   Object** start, Object** end)) {
+  inline void VisitPointers(HeapObject* host, Object** start,
+                            Object** end) final {
     for (Object** p = start; p < end; p++) {
       Object* target = *p;
       if (target->IsHeapObject()) {
-        heap->mark_compact_collector()->RecordSlot(object, p, target);
-        MarkObject(heap, target);
+        collector_->RecordSlot(host, p, target);
+        MarkObject(target);
       }
     }
   }
 
   // Marks the object grey and pushes it on the marking stack.
-  INLINE(static void MarkObject(Heap* heap, Object* obj)) {
-    heap->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj));
+  V8_INLINE void MarkObject(Object* obj) {
+    incremental_marking_->WhiteToGreyAndPush(HeapObject::cast(obj));
   }
 
   // Marks the object black without pushing it on the marking stack.
   // Returns true if object needed marking and false otherwise.
-  INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) {
+  V8_INLINE bool MarkObjectWithoutPush(Object* obj) {
     HeapObject* heap_object = HeapObject::cast(obj);
     return ObjectMarking::WhiteToBlack<IncrementalMarking::kAtomicity>(
-        heap_object, heap->incremental_marking()->marking_state(heap_object));
+        heap_object, incremental_marking_->marking_state(heap_object));
   }
+
+ private:
+  IncrementalMarking* const incremental_marking_;
 };
 
 class IncrementalMarkingRootMarkingVisitor : public RootVisitor {
@@ -354,12 +351,6 @@ class IncrementalMarkingRootMarkingVisitor : public RootVisitor {
   Heap* heap_;
 };
 
-void IncrementalMarking::Initialize() {
-  IncrementalMarkingMarkingVisitor::Initialize();
-}
-
 void IncrementalMarking::SetOldSpacePageFlags(MemoryChunk* chunk,
                                               bool is_marking,
                                               bool is_compacting) {
@@ -893,7 +884,8 @@ void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
   }
 
   DCHECK(ObjectMarking::IsBlack<kAtomicity>(obj, marking_state(obj)));
   WhiteToGreyAndPush(map);
-  IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
+  IncrementalMarkingMarkingVisitor visitor(heap()->mark_compact_collector());
+  visitor.Visit(map, obj);
 }
 
 void IncrementalMarking::ProcessBlackAllocatedObject(HeapObject* obj) {
@@ -913,7 +905,8 @@ void IncrementalMarking::RevisitObject(HeapObject* obj) {
   }
 
   Map* map = obj->map();
   WhiteToGreyAndPush(map);
-  IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
+  IncrementalMarkingMarkingVisitor visitor(heap()->mark_compact_collector());
+  visitor.Visit(map, obj);
 }
 
 intptr_t IncrementalMarking::ProcessMarkingWorklist(
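
The `VisitFixedArray` hunk above scans large arrays in 32 KB chunks, persisting progress in the page's progress bar and re-pushing the object onto the marking worklist until it is fully scanned. Below is a minimal, self-contained model of that control flow; the helper names (`ScanWithProgressBar`, `worklist_full`) are hypothetical stand-ins and no real V8 types are used.

```cpp
// progress_bar_sketch.cc -- simplified model of the chunked scanning loop in
// VisitFixedArray; kChunk mirrors kProgressBarScanningChunk, and
// worklist_full stands in for marking_worklist()->IsFull().
#include <algorithm>
#include <cstdio>

constexpr int kChunk = 32 * 1024;

// Scans from the saved progress bar toward object_size. Returns the new
// progress-bar value; a result < object_size means the object must be
// re-pushed onto the marking worklist and revisited in a later step.
int ScanWithProgressBar(int progress_bar, int object_size,
                        bool (*worklist_full)()) {
  int start = progress_bar;
  int end = std::min(object_size, start + kChunk);
  bool scan_until_end = false;
  do {
    // (visit the pointer slots in [start, end) here)
    start = end;
    end = std::min(object_size, end + kChunk);
    // If the worklist is full we cannot re-push the object, so fall back
    // to scanning it through to the end -- same rule as the real code.
    scan_until_end = worklist_full();
  } while (scan_until_end && start < object_size);
  return start;  // persisted via chunk->set_progress_bar(start)
}

int main() {
  int pb = 0;
  const int size = 100 * 1024;
  while (pb < size) {  // each iteration models one incremental marking step
    pb = ScanWithProgressBar(pb, size, [] { return false; });
    std::printf("progress: %d / %d\n", pb, size);
  }
}
```

Bounding the work per step is what keeps incremental marking pauses short on huge arrays, while the scan-to-the-end fallback preserves correctness when the worklist cannot accept a re-push.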
src/heap/incremental-marking.h

@@ -53,8 +53,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
     bool paused_;
   };
 
-  static void Initialize();
-
   explicit IncrementalMarking(Heap* heap);
 
   MarkingState marking_state(HeapObject* object) const {
src/heap/mark-compact.cc

@@ -4810,10 +4810,6 @@ void MarkCompactCollector::StartSweepSpaces() {
   heap_->lo_space()->FreeUnmarkedObjects();
 }
 
-void MarkCompactCollector::Initialize() {
-  IncrementalMarking::Initialize();
-}
-
 void MarkCompactCollector::RecordCodeEntrySlot(HeapObject* host, Address slot,
                                                Code* target) {
   Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
src/heap/mark-compact.h

@@ -541,8 +541,6 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
     kClearMarkbits,
   };
 
-  static void Initialize();
-
   MarkingState marking_state(HeapObject* object) const override {
     return MarkingState::Internal(object);
   }
[One file's diff was collapsed in the original view.]
src/heap/objects-visiting.h

@@ -6,22 +6,11 @@
 #define V8_OBJECTS_VISITING_H_
 
 #include "src/allocation.h"
-#include "src/heap/embedder-tracing.h"
 #include "src/heap/heap.h"
-#include "src/heap/spaces.h"
 #include "src/layout-descriptor.h"
 #include "src/objects-body-descriptors.h"
 #include "src/objects/string.h"
 
-// This file provides base classes and auxiliary methods for defining
-// static object visitors used during GC.
-// Visiting HeapObject body with a normal ObjectVisitor requires performing
-// two switches on object's instance type to determine object size and layout
-// and one or more virtual method calls on visitor itself.
-// Static visitor is different: it provides a dispatch table which contains
-// pointers to specialized visit functions. Each map has the visitor_id
-// field which contains an index of specialized visitor to use.
-
 namespace v8 {
 namespace internal {
 
@@ -121,122 +110,6 @@ class VisitorDispatchTable {
   base::AtomicWord callbacks_[kVisitorIdCount];
 };
 
-template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
-class FlexibleBodyVisitor : public AllStatic {
- public:
-  INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
-    int object_size = BodyDescriptor::SizeOf(map, object);
-    BodyDescriptor::template IterateBody<StaticVisitor>(object, object_size);
-    return static_cast<ReturnType>(object_size);
-  }
-};
-
-template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
-class FixedBodyVisitor : public AllStatic {
- public:
-  INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
-    BodyDescriptor::template IterateBody<StaticVisitor>(object);
-    return static_cast<ReturnType>(BodyDescriptor::kSize);
-  }
-};
-
-// Base class for visitors used to transitively mark the entire heap.
-// IterateBody returns nothing.
-// Certain types of objects might not be handled by this base class and
-// no visitor function is registered by the generic initialization. A
-// specialized visitor function needs to be provided by the inheriting
-// class itself for those cases.
-//
-// This class is intended to be used in the following way:
-//
-//   class SomeVisitor : public StaticMarkingVisitor<SomeVisitor> {
-//     ...
-//   }
-//
-// This is an example of Curiously recurring template pattern.
-template <typename StaticVisitor>
-class StaticMarkingVisitor : public StaticVisitorBase {
- public:
-  static void Initialize();
-
-  INLINE(static void IterateBody(Map* map, HeapObject* obj)) {
-    table_.GetVisitor(map)(map, obj);
-  }
-
-  INLINE(static void VisitWeakCell(Map* map, HeapObject* object));
-  INLINE(static void VisitTransitionArray(Map* map, HeapObject* object));
-  INLINE(static void VisitCodeEntry(Heap* heap, HeapObject* object,
-                                    Address entry_address));
-  INLINE(static void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo));
-  INLINE(static void VisitCell(Heap* heap, RelocInfo* rinfo));
-  INLINE(static void VisitDebugTarget(Heap* heap, RelocInfo* rinfo));
-  INLINE(static void VisitCodeTarget(Heap* heap, RelocInfo* rinfo));
-  INLINE(static void VisitCodeAgeSequence(Heap* heap, RelocInfo* rinfo));
-  INLINE(static void VisitExternalReference(RelocInfo* rinfo)) {}
-  INLINE(static void VisitInternalReference(RelocInfo* rinfo)) {}
-  INLINE(static void VisitRuntimeEntry(RelocInfo* rinfo)) {}
-  // Skip the weak next code link in a code object.
-  INLINE(static void VisitNextCodeLink(Heap* heap, Object** slot)) {}
-
- protected:
-  INLINE(static void VisitMap(Map* map, HeapObject* object));
-  INLINE(static void VisitCode(Map* map, HeapObject* object));
-  INLINE(static void VisitBytecodeArray(Map* map, HeapObject* object));
-  INLINE(static void VisitSharedFunctionInfo(Map* map, HeapObject* object));
-  INLINE(static void VisitWeakCollection(Map* map, HeapObject* object));
-  INLINE(static void VisitJSFunction(Map* map, HeapObject* object));
-  INLINE(static void VisitNativeContext(Map* map, HeapObject* object));
-
-  // Mark pointers in a Map treating some elements of the descriptor array weak.
-  static void MarkMapContents(Heap* heap, Map* map);
-
-  class DataObjectVisitor {
-   public:
-    template <int size>
-    static inline void VisitSpecialized(Map* map, HeapObject* object) {}
-
-    INLINE(static void Visit(Map* map, HeapObject* object)) {}
-  };
-
-  typedef FlexibleBodyVisitor<StaticVisitor, FixedArray::BodyDescriptor, void>
-      FixedArrayVisitor;
-
-  typedef FlexibleBodyVisitor<StaticVisitor, JSObject::FastBodyDescriptor, void>
-      JSObjectFastVisitor;
-  typedef FlexibleBodyVisitor<StaticVisitor, JSObject::BodyDescriptor, void>
-      JSObjectVisitor;
-
-  class JSApiObjectVisitor : AllStatic {
-   public:
-    INLINE(static void Visit(Map* map, HeapObject* object)) {
-      TracePossibleWrapper(object);
-      JSObjectVisitor::Visit(map, object);
-    }
-
-   private:
-    INLINE(static void TracePossibleWrapper(HeapObject* object)) {
-      if (object->GetHeap()->local_embedder_heap_tracer()->InUse()) {
-        DCHECK(object->IsJSObject());
-        object->GetHeap()->TracePossibleWrapper(JSObject::cast(object));
-      }
-    }
-  };
-
-  typedef FlexibleBodyVisitor<StaticVisitor, StructBodyDescriptor, void>
-      StructObjectVisitor;
-
-  typedef void (*Callback)(Map* map, HeapObject* object);
-
-  static VisitorDispatchTable<Callback> table_;
-};
-
-template <typename StaticVisitor>
-VisitorDispatchTable<typename StaticMarkingVisitor<StaticVisitor>::Callback>
-    StaticMarkingVisitor<StaticVisitor>::table_;
-
 #define TYPED_VISITOR_ID_LIST(V) \
   V(AllocationSite)              \
   V(ByteArray)                   \
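
The deleted `FlexibleBodyVisitor`/`FixedBodyVisitor` helpers existed only to adapt BodyDescriptors to the static dispatch table; instance visitors can call the BodyDescriptor directly, as the new `VisitFixedArray` does via `FixedArray::BodyDescriptor::IterateBody(object, object_size, this)`. A rough sketch of that shape follows; all types here are simplified stand-ins, not V8's.

```cpp
// body_descriptor_sketch.cc -- simplified model of the BodyDescriptor
// pattern: the descriptor knows an object's pointer layout and hands each
// slot range to whatever visitor instance it is given.
#include <cstdio>

struct Object;
struct HeapObject {
  Object* slots[4];  // pretend body: four tagged pointer slots
};

struct FixedArrayBodyDescriptor {
  static int SizeOf(HeapObject*) { return 4; }  // size in slots, simplified
  // Equivalent of IterateBody(object, size, visitor): walk the slot range
  // and let the visitor record/mark each pointer it contains.
  template <typename Visitor>
  static void IterateBody(HeapObject* object, int size, Visitor* visitor) {
    visitor->VisitPointers(object, &object->slots[0], &object->slots[size]);
  }
};

class CountingVisitor {
 public:
  void VisitPointers(HeapObject*, Object** start, Object** end) {
    visited_ += static_cast<int>(end - start);
  }
  int visited() const { return visited_; }
 private:
  int visited_ = 0;
};

int main() {
  HeapObject obj{};
  CountingVisitor visitor;
  FixedArrayBodyDescriptor::IterateBody(
      &obj, FixedArrayBodyDescriptor::SizeOf(&obj), &visitor);
  std::printf("slots visited: %d\n", visitor.visited());  // slots visited: 4
}
```

Because the visitor is now an ordinary object passed by pointer, the descriptor no longer needs a `template <typename StaticVisitor>` hook into a global table, which is what made the wrapper classes above redundant.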