Commit 64329a4b authored by Michael Lippautz, committed by Commit Bot

[heap] Refactor Scavenger

- Avoid static methods.
- Instantiate the actual visitor on the stack.
- Get rid of unnecessary abstractions.

Bug: chromium:738865
Change-Id: I4115d7b88f17a7118aa9ee129eb39a28ec413696
Reviewed-on: https://chromium-review.googlesource.com/558878
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#46384}
parent 48dfb607
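
In outline, the commit replaces the static Scavenger entry points, which resolved their state through a heap-owned scavenge_collector_ singleton, with a Scavenger that is constructed on the stack for the duration of Heap::Scavenge() and passed explicitly to the visitors that need it. Below is a minimal, self-contained sketch of that shape, with stand-in names and bodies rather than the actual V8 declarations (the real ones are in the diff that follows):

// Sketch only: per-cycle scavenger state travels with the instance instead
// of living in a heap-owned singleton reached through static methods.
class Scavenger {
 public:
  Scavenger(bool is_logging, bool is_incremental_marking)
      : is_logging_(is_logging),
        is_incremental_marking_(is_incremental_marking) {}

  // Instance method: the constraints are plain member reads, computed once
  // at construction instead of refreshed via an UpdateConstraints() call.
  void ScavengeObject() {
    if (is_logging_) { /* record the object move */ }
    if (is_incremental_marking_) { /* cooperate with the marker */ }
  }

 private:
  const bool is_logging_;
  const bool is_incremental_marking_;
};

// Visitors receive the scavenger explicitly instead of resolving it through
// heap->scavenge_collector_.
class RootScavengeVisitor {
 public:
  explicit RootScavengeVisitor(Scavenger* scavenger) : scavenger_(scavenger) {}
  void VisitRootPointer() { scavenger_->ScavengeObject(); }

 private:
  Scavenger* const scavenger_;  // borrowed; the scavenge cycle outlives it
};

// Stand-in for Heap::Scavenge(): both objects live on the stack and die at
// scope exit, so SetUp()/TearDown() no longer new/delete a collector.
void Scavenge() {
  Scavenger scavenger(/*is_logging=*/false, /*is_incremental_marking=*/false);
  RootScavengeVisitor root_visitor(&scavenger);
  root_visitor.VisitRootPointer();
}

int main() { Scavenge(); }
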
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -134,7 +134,6 @@ Heap::Heap()
       maximum_size_scavenges_(0),
       last_idle_notification_time_(0.0),
       last_gc_time_(0.0),
-      scavenge_collector_(nullptr),
       mark_compact_collector_(nullptr),
       minor_mark_compact_collector_(nullptr),
       memory_allocator_(nullptr),
@@ -1742,6 +1741,13 @@ void Heap::EvacuateYoungGeneration() {
   SetGCState(NOT_IN_GC);
 }
 
+static bool IsLogging(Isolate* isolate) {
+  return FLAG_verify_predictable || isolate->logger()->is_logging() ||
+         isolate->is_profiling() ||
+         (isolate->heap_profiler() != nullptr &&
+          isolate->heap_profiler()->is_tracking_object_moves());
+}
+
 void Heap::Scavenge() {
   TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE);
   base::LockGuard<base::Mutex> guard(relocation_mutex());
@@ -1767,8 +1773,6 @@ void Heap::Scavenge() {
   // Used for updating survived_since_last_expansion_ at function end.
   size_t survived_watermark = PromotedSpaceSizeOfObjects();
 
-  scavenge_collector_->UpdateConstraints();
-
   // Flip the semispaces. After flipping, to space is empty, from space has
   // live objects.
   new_space_->Flip();
@@ -1794,7 +1798,9 @@ void Heap::Scavenge() {
   Address new_space_front = new_space_->ToSpaceStart();
   promotion_queue_.Initialize();
 
-  RootScavengeVisitor root_scavenge_visitor(this);
+  Scavenger scavenger(this, IsLogging(isolate()),
+                      incremental_marking()->IsMarking());
+  RootScavengeVisitor root_scavenge_visitor(this, &scavenger);
 
   isolate()->global_handles()->IdentifyWeakUnmodifiedObjects(
       &JSObject::IsUnmodifiedApiObject);
@@ -1809,19 +1815,19 @@ void Heap::Scavenge() {
     // Copy objects reachable from the old generation.
     TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_OLD_TO_NEW_POINTERS);
     RememberedSet<OLD_TO_NEW>::Iterate(
-        this, SYNCHRONIZED, [this](Address addr) {
-          return Scavenger::CheckAndScavengeObject(this, addr);
+        this, SYNCHRONIZED, [this, &scavenger](Address addr) {
+          return scavenger.CheckAndScavengeObject(this, addr);
         });
 
     RememberedSet<OLD_TO_NEW>::IterateTyped(
         this, SYNCHRONIZED,
-        [this](SlotType type, Address host_addr, Address addr) {
+        [this, &scavenger](SlotType type, Address host_addr, Address addr) {
          return UpdateTypedSlotHelper::UpdateTypedSlot(
-              isolate(), type, addr, [this](Object** addr) {
+              isolate(), type, addr, [this, &scavenger](Object** addr) {
                 // We expect that objects referenced by code are long living.
                 // If we do not force promotion, then we need to clear
                 // old_to_new slots in dead code objects after mark-compact.
-                return Scavenger::CheckAndScavengeObject(
+                return scavenger.CheckAndScavengeObject(
                     this, reinterpret_cast<Address>(addr));
               });
         });
@@ -1834,7 +1840,7 @@ void Heap::Scavenge() {
 
   {
     TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SEMISPACE);
-    new_space_front = DoScavenge(new_space_front);
+    new_space_front = DoScavenge(&scavenger, new_space_front);
   }
 
   isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending(
@@ -1842,7 +1848,7 @@ void Heap::Scavenge() {
   isolate()->global_handles()->IterateNewSpaceWeakUnmodifiedRoots(
       &root_scavenge_visitor);
 
-  new_space_front = DoScavenge(new_space_front);
+  new_space_front = DoScavenge(&scavenger, new_space_front);
 
   UpdateNewSpaceReferencesInExternalStringTable(
       &UpdateNewSpaceReferenceInExternalStringTableEntry);
@@ -2050,8 +2056,8 @@ void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) {
   external_string_table_.IterateAll(&external_string_table_visitor);
 }
 
-Address Heap::DoScavenge(Address new_space_front) {
-  ScavengeVisitor scavenge_visitor(this);
+Address Heap::DoScavenge(Scavenger* scavenger, Address new_space_front) {
+  ScavengeVisitor scavenge_visitor(this, scavenger);
   do {
     SemiSpace::AssertValidRange(new_space_front, new_space_->top());
     // The addresses new_space_front and new_space_.top() define a
@@ -2081,7 +2087,8 @@ Address Heap::DoScavenge(Address new_space_front) {
         // to new space.
         DCHECK(!target->IsMap());
 
-        IterateAndScavengePromotedObject(target, static_cast<int>(size));
+        IterateAndScavengePromotedObject(scavenger, target,
+                                         static_cast<int>(size));
       }
     }
@@ -5051,8 +5058,9 @@ void Heap::ZapFromSpace() {
 
 class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
  public:
-  IterateAndScavengePromotedObjectsVisitor(Heap* heap, bool record_slots)
-      : heap_(heap), record_slots_(record_slots) {}
+  IterateAndScavengePromotedObjectsVisitor(Heap* heap, Scavenger* scavenger,
+                                           bool record_slots)
+      : heap_(heap), scavenger_(scavenger), record_slots_(record_slots) {}
 
   inline void VisitPointers(HeapObject* host, Object** start,
                             Object** end) override {
@@ -5065,8 +5073,8 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
       if (target->IsHeapObject()) {
         if (heap_->InFromSpace(target)) {
-          Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(slot),
-                                    HeapObject::cast(target));
+          scavenger_->ScavengeObject(reinterpret_cast<HeapObject**>(slot),
+                                     HeapObject::cast(target));
           target = *slot;
           if (heap_->InNewSpace(target)) {
             SLOW_DCHECK(heap_->InToSpace(target));
@@ -5097,11 +5105,13 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
   }
 
  private:
-  Heap* heap_;
+  Heap* const heap_;
+  Scavenger* const scavenger_;
   bool record_slots_;
 };
 
-void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size) {
+void Heap::IterateAndScavengePromotedObject(Scavenger* scavenger,
+                                            HeapObject* target, int size) {
   // We are not collecting slots on new space objects during mutation
   // thus we have to scan for pointers to evacuation candidates when we
   // promote objects. But we should not record any slots in non-black
@@ -5114,7 +5124,8 @@ void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size) {
         ObjectMarking::IsBlack(target, MarkingState::Internal(target));
   }
 
-  IterateAndScavengePromotedObjectsVisitor visitor(this, record_slots);
+  IterateAndScavengePromotedObjectsVisitor visitor(this, scavenger,
+                                                   record_slots);
   if (target->IsJSFunction()) {
     // JSFunctions reachable through kNextFunctionLinkOffset are weak. Slots for
     // this links are recorded during processing of weak lists.
@@ -5800,7 +5811,6 @@ bool Heap::SetUp() {
   }
 
   tracer_ = new GCTracer(this);
-  scavenge_collector_ = new Scavenger(this);
   mark_compact_collector_ = new MarkCompactCollector(this);
   incremental_marking_->set_marking_worklist(
       mark_compact_collector_->marking_worklist());
@@ -5949,9 +5959,6 @@ void Heap::TearDown() {
   delete idle_scavenge_observer_;
   idle_scavenge_observer_ = nullptr;
 
-  delete scavenge_collector_;
-  scavenge_collector_ = nullptr;
-
   if (mark_compact_collector_ != nullptr) {
     mark_compact_collector_->TearDown();
     delete mark_compact_collector_;
--- a/src/heap/heap.h
+++ b/src/heap/heap.h
@@ -1186,7 +1186,8 @@ class Heap {
   void IterateWeakRoots(RootVisitor* v, VisitMode mode);
 
   // Iterate pointers of promoted objects.
-  void IterateAndScavengePromotedObject(HeapObject* target, int size);
+  void IterateAndScavengePromotedObject(Scavenger* scavenger,
+                                        HeapObject* target, int size);
 
   // ===========================================================================
   // Store buffer API. =========================================================
@@ -1839,7 +1840,7 @@ class Heap {
   void Scavenge();
   void EvacuateYoungGeneration();
 
-  Address DoScavenge(Address new_space_front);
+  Address DoScavenge(Scavenger* scavenger, Address new_space_front);
 
   void UpdateNewSpaceReferencesInExternalStringTable(
       ExternalStringTableUpdaterCallback updater_func);
@@ -2305,8 +2306,6 @@ class Heap {
   // Last time a garbage collection happened.
   double last_gc_time_;
 
-  Scavenger* scavenge_collector_;
-
   MarkCompactCollector* mark_compact_collector_;
   MinorMarkCompactCollector* minor_mark_compact_collector_;
--- a/src/heap/scavenger-inl.h
+++ b/src/heap/scavenger-inl.h
@@ -203,7 +203,7 @@ void Scavenger::EvacuateShortcutCandidate(Map* map, HeapObject** slot,
       return;
     }
 
-    Scavenger::ScavengeObjectSlow(slot, first);
+    EvacuateObject(slot, first_word.ToMap(), first);
     object->set_map_word(MapWord::FromForwardingAddress(*slot));
     return;
   }
@@ -213,6 +213,8 @@ void Scavenger::EvacuateShortcutCandidate(Map* map, HeapObject** slot,
 
 void Scavenger::EvacuateObject(HeapObject** slot, Map* map,
                                HeapObject* source) {
+  SLOW_DCHECK(heap_->InFromSpace(source));
+  SLOW_DCHECK(!MapWord::FromMap(map).IsForwardingAddress());
   int size = source->SizeFromMap(map);
   switch (static_cast<VisitorId>(map->visitor_id())) {
     case kVisitThinString:
@@ -254,16 +256,7 @@ void Scavenger::ScavengeObject(HeapObject** p, HeapObject* object) {
   // AllocationMementos are unrooted and shouldn't survive a scavenge
   DCHECK(object->map() != object->GetHeap()->allocation_memento_map());
   // Call the slow part of scavenge object.
-  return ScavengeObjectSlow(p, object);
-}
-
-void Scavenger::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
-  SLOW_DCHECK(object->GetIsolate()->heap()->InFromSpace(object));
-  MapWord first_word = object->map_word();
-  SLOW_DCHECK(!first_word.IsForwardingAddress());
-  Map* map = first_word.ToMap();
-  Scavenger* scavenger = map->GetHeap()->scavenge_collector_;
-  scavenger->EvacuateObject(p, map, object);
+  EvacuateObject(p, first_word.ToMap(), object);
 }
 
 SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap,
@@ -295,8 +288,8 @@ void ScavengeVisitor::VisitPointers(HeapObject* host, Object** start,
   for (Object** p = start; p < end; p++) {
     Object* object = *p;
     if (!heap_->InNewSpace(object)) continue;
-    Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p),
-                              reinterpret_cast<HeapObject*>(object));
+    scavenger_->ScavengeObject(reinterpret_cast<HeapObject**>(p),
+                               reinterpret_cast<HeapObject*>(object));
   }
 }
--- a/src/heap/scavenger.cc
+++ b/src/heap/scavenger.cc
@@ -4,14 +4,8 @@
 
 #include "src/heap/scavenger.h"
 
-#include "src/contexts.h"
 #include "src/heap/heap-inl.h"
-#include "src/heap/incremental-marking.h"
-#include "src/heap/objects-visiting-inl.h"
 #include "src/heap/scavenger-inl.h"
-#include "src/isolate.h"
-#include "src/log.h"
-#include "src/profiler/heap-profiler.h"
 
 namespace v8 {
 namespace internal {
@@ -30,16 +24,6 @@ void Scavenger::RecordCopiedObject(HeapObject* obj) {
   }
 }
 
-void Scavenger::UpdateConstraints() {
-  is_logging_ = FLAG_verify_predictable || isolate()->logger()->is_logging() ||
-                isolate()->is_profiling() ||
-                (isolate()->heap_profiler() != NULL &&
-                 isolate()->heap_profiler()->is_tracking_object_moves());
-  is_incremental_marking_ = heap()->incremental_marking()->IsMarking();
-}
-
-Isolate* Scavenger::isolate() { return heap()->isolate(); }
-
 void RootScavengeVisitor::VisitRootPointer(Root root, Object** p) {
   ScavengePointer(p);
 }
@@ -54,8 +38,8 @@ void RootScavengeVisitor::ScavengePointer(Object** p) {
   Object* object = *p;
   if (!heap_->InNewSpace(object)) return;
 
-  Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p),
-                            reinterpret_cast<HeapObject*>(object));
+  scavenger_->ScavengeObject(reinterpret_cast<HeapObject**>(p),
+                             reinterpret_cast<HeapObject*>(object));
 }
 
 }  // namespace internal
--- a/src/heap/scavenger.h
+++ b/src/heap/scavenger.h
@@ -16,31 +16,25 @@ class Scavenger {
   explicit Scavenger(Heap* heap)
       : heap_(heap), is_logging_(false), is_incremental_marking_(false) {}
 
-  V8_INLINE void EvacuateObject(HeapObject** slot, Map* map,
-                                HeapObject* source);
+  Scavenger(Heap* heap, bool is_logging, bool is_incremental_marking)
+      : heap_(heap),
+        is_logging_(is_logging),
+        is_incremental_marking_(is_incremental_marking) {}
 
   // Callback function passed to Heap::Iterate etc. Copies an object if
   // necessary, the object might be promoted to an old space. The caller must
   // ensure the precondition that the object is (a) a heap object and (b) in
   // the heap's from space.
-  static inline void ScavengeObject(HeapObject** p, HeapObject* object);
-  static inline SlotCallbackResult CheckAndScavengeObject(Heap* heap,
-                                                          Address slot_address);
-
-  // Slow part of {ScavengeObject} above.
-  static inline void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
-
-  void UpdateConstraints();
-
-  Isolate* isolate();
-  Heap* heap() { return heap_; }
+  inline void ScavengeObject(HeapObject** p, HeapObject* object);
+
+  inline SlotCallbackResult CheckAndScavengeObject(Heap* heap,
+                                                   Address slot_address);
+
+  inline Heap* heap() { return heap_; }
 
  private:
   // White list for objects that for sure only contain data.
   V8_INLINE static bool ContainsOnlyData(VisitorId visitor_id);
 
-  void RecordCopiedObject(HeapObject* obj);
-
   V8_INLINE HeapObject* MigrateObject(HeapObject* source, HeapObject* target,
                                       int size);
@@ -50,19 +44,24 @@ class Scavenger {
   V8_INLINE bool PromoteObject(Map* map, HeapObject** slot, HeapObject* object,
                                int object_size);
 
+  V8_INLINE void EvacuateObject(HeapObject** slot, Map* map,
+                                HeapObject* source);
+
+  // Different cases for object evacuation.
   V8_INLINE void EvacuateObjectDefault(Map* map, HeapObject** slot,
                                        HeapObject* object, int object_size);
 
+  // Special cases.
   V8_INLINE void EvacuateJSFunction(Map* map, HeapObject** slot,
                                     JSFunction* object, int object_size);
 
-  V8_INLINE void EvacuateThinString(Map* map, HeapObject** slot,
-                                    ThinString* object, int object_size);
+  inline void EvacuateThinString(Map* map, HeapObject** slot,
+                                 ThinString* object, int object_size);
+
+  inline void EvacuateShortcutCandidate(Map* map, HeapObject** slot,
+                                        ConsString* object, int object_size);
 
-  V8_INLINE void EvacuateShortcutCandidate(Map* map, HeapObject** slot,
-                                           ConsString* object, int object_size);
+  void RecordCopiedObject(HeapObject* obj);
 
   Heap* heap_;
   bool is_logging_;
@@ -71,27 +70,32 @@ class Scavenger {
 
 // Helper class for turning the scavenger into an object visitor that is also
 // filtering out non-HeapObjects and objects which do not reside in new space.
-class RootScavengeVisitor : public RootVisitor {
+class RootScavengeVisitor final : public RootVisitor {
  public:
-  explicit RootScavengeVisitor(Heap* heap) : heap_(heap) {}
+  RootScavengeVisitor(Heap* heap, Scavenger* scavenger)
+      : heap_(heap), scavenger_(scavenger) {}
 
-  void VisitRootPointer(Root root, Object** p) override;
-  void VisitRootPointers(Root root, Object** start, Object** end) override;
+  void VisitRootPointer(Root root, Object** p) final;
+  void VisitRootPointers(Root root, Object** start, Object** end) final;
 
  private:
-  inline void ScavengePointer(Object** p);
+  void ScavengePointer(Object** p);
 
-  Heap* heap_;
+  Heap* const heap_;
+  Scavenger* const scavenger_;
 };
 
 class ScavengeVisitor final : public NewSpaceVisitor<ScavengeVisitor> {
  public:
-  explicit ScavengeVisitor(Heap* heap) : heap_(heap) {}
+  ScavengeVisitor(Heap* heap, Scavenger* scavenger)
+      : heap_(heap), scavenger_(scavenger) {}
 
   V8_INLINE void VisitPointers(HeapObject* host, Object** start,
                                Object** end) final;
 
  private:
-  Heap* heap_;
+  Heap* const heap_;
+  Scavenger* const scavenger_;
 };
 
 }  // namespace internal