Commit 64329a4b authored by Michael Lippautz, committed by Commit Bot

[heap] Refactor Scavenger

- Avoid static methods.
- Instantiate the actual visitor on the stack.
- Get rid of unnecessary abstractions.

Bug: chromium:738865
Change-Id: I4115d7b88f17a7118aa9ee129eb39a28ec413696
Reviewed-on: https://chromium-review.googlesource.com/558878
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#46384}
parent 48dfb607
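
In short, the commit replaces a scavenger that was reached through static methods (and a heap-owned scavenge_collector_ singleton) with a per-GC instance that the visitors receive explicitly. A minimal, self-contained sketch of that shape, using simplified stand-in types rather than V8's real classes:

// Toy illustration of the pattern in this commit (stand-in types, not V8).
#include <iostream>

class Heap;  // In V8, the scavenger and its visitors both hold a Heap*.

// After the refactor: configuration (logging, incremental marking) is
// computed once by the caller and injected through the constructor; the
// former static entry points become instance methods.
class Scavenger {
 public:
  Scavenger(Heap* heap, bool is_logging, bool is_incremental_marking)
      : heap_(heap),
        is_logging_(is_logging),
        is_incremental_marking_(is_incremental_marking) {}

  void ScavengeObject(/* HeapObject** p, HeapObject* object */) {
    // Instance state replaces the global lookups the static version needed.
    std::cout << "scavenge: logging=" << is_logging_
              << " incremental=" << is_incremental_marking_ << "\n";
  }

  Heap* heap() const { return heap_; }

 private:
  Heap* const heap_;
  const bool is_logging_;
  const bool is_incremental_marking_;
};

// Visitors no longer call static Scavenger methods; they keep a pointer to
// the stack-allocated instance they were constructed with.
class RootScavengeVisitor {
 public:
  RootScavengeVisitor(Heap* heap, Scavenger* scavenger)
      : heap_(heap), scavenger_(scavenger) {}

  void VisitRootPointer(/* Object** p */) { scavenger_->ScavengeObject(); }

 private:
  Heap* const heap_;
  Scavenger* const scavenger_;
};

int main() {
  Heap* heap = nullptr;  // Stand-in; V8 would pass the real heap here.
  // One scavenge cycle: instantiate on the stack, wire up the visitor.
  Scavenger scavenger(heap, /*is_logging=*/false,
                      /*is_incremental_marking=*/false);
  RootScavengeVisitor visitor(heap, &scavenger);
  visitor.VisitRootPointer();
  return 0;
}
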
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -134,7 +134,6 @@ Heap::Heap()
       maximum_size_scavenges_(0),
       last_idle_notification_time_(0.0),
       last_gc_time_(0.0),
-      scavenge_collector_(nullptr),
       mark_compact_collector_(nullptr),
       minor_mark_compact_collector_(nullptr),
       memory_allocator_(nullptr),
@@ -1742,6 +1741,13 @@ void Heap::EvacuateYoungGeneration() {
   SetGCState(NOT_IN_GC);
 }
 
+static bool IsLogging(Isolate* isolate) {
+  return FLAG_verify_predictable || isolate->logger()->is_logging() ||
+         isolate->is_profiling() ||
+         (isolate->heap_profiler() != nullptr &&
+          isolate->heap_profiler()->is_tracking_object_moves());
+}
+
 void Heap::Scavenge() {
   TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE);
   base::LockGuard<base::Mutex> guard(relocation_mutex());
@@ -1767,8 +1773,6 @@ void Heap::Scavenge() {
   // Used for updating survived_since_last_expansion_ at function end.
   size_t survived_watermark = PromotedSpaceSizeOfObjects();
 
-  scavenge_collector_->UpdateConstraints();
-
   // Flip the semispaces.  After flipping, to space is empty, from space has
   // live objects.
   new_space_->Flip();
@@ -1794,7 +1798,9 @@ void Heap::Scavenge() {
   Address new_space_front = new_space_->ToSpaceStart();
   promotion_queue_.Initialize();
 
-  RootScavengeVisitor root_scavenge_visitor(this);
+  Scavenger scavenger(this, IsLogging(isolate()),
+                      incremental_marking()->IsMarking());
+  RootScavengeVisitor root_scavenge_visitor(this, &scavenger);
 
   isolate()->global_handles()->IdentifyWeakUnmodifiedObjects(
       &JSObject::IsUnmodifiedApiObject);
@@ -1809,19 +1815,19 @@ void Heap::Scavenge() {
     // Copy objects reachable from the old generation.
     TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_OLD_TO_NEW_POINTERS);
     RememberedSet<OLD_TO_NEW>::Iterate(
-        this, SYNCHRONIZED, [this](Address addr) {
-          return Scavenger::CheckAndScavengeObject(this, addr);
+        this, SYNCHRONIZED, [this, &scavenger](Address addr) {
+          return scavenger.CheckAndScavengeObject(this, addr);
         });
 
     RememberedSet<OLD_TO_NEW>::IterateTyped(
         this, SYNCHRONIZED,
-        [this](SlotType type, Address host_addr, Address addr) {
+        [this, &scavenger](SlotType type, Address host_addr, Address addr) {
           return UpdateTypedSlotHelper::UpdateTypedSlot(
-              isolate(), type, addr, [this](Object** addr) {
+              isolate(), type, addr, [this, &scavenger](Object** addr) {
                 // We expect that objects referenced by code are long living.
                 // If we do not force promotion, then we need to clear
                 // old_to_new slots in dead code objects after mark-compact.
-                return Scavenger::CheckAndScavengeObject(
+                return scavenger.CheckAndScavengeObject(
                     this, reinterpret_cast<Address>(addr));
               });
         });
@@ -1834,7 +1840,7 @@ void Heap::Scavenge() {
   {
     TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SEMISPACE);
-    new_space_front = DoScavenge(new_space_front);
+    new_space_front = DoScavenge(&scavenger, new_space_front);
   }
 
   isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending(
@@ -1842,7 +1848,7 @@
   isolate()->global_handles()->IterateNewSpaceWeakUnmodifiedRoots(
       &root_scavenge_visitor);
-  new_space_front = DoScavenge(new_space_front);
+  new_space_front = DoScavenge(&scavenger, new_space_front);
 
   UpdateNewSpaceReferencesInExternalStringTable(
       &UpdateNewSpaceReferenceInExternalStringTableEntry);
@@ -2050,8 +2056,8 @@ void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) {
   external_string_table_.IterateAll(&external_string_table_visitor);
 }
 
-Address Heap::DoScavenge(Address new_space_front) {
-  ScavengeVisitor scavenge_visitor(this);
+Address Heap::DoScavenge(Scavenger* scavenger, Address new_space_front) {
+  ScavengeVisitor scavenge_visitor(this, scavenger);
   do {
     SemiSpace::AssertValidRange(new_space_front, new_space_->top());
     // The addresses new_space_front and new_space_.top() define a
@@ -2081,7 +2087,8 @@ Address Heap::DoScavenge(Address new_space_front) {
         // to new space.
         DCHECK(!target->IsMap());
 
-        IterateAndScavengePromotedObject(target, static_cast<int>(size));
+        IterateAndScavengePromotedObject(scavenger, target,
+                                         static_cast<int>(size));
       }
     }
@@ -5051,8 +5058,9 @@ void Heap::ZapFromSpace() {
 class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
  public:
-  IterateAndScavengePromotedObjectsVisitor(Heap* heap, bool record_slots)
-      : heap_(heap), record_slots_(record_slots) {}
+  IterateAndScavengePromotedObjectsVisitor(Heap* heap, Scavenger* scavenger,
+                                           bool record_slots)
+      : heap_(heap), scavenger_(scavenger), record_slots_(record_slots) {}
 
   inline void VisitPointers(HeapObject* host, Object** start,
                             Object** end) override {
@@ -5065,8 +5073,8 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
     if (target->IsHeapObject()) {
      if (heap_->InFromSpace(target)) {
-        Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(slot),
-                                  HeapObject::cast(target));
+        scavenger_->ScavengeObject(reinterpret_cast<HeapObject**>(slot),
+                                   HeapObject::cast(target));
        target = *slot;
        if (heap_->InNewSpace(target)) {
          SLOW_DCHECK(heap_->InToSpace(target));
@@ -5097,11 +5105,13 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
   }
 
  private:
-  Heap* heap_;
+  Heap* const heap_;
+  Scavenger* const scavenger_;
   bool record_slots_;
 };
 
-void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size) {
+void Heap::IterateAndScavengePromotedObject(Scavenger* scavenger,
+                                            HeapObject* target, int size) {
   // We are not collecting slots on new space objects during mutation
   // thus we have to scan for pointers to evacuation candidates when we
   // promote objects. But we should not record any slots in non-black
@@ -5114,7 +5124,8 @@ void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size) {
         ObjectMarking::IsBlack(target, MarkingState::Internal(target));
   }
 
-  IterateAndScavengePromotedObjectsVisitor visitor(this, record_slots);
+  IterateAndScavengePromotedObjectsVisitor visitor(this, scavenger,
+                                                   record_slots);
   if (target->IsJSFunction()) {
     // JSFunctions reachable through kNextFunctionLinkOffset are weak. Slots for
     // this links are recorded during processing of weak lists.
@@ -5800,7 +5811,6 @@ bool Heap::SetUp() {
   }
 
   tracer_ = new GCTracer(this);
-  scavenge_collector_ = new Scavenger(this);
   mark_compact_collector_ = new MarkCompactCollector(this);
   incremental_marking_->set_marking_worklist(
       mark_compact_collector_->marking_worklist());
@@ -5949,9 +5959,6 @@ void Heap::TearDown() {
   delete idle_scavenge_observer_;
   idle_scavenge_observer_ = nullptr;
 
-  delete scavenge_collector_;
-  scavenge_collector_ = nullptr;
-
   if (mark_compact_collector_ != nullptr) {
     mark_compact_collector_->TearDown();
     delete mark_compact_collector_;
--- a/src/heap/heap.h
+++ b/src/heap/heap.h
@@ -1186,7 +1186,8 @@ class Heap {
   void IterateWeakRoots(RootVisitor* v, VisitMode mode);
 
   // Iterate pointers of promoted objects.
-  void IterateAndScavengePromotedObject(HeapObject* target, int size);
+  void IterateAndScavengePromotedObject(Scavenger* scavenger,
+                                        HeapObject* target, int size);
 
   // ===========================================================================
   // Store buffer API. =========================================================
@@ -1839,7 +1840,7 @@
   void Scavenge();
   void EvacuateYoungGeneration();
 
-  Address DoScavenge(Address new_space_front);
+  Address DoScavenge(Scavenger* scavenger, Address new_space_front);
 
   void UpdateNewSpaceReferencesInExternalStringTable(
       ExternalStringTableUpdaterCallback updater_func);
@@ -2305,8 +2306,6 @@
   // Last time a garbage collection happened.
   double last_gc_time_;
 
-  Scavenger* scavenge_collector_;
-
   MarkCompactCollector* mark_compact_collector_;
   MinorMarkCompactCollector* minor_mark_compact_collector_;
--- a/src/heap/scavenger-inl.h
+++ b/src/heap/scavenger-inl.h
@@ -203,7 +203,7 @@ void Scavenger::EvacuateShortcutCandidate(Map* map, HeapObject** slot,
       return;
     }
 
-    Scavenger::ScavengeObjectSlow(slot, first);
+    EvacuateObject(slot, first_word.ToMap(), first);
     object->set_map_word(MapWord::FromForwardingAddress(*slot));
     return;
   }
@@ -213,6 +213,8 @@
 void Scavenger::EvacuateObject(HeapObject** slot, Map* map,
                                HeapObject* source) {
   SLOW_DCHECK(heap_->InFromSpace(source));
+  SLOW_DCHECK(!MapWord::FromMap(map).IsForwardingAddress());
+  int size = source->SizeFromMap(map);
   switch (static_cast<VisitorId>(map->visitor_id())) {
     case kVisitThinString:
@@ -254,16 +256,7 @@ void Scavenger::ScavengeObject(HeapObject** p, HeapObject* object) {
   // AllocationMementos are unrooted and shouldn't survive a scavenge
   DCHECK(object->map() != object->GetHeap()->allocation_memento_map());
   // Call the slow part of scavenge object.
-  return ScavengeObjectSlow(p, object);
-}
-
-void Scavenger::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
-  SLOW_DCHECK(object->GetIsolate()->heap()->InFromSpace(object));
-  MapWord first_word = object->map_word();
-  SLOW_DCHECK(!first_word.IsForwardingAddress());
-  Map* map = first_word.ToMap();
-  Scavenger* scavenger = map->GetHeap()->scavenge_collector_;
-  scavenger->EvacuateObject(p, map, object);
+  EvacuateObject(p, first_word.ToMap(), object);
 }
 
 SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap,
@@ -295,8 +288,8 @@ void ScavengeVisitor::VisitPointers(HeapObject* host, Object** start,
   for (Object** p = start; p < end; p++) {
     Object* object = *p;
     if (!heap_->InNewSpace(object)) continue;
-    Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p),
-                              reinterpret_cast<HeapObject*>(object));
+    scavenger_->ScavengeObject(reinterpret_cast<HeapObject**>(p),
+                               reinterpret_cast<HeapObject*>(object));
   }
 }
--- a/src/heap/scavenger.cc
+++ b/src/heap/scavenger.cc
@@ -4,14 +4,8 @@
 #include "src/heap/scavenger.h"
 
 #include "src/contexts.h"
 #include "src/heap/heap-inl.h"
-#include "src/heap/incremental-marking.h"
 #include "src/heap/objects-visiting-inl.h"
 #include "src/heap/scavenger-inl.h"
-#include "src/isolate.h"
-#include "src/log.h"
-#include "src/profiler/heap-profiler.h"
 
 namespace v8 {
 namespace internal {
@@ -30,16 +24,6 @@ void Scavenger::RecordCopiedObject(HeapObject* obj) {
   }
 }
 
-void Scavenger::UpdateConstraints() {
-  is_logging_ = FLAG_verify_predictable || isolate()->logger()->is_logging() ||
-                isolate()->is_profiling() ||
-                (isolate()->heap_profiler() != NULL &&
-                 isolate()->heap_profiler()->is_tracking_object_moves());
-  is_incremental_marking_ = heap()->incremental_marking()->IsMarking();
-}
-
-Isolate* Scavenger::isolate() { return heap()->isolate(); }
-
 void RootScavengeVisitor::VisitRootPointer(Root root, Object** p) {
   ScavengePointer(p);
 }
@@ -54,8 +38,8 @@ void RootScavengeVisitor::ScavengePointer(Object** p) {
   Object* object = *p;
   if (!heap_->InNewSpace(object)) return;
 
-  Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p),
-                            reinterpret_cast<HeapObject*>(object));
+  scavenger_->ScavengeObject(reinterpret_cast<HeapObject**>(p),
+                             reinterpret_cast<HeapObject*>(object));
 }
 
 }  // namespace internal
--- a/src/heap/scavenger.h
+++ b/src/heap/scavenger.h
@@ -16,31 +16,25 @@ class Scavenger {
-  explicit Scavenger(Heap* heap)
-      : heap_(heap), is_logging_(false), is_incremental_marking_(false) {}
-
-  V8_INLINE void EvacuateObject(HeapObject** slot, Map* map,
-                                HeapObject* source);
+  Scavenger(Heap* heap, bool is_logging, bool is_incremental_marking)
+      : heap_(heap),
+        is_logging_(is_logging),
+        is_incremental_marking_(is_incremental_marking) {}
 
   // Callback function passed to Heap::Iterate etc. Copies an object if
   // necessary, the object might be promoted to an old space. The caller must
   // ensure the precondition that the object is (a) a heap object and (b) in
   // the heap's from space.
-  static inline void ScavengeObject(HeapObject** p, HeapObject* object);
-
-  static inline SlotCallbackResult CheckAndScavengeObject(Heap* heap,
-                                                          Address slot_address);
-
-  // Slow part of {ScavengeObject} above.
-  static inline void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
-
-  void UpdateConstraints();
+  inline void ScavengeObject(HeapObject** p, HeapObject* object);
 
-  Isolate* isolate();
-  Heap* heap() { return heap_; }
+  inline SlotCallbackResult CheckAndScavengeObject(Heap* heap,
+                                                   Address slot_address);
+
+  inline Heap* heap() { return heap_; }
 
  private:
   // White list for objects that for sure only contain data.
   V8_INLINE static bool ContainsOnlyData(VisitorId visitor_id);
 
-  void RecordCopiedObject(HeapObject* obj);
-
   V8_INLINE HeapObject* MigrateObject(HeapObject* source, HeapObject* target,
                                       int size);
@@ -50,19 +44,24 @@
   V8_INLINE bool PromoteObject(Map* map, HeapObject** slot, HeapObject* object,
                                int object_size);
 
+  V8_INLINE void EvacuateObject(HeapObject** slot, Map* map,
+                                HeapObject* source);
+
   // Different cases for object evacuation.
   V8_INLINE void EvacuateObjectDefault(Map* map, HeapObject** slot,
                                        HeapObject* object, int object_size);
 
   // Special cases.
   V8_INLINE void EvacuateJSFunction(Map* map, HeapObject** slot,
                                     JSFunction* object, int object_size);
 
-  V8_INLINE void EvacuateThinString(Map* map, HeapObject** slot,
-                                    ThinString* object, int object_size);
+  inline void EvacuateThinString(Map* map, HeapObject** slot,
+                                 ThinString* object, int object_size);
 
-  V8_INLINE void EvacuateShortcutCandidate(Map* map, HeapObject** slot,
-                                           ConsString* object, int object_size);
+  inline void EvacuateShortcutCandidate(Map* map, HeapObject** slot,
+                                        ConsString* object, int object_size);
+
+  void RecordCopiedObject(HeapObject* obj);
 
   Heap* heap_;
   bool is_logging_;
@@ -71,27 +70,32 @@
 // Helper class for turning the scavenger into an object visitor that is also
 // filtering out non-HeapObjects and objects which do not reside in new space.
-class RootScavengeVisitor : public RootVisitor {
+class RootScavengeVisitor final : public RootVisitor {
  public:
-  explicit RootScavengeVisitor(Heap* heap) : heap_(heap) {}
+  RootScavengeVisitor(Heap* heap, Scavenger* scavenger)
+      : heap_(heap), scavenger_(scavenger) {}
 
-  void VisitRootPointer(Root root, Object** p) override;
-  void VisitRootPointers(Root root, Object** start, Object** end) override;
+  void VisitRootPointer(Root root, Object** p) final;
+  void VisitRootPointers(Root root, Object** start, Object** end) final;
 
  private:
-  inline void ScavengePointer(Object** p);
+  void ScavengePointer(Object** p);
 
-  Heap* heap_;
+  Heap* const heap_;
+  Scavenger* const scavenger_;
 };
 
 class ScavengeVisitor final : public NewSpaceVisitor<ScavengeVisitor> {
  public:
-  explicit ScavengeVisitor(Heap* heap) : heap_(heap) {}
+  ScavengeVisitor(Heap* heap, Scavenger* scavenger)
+      : heap_(heap), scavenger_(scavenger) {}
 
   V8_INLINE void VisitPointers(HeapObject* host, Object** start,
                                Object** end) final;
 
  private:
-  Heap* heap_;
+  Heap* const heap_;
+  Scavenger* const scavenger_;
 };
 
 }  // namespace internal
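
The SetUp()/TearDown() hunks in heap.cc show the matching lifetime change: the scavenger is no longer a heap-allocated collector living as long as the Heap, but a stack object scoped to a single scavenge. A toy sketch of that ownership difference (simplified stand-ins, not V8 code):

// Before: long-lived singleton, new'd in SetUp() and deleted in TearDown(),
// reconfigured via UpdateConstraints() before each GC.
// After: constructed inside Scavenge() with the flags it needs and
// destroyed automatically when the cycle ends.
struct Scavenger {
  explicit Scavenger(bool is_logging) : is_logging(is_logging) {}
  bool is_logging;
};

struct HeapBefore {
  Scavenger* scavenge_collector_ = nullptr;
  void SetUp() { scavenge_collector_ = new Scavenger(false); }
  void TearDown() {
    delete scavenge_collector_;
    scavenge_collector_ = nullptr;
  }
};

struct HeapAfter {
  void Scavenge() {
    Scavenger scavenger(/*is_logging=*/false);  // Lives for this cycle only.
    // ... pass &scavenger to the visitors doing the actual work ...
  }
};

int main() {
  HeapBefore before;
  before.SetUp();
  before.TearDown();

  HeapAfter after;
  after.Scavenge();
  return 0;
}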