Commit 2241b247 authored by Michael Lippautz, committed by Commit Bot

[heap] Move scavenging logic to Scavenger

Bug: chromium:738865
Change-Id: I93721f535ecf8518bf0355e62d5848147460abc8
Reviewed-on: https://chromium-review.googlesource.com/565198
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#46519}
parent 11484e7e
@@ -1772,7 +1772,7 @@ void Heap::Scavenge() {
{
TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SEMISPACE);
DoScavenge(&scavenger);
scavenger.Process();
}
isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending(
@@ -1780,7 +1780,7 @@ void Heap::Scavenge() {
isolate()->global_handles()->IterateNewSpaceWeakUnmodifiedRoots(
&root_scavenge_visitor);
DoScavenge(&scavenger);
scavenger.Process();
UpdateNewSpaceReferencesInExternalStringTable(
&UpdateNewSpaceReferenceInExternalStringTableEntry);
@@ -1984,41 +1984,6 @@ void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) {
external_string_table_.IterateAll(&external_string_table_visitor);
}
void Heap::DoScavenge(Scavenger* scavenger) {
// Threshold at which to switch to processing the promotion list, to avoid
// allocating too much backing store in the worklist.
const int kProcessPromotionListThreshold = kPromotionListSegmentSize / 2;
ScavengeVisitor scavenge_visitor(this, scavenger);
PromotionList::View* promotion_list = scavenger->promotion_list();
CopiedRangesList* copied_list = scavenger->copied_list();
bool done;
do {
done = true;
AddressRange range;
while ((promotion_list->LocalPushSegmentSize() <
kProcessPromotionListThreshold) &&
copied_list->Pop(&range)) {
for (Address current = range.first; current < range.second;) {
HeapObject* object = HeapObject::FromAddress(current);
int size = object->Size();
scavenge_visitor.Visit(object);
current += size;
}
done = false;
}
ObjectAndSize object_and_size;
while (promotion_list->Pop(&object_and_size)) {
HeapObject* target = object_and_size.first;
int size = object_and_size.second;
DCHECK(!target->IsMap());
IterateAndScavengePromotedObject(scavenger, target, size);
done = false;
}
} while (!done);
}
STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) ==
0); // NOLINT
STATIC_ASSERT((FixedTypedArrayBase::kDataOffset & kDoubleAlignmentMask) ==
@@ -4979,85 +4944,6 @@ void Heap::ZapFromSpace() {
}
}
class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
public:
IterateAndScavengePromotedObjectsVisitor(Heap* heap, Scavenger* scavenger,
bool record_slots)
: heap_(heap), scavenger_(scavenger), record_slots_(record_slots) {}
inline void VisitPointers(HeapObject* host, Object** start,
Object** end) override {
Address slot_address = reinterpret_cast<Address>(start);
Page* page = Page::FromAddress(slot_address);
while (slot_address < reinterpret_cast<Address>(end)) {
Object** slot = reinterpret_cast<Object**>(slot_address);
Object* target = *slot;
if (target->IsHeapObject()) {
if (heap_->InFromSpace(target)) {
scavenger_->ScavengeObject(reinterpret_cast<HeapObject**>(slot),
HeapObject::cast(target));
target = *slot;
if (heap_->InNewSpace(target)) {
SLOW_DCHECK(heap_->InToSpace(target));
SLOW_DCHECK(target->IsHeapObject());
RememberedSet<OLD_TO_NEW>::Insert(page, slot_address);
}
SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(
HeapObject::cast(target)));
} else if (record_slots_ &&
MarkCompactCollector::IsOnEvacuationCandidate(
HeapObject::cast(target))) {
heap_->mark_compact_collector()->RecordSlot(host, slot, target);
}
}
slot_address += kPointerSize;
}
}
inline void VisitCodeEntry(JSFunction* host,
Address code_entry_slot) override {
// Black allocation requires us to process objects referenced by
// promoted objects.
if (heap_->incremental_marking()->black_allocation()) {
Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot));
heap_->incremental_marking()->WhiteToGreyAndPush(code);
}
}
private:
Heap* const heap_;
Scavenger* const scavenger_;
bool record_slots_;
};
void Heap::IterateAndScavengePromotedObject(Scavenger* scavenger,
HeapObject* target, int size) {
// We are not collecting slots on new space objects during mutation, thus
// we have to scan for pointers to evacuation candidates when we promote
// objects. But we should not record any slots in non-black objects: grey
// objects' slots would be rescanned anyway, and a white object might not
// survive until the end of the collection, so recording its slots would
// violate the invariant.
bool record_slots = false;
if (incremental_marking()->IsCompacting()) {
record_slots =
ObjectMarking::IsBlack(target, MarkingState::Internal(target));
}
IterateAndScavengePromotedObjectsVisitor visitor(this, scavenger,
record_slots);
if (target->IsJSFunction()) {
// JSFunctions reachable through kNextFunctionLinkOffset are weak. Slots for
// these links are recorded during the processing of weak lists.
JSFunction::BodyDescriptorWeak::IterateBody(target, size, &visitor);
} else {
target->IterateBody(target->map()->instance_type(), size, &visitor);
}
}
void Heap::IterateRoots(RootVisitor* v, VisitMode mode) {
IterateStrongRoots(v, mode);
IterateWeakRoots(v, mode);
......
@@ -1128,10 +1128,6 @@ class Heap {
// Iterates over all the other roots in the heap.
void IterateWeakRoots(RootVisitor* v, VisitMode mode);
// Iterate pointers of promoted objects.
void IterateAndScavengePromotedObject(Scavenger* scavenger,
HeapObject* target, int size);
// ===========================================================================
// Store buffer API. =========================================================
// ===========================================================================
@@ -1783,8 +1779,6 @@ class Heap {
void Scavenge();
void EvacuateYoungGeneration();
void DoScavenge(Scavenger* scavenger);
void UpdateNewSpaceReferencesInExternalStringTable(
ExternalStringTableUpdaterCallback updater_func);
......
@@ -78,7 +78,7 @@ bool Scavenger::SemiSpaceCopyObject(Map* map, HeapObject** slot,
// Update slot to new target.
*slot = target;
copied_list()->Insert(target, object_size);
copied_list_.Insert(target, object_size);
heap()->IncrementSemiSpaceCopiedObjectSize(object_size);
return true;
}
@@ -105,7 +105,7 @@ bool Scavenger::PromoteObject(Map* map, HeapObject** slot, HeapObject* object,
reinterpret_cast<base::AtomicWord>(target));
if (!ContainsOnlyData(static_cast<VisitorId>(map->visitor_id()))) {
promotion_list()->Push(ObjectAndSize(target, object_size));
promotion_list_.Push(ObjectAndSize(target, object_size));
}
heap()->IncrementPromotedObjectsSize(object_size);
return true;
@@ -167,7 +167,7 @@ void Scavenger::EvacuateThinString(Map* map, HeapObject** slot,
*slot = actual;
// ThinStrings always refer to internalized strings, which are
// always in old space.
DCHECK(!map->GetHeap()->InNewSpace(actual));
DCHECK(!heap()->InNewSpace(actual));
object->set_map_word(MapWord::FromForwardingAddress(actual));
return;
}
@@ -228,7 +228,7 @@ void Scavenger::EvacuateObject(HeapObject** slot, Map* map,
}
void Scavenger::ScavengeObject(HeapObject** p, HeapObject* object) {
DCHECK(object->GetIsolate()->heap()->InFromSpace(object));
DCHECK(heap()->InFromSpace(object));
// We use the first word (where the map pointer usually is) of a heap
// object to record the forwarding pointer. A forwarding pointer can
@@ -245,11 +245,11 @@ void Scavenger::ScavengeObject(HeapObject** p, HeapObject* object) {
return;
}
object->GetHeap()->UpdateAllocationSite<Heap::kGlobal>(
object, object->GetHeap()->global_pretenuring_feedback_);
heap()->UpdateAllocationSite<Heap::kGlobal>(
object, heap()->global_pretenuring_feedback_);
// AllocationMementos are unrooted and shouldn't survive a scavenge
DCHECK(object->map() != object->GetHeap()->allocation_memento_map());
DCHECK_NE(heap()->allocation_memento_map(), object->map());
// Call the slow part of scavenge object.
EvacuateObject(p, first_word.ToMap(), object);
}
......
@@ -5,11 +5,119 @@
#include "src/heap/scavenger.h"
#include "src/heap/heap-inl.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/scavenger-inl.h"
#include "src/objects-body-descriptors-inl.h"
namespace v8 {
namespace internal {
class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
public:
IterateAndScavengePromotedObjectsVisitor(Heap* heap, Scavenger* scavenger,
bool record_slots)
: heap_(heap), scavenger_(scavenger), record_slots_(record_slots) {}
inline void VisitPointers(HeapObject* host, Object** start,
Object** end) final {
Address slot_address = reinterpret_cast<Address>(start);
Page* page = Page::FromAddress(slot_address);
while (slot_address < reinterpret_cast<Address>(end)) {
Object** slot = reinterpret_cast<Object**>(slot_address);
Object* target = *slot;
if (target->IsHeapObject()) {
if (heap_->InFromSpace(target)) {
scavenger_->ScavengeObject(reinterpret_cast<HeapObject**>(slot),
HeapObject::cast(target));
target = *slot;
if (heap_->InNewSpace(target)) {
SLOW_DCHECK(target->IsHeapObject());
SLOW_DCHECK(heap_->InToSpace(target));
RememberedSet<OLD_TO_NEW>::Insert(page, slot_address);
}
SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(
HeapObject::cast(target)));
} else if (record_slots_ &&
MarkCompactCollector::IsOnEvacuationCandidate(
HeapObject::cast(target))) {
heap_->mark_compact_collector()->RecordSlot(host, slot, target);
}
}
slot_address += kPointerSize;
}
}
inline void VisitCodeEntry(JSFunction* host, Address code_entry_slot) final {
// Black allocation requires us to process objects referenced by
// promoted objects.
if (heap_->incremental_marking()->black_allocation()) {
Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot));
heap_->incremental_marking()->WhiteToGreyAndPush(code);
}
}
private:
Heap* const heap_;
Scavenger* const scavenger_;
bool record_slots_;
};
void Scavenger::IterateAndScavengePromotedObject(HeapObject* target, int size) {
// We are not collecting slots on new space objects during mutation, thus
// we have to scan for pointers to evacuation candidates when we promote
// objects. But we should not record any slots in non-black objects: grey
// objects' slots would be rescanned anyway, and a white object might not
// survive until the end of the collection, so recording its slots would
// violate the invariant.
const bool record_slots =
heap()->incremental_marking()->IsCompacting() &&
ObjectMarking::IsBlack(target, MarkingState::Internal(target));
IterateAndScavengePromotedObjectsVisitor visitor(heap(), this, record_slots);
if (target->IsJSFunction()) {
// JSFunctions reachable through kNextFunctionLinkOffset are weak. Slots for
// these links are recorded during the processing of weak lists.
JSFunction::BodyDescriptorWeak::IterateBody(target, size, &visitor);
} else {
target->IterateBody(target->map()->instance_type(), size, &visitor);
}
}
void Scavenger::Process() {
// Threshold at which to switch to processing the promotion list, to avoid
// allocating too much backing store in the worklist.
const int kProcessPromotionListThreshold = kPromotionListSegmentSize / 2;
ScavengeVisitor scavenge_visitor(heap(), this);
bool done;
do {
done = true;
AddressRange range;
while ((promotion_list_.LocalPushSegmentSize() <
kProcessPromotionListThreshold) &&
copied_list_.Pop(&range)) {
for (Address current = range.first; current < range.second;) {
HeapObject* object = HeapObject::FromAddress(current);
int size = object->Size();
scavenge_visitor.Visit(object);
current += size;
}
done = false;
}
ObjectAndSize object_and_size;
while (promotion_list_.Pop(&object_and_size)) {
HeapObject* target = object_and_size.first;
int size = object_and_size.second;
DCHECK(!target->IsMap());
IterateAndScavengePromotedObject(target, size);
done = false;
}
} while (!done);
}
void Scavenger::RecordCopiedObject(HeapObject* obj) {
bool should_record = FLAG_log_gc;
#ifdef DEBUG
......
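For readers skimming the scavenger.cc hunk above: Scavenger::Process() interleaves two worklists, draining copied address ranges only while the promotion list's local segment stays under half a segment, then draining promoted objects, and repeating until both are empty. The self-contained sketch below models only that control flow with standard containers; WorkItem, kSegmentSize, and the std::deque stand-ins are illustrative and not part of the V8 sources.

// Stand-alone model of the interleaved worklist drain in Scavenger::Process()
// above. Illustrative only: V8's PromotionList and CopiedRangesList are
// segmented worklists, not std::deque, and visiting real objects can push new
// entries onto either list.
#include <deque>
#include <iostream>

struct WorkItem {
  int size;
  bool promotes;  // whether visiting this item queues a promoted object
};

int main() {
  const int kSegmentSize = 64;  // stand-in for kPromotionListSegmentSize
  const int kProcessPromotionListThreshold = kSegmentSize / 2;

  std::deque<WorkItem> copied_list = {{8, true}, {16, false}, {24, true}};
  std::deque<WorkItem> promotion_list;

  bool done;
  do {
    done = true;
    // Prefer draining copied entries, but stop as soon as the promotion list
    // grows past the threshold so its backing store stays small.
    while (static_cast<int>(promotion_list.size()) <
               kProcessPromotionListThreshold &&
           !copied_list.empty()) {
      WorkItem item = copied_list.front();
      copied_list.pop_front();
      if (item.promotes) promotion_list.push_back({item.size, false});
      done = false;
    }
    // Then drain promoted entries; in the real scavenger, scanning a promoted
    // object may push new ranges onto the copied list, hence the outer loop.
    while (!promotion_list.empty()) {
      promotion_list.pop_front();
      done = false;
    }
  } while (!done);

  std::cout << "all worklists drained" << std::endl;
  return 0;
}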
@@ -76,19 +76,22 @@ class Scavenger {
is_logging_(is_logging),
is_incremental_marking_(is_incremental_marking) {}
// Callback function passed to Heap::Iterate etc. Copies an object if
// necessary; the object might be promoted to an old space. The caller must
// ensure the precondition that the object is (a) a heap object and (b) in
// the heap's from space.
// Scavenges an object |object| referenced from slot |p|. |object| is required
// to be in from space.
inline void ScavengeObject(HeapObject** p, HeapObject* object);
// Potentially scavenges an object referenced from |slot_address| if it is
// indeed a HeapObject and resides in from space.
inline SlotCallbackResult CheckAndScavengeObject(Heap* heap,
Address slot_address);
inline Heap* heap() { return heap_; }
inline PromotionList::View* promotion_list() { return &promotion_list_; }
inline CopiedRangesList* copied_list() { return &copied_list_; }
// Processes remaining work (=objects) after single objects have been
// manually scavenged using ScavengeObject or CheckAndScavengeObject.
void Process();
private:
inline Heap* heap() { return heap_; }
V8_INLINE HeapObject* MigrateObject(HeapObject* source, HeapObject* target,
int size);
@@ -115,6 +118,8 @@ class Scavenger {
inline void EvacuateShortcutCandidate(Map* map, HeapObject** slot,
ConsString* object, int object_size);
void IterateAndScavengePromotedObject(HeapObject* target, int size);
void RecordCopiedObject(HeapObject* obj);
Heap* const heap_;
......
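The scavenger.h hunk documents the intended calling convention: individual objects are scavenged via ScavengeObject or CheckAndScavengeObject (for example while visiting roots in Heap::Scavenge()), and Process() is then called to drain the work those calls queued. The stub program below mirrors that call order only; the HeapObject and Scavenger definitions here are simplified stand-ins, not the V8 declarations.

// Minimal stand-in types; the real classes live in src/heap/scavenger.h.
#include <iostream>
#include <vector>

struct HeapObject {};

class Scavenger {
 public:
  // Copies or promotes a single object referenced from slot |p| (stub).
  void ScavengeObject(HeapObject** p, HeapObject* object) {
    *p = object;
    ++scavenged_;
  }
  // Drains the internal worklists filled by ScavengeObject (stub).
  void Process() {
    std::cout << "processed work for " << scavenged_ << " roots" << std::endl;
  }

 private:
  int scavenged_ = 0;
};

int main() {
  Scavenger scavenger;
  HeapObject a, b;
  std::vector<HeapObject*> roots = {&a, &b};

  // 1. Scavenge each root slot individually, as the root visitor does.
  for (HeapObject*& slot : roots) scavenger.ScavengeObject(&slot, slot);

  // 2. Then drain everything those calls queued, as Heap::Scavenge() now does
  //    via scavenger.Process().
  scavenger.Process();
  return 0;
}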