Commit b1eecfd5 authored by Michael Lippautz, committed by Commit Bot

[heap] Scavenger fixes

Various Scavenger fixes for smaller issues that have accumulated over the
past few years.

Bug: chromium:738865
Change-Id: I7573e438eba030944b99c65807944c662526a171
Reviewed-on: https://chromium-review.googlesource.com/567190
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#46578}
parent f6f86e74
......@@ -34,11 +34,8 @@ bool ContainsOnlyData(VisitorId visitor_id) {
} // namespace
// Helper function used by CopyObject to copy a source object to an
// allocated target object and update the forwarding pointer in the source
// object. Returns the target object.
HeapObject* Scavenger::MigrateObject(HeapObject* source, HeapObject* target,
int size) {
void Scavenger::MigrateObject(HeapObject* source, HeapObject* target,
int size) {
// If we migrate into to-space, then the to-space top pointer should be
// right after the target object. Incorporate double alignment
// over-allocation.
......@@ -61,7 +58,6 @@ HeapObject* Scavenger::MigrateObject(HeapObject* source, HeapObject* target,
if (is_incremental_marking_) {
heap()->incremental_marking()->TransferColor(source, target);
}
return target;
}
bool Scavenger::SemiSpaceCopyObject(Map* map, HeapObject** slot,
......@@ -71,11 +67,11 @@ bool Scavenger::SemiSpaceCopyObject(Map* map, HeapObject** slot,
AllocationResult allocation =
heap()->new_space()->AllocateRaw(object_size, alignment);
HeapObject* target = NULL; // Initialization to please compiler.
HeapObject* target = nullptr;
if (allocation.To(&target)) {
DCHECK(ObjectMarking::IsWhite(
target, heap()->mark_compact_collector()->marking_state(target)));
MigrateObject(object, target, object_size);
// Update slot to new target.
*slot = target;
copied_list_.Insert(target, object_size);
......@@ -91,18 +87,12 @@ bool Scavenger::PromoteObject(Map* map, HeapObject** slot, HeapObject* object,
AllocationResult allocation =
heap()->old_space()->AllocateRaw(object_size, alignment);
HeapObject* target = NULL; // Initialization to please compiler.
HeapObject* target = nullptr;
if (allocation.To(&target)) {
DCHECK(ObjectMarking::IsWhite(
target, heap()->mark_compact_collector()->marking_state(target)));
MigrateObject(object, target, object_size);
// Update slot to new target using CAS. A concurrent sweeper thread my
// filter the slot concurrently.
HeapObject* old = *slot;
base::Release_CompareAndSwap(reinterpret_cast<base::AtomicWord*>(slot),
reinterpret_cast<base::AtomicWord>(old),
reinterpret_cast<base::AtomicWord>(target));
*slot = target;
if (!ContainsOnlyData(static_cast<VisitorId>(map->visitor_id()))) {
promotion_list_.Push(ObjectAndSize(target, object_size));
......@@ -248,10 +238,11 @@ void Scavenger::ScavengeObject(HeapObject** p, HeapObject* object) {
heap()->UpdateAllocationSite<Heap::kCached>(object,
&local_pretenuring_feedback_);
Map* map = first_word.ToMap();
// AllocationMementos are unrooted and shouldn't survive a scavenge
DCHECK_NE(heap()->allocation_memento_map(), object->map());
DCHECK_NE(heap()->allocation_memento_map(), map);
// Call the slow part of scavenge object.
EvacuateObject(p, first_word.ToMap(), object);
EvacuateObject(p, map, object);
}
SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap,
......
......@@ -21,10 +21,9 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
inline void VisitPointers(HeapObject* host, Object** start,
Object** end) final {
Address slot_address = reinterpret_cast<Address>(start);
Page* page = Page::FromAddress(slot_address);
while (slot_address < reinterpret_cast<Address>(end)) {
for (Address slot_address = reinterpret_cast<Address>(start);
slot_address < reinterpret_cast<Address>(end);
slot_address += kPointerSize) {
Object** slot = reinterpret_cast<Object**>(slot_address);
Object* target = *slot;
......@@ -36,7 +35,8 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
if (heap_->InNewSpace(target)) {
SLOW_DCHECK(target->IsHeapObject());
SLOW_DCHECK(heap_->InToSpace(target));
RememberedSet<OLD_TO_NEW>::Insert(page, slot_address);
RememberedSet<OLD_TO_NEW>::Insert(Page::FromAddress(slot_address),
slot_address);
}
SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(
HeapObject::cast(target)));
......@@ -46,8 +46,6 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
heap_->mark_compact_collector()->RecordSlot(host, slot, target);
}
}
slot_address += kPointerSize;
}
}
......@@ -63,7 +61,7 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
private:
Heap* const heap_;
Scavenger* const scavenger_;
bool record_slots_;
const bool record_slots_;
};
void Scavenger::IterateAndScavengePromotedObject(HeapObject* target, int size) {
......
......@@ -100,8 +100,9 @@ class Scavenger {
inline Heap* heap() { return heap_; }
V8_INLINE HeapObject* MigrateObject(HeapObject* source, HeapObject* target,
int size);
// Copies |source| to |target| and sets the forwarding pointer in |source|.
V8_INLINE void MigrateObject(HeapObject* source, HeapObject* target,
int size);
V8_INLINE bool SemiSpaceCopyObject(Map* map, HeapObject** slot,
HeapObject* object, int object_size);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment