Commit 5876d8f5 authored by Michael Achenbach, committed by Commit Bot

Revert "[heap] Reuse object evacuation information for slot recording in Scavenger."

This reverts commit adea021b.

Reason for revert:
https://crbug.com/v8/8087

Original change's description:
> [heap] Reuse object evacuation information for slot recording in Scavenger.
> 
> Bug: chromium:852420
> Change-Id: If092b5c8b093b313807687a27bc29bebd1c4ee5f
> Reviewed-on: https://chromium-review.googlesource.com/1187143
> Commit-Queue: Hannes Payer <hpayer@chromium.org>
> Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#55364}

TBR=ulan@chromium.org,hpayer@chromium.org

Change-Id: I480328e852c09f460a38e141eb7d2960c5001d35
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: chromium:852420
Reviewed-on: https://chromium-review.googlesource.com/1187301
Reviewed-by: Michael Achenbach <machenbach@chromium.org>
Commit-Queue: Michael Achenbach <machenbach@chromium.org>
Cr-Commit-Position: refs/heads/master@{#55367}
parent eb1eaf63
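
Context for the diff below: the CL being reverted made the Scavenger's evacuation helpers return a SlotCallbackResult (KEEP_SLOT or REMOVE_SLOT) so that remembered-set maintenance could reuse the evacuation outcome directly; this revert restores the void-returning versions, in which callers re-derive the slot disposition afterwards by checking which space the object landed in. A minimal, runnable sketch of the two shapes, using simplified stand-in types rather than the real V8 classes:

```cpp
#include <cstdio>

// Simplified stand-ins for illustration only; not the real V8 types.
enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT };
enum Space { kToSpace, kOldSpace };

struct HeapObject {
  Space space;
};

bool InToSpace(const HeapObject* o) { return o->space == kToSpace; }

// Shape introduced by the reverted CL: evacuation reports the slot
// disposition itself, so the caller never has to reload and re-classify.
SlotCallbackResult EvacuateReturningResult(HeapObject** slot, Space dest) {
  (*slot)->space = dest;  // pretend the object moved to |dest|
  return InToSpace(*slot) ? KEEP_SLOT : REMOVE_SLOT;
}

// Shape restored by this revert: evacuation returns void and the caller
// re-derives the disposition from the (possibly updated) slot afterwards.
void EvacuateVoid(HeapObject** slot, Space dest) { (*slot)->space = dest; }

int main() {
  HeapObject obj{kToSpace};
  HeapObject* slot = &obj;

  SlotCallbackResult r1 = EvacuateReturningResult(&slot, kOldSpace);
  EvacuateVoid(&slot, kToSpace);
  SlotCallbackResult r2 = InToSpace(slot) ? KEEP_SLOT : REMOVE_SLOT;

  std::printf("%d %d\n", r1 == REMOVE_SLOT, r2 == KEEP_SLOT);  // prints "1 1"
}
```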
@@ -125,33 +125,29 @@ bool Scavenger::PromoteObject(Map* map, HeapObjectReference** slot,
   return false;
 }
 
-SlotCallbackResult Scavenger::EvacuateObjectDefault(Map* map,
-                                                    HeapObjectReference** slot,
-                                                    HeapObject* object,
-                                                    int object_size) {
+void Scavenger::EvacuateObjectDefault(Map* map, HeapObjectReference** slot,
+                                      HeapObject* object, int object_size) {
   SLOW_DCHECK(object_size <= Page::kAllocatableMemory);
   SLOW_DCHECK(object->SizeFromMap(map) == object_size);
   if (!heap()->ShouldBePromoted(object->address())) {
     // A semi-space copy may fail due to fragmentation. In that case, we
     // try to promote the object.
-    if (SemiSpaceCopyObject(map, slot, object, object_size)) return KEEP_SLOT;
+    if (SemiSpaceCopyObject(map, slot, object, object_size)) return;
   }
-  if (PromoteObject(map, slot, object, object_size)) return REMOVE_SLOT;
+  if (PromoteObject(map, slot, object, object_size)) return;
   // If promotion failed, we try to copy the object to the other semi-space
-  if (SemiSpaceCopyObject(map, slot, object, object_size)) return KEEP_SLOT;
+  if (SemiSpaceCopyObject(map, slot, object, object_size)) return;
   heap()->FatalProcessOutOfMemory("Scavenger: semi-space copy");
-  UNREACHABLE();
 }
 
-SlotCallbackResult Scavenger::EvacuateThinString(Map* map, HeapObject** slot,
-                                                 ThinString* object,
-                                                 int object_size) {
+void Scavenger::EvacuateThinString(Map* map, HeapObject** slot,
+                                   ThinString* object, int object_size) {
   if (!is_incremental_marking_) {
-    // Loading actual is fine in a parallel setting since there is no write.
+    // Loading actual is fine in a parallel setting is there is no write.
     String* actual = object->actual();
     object->set_length(0);
     *slot = actual;
@@ -161,17 +157,15 @@ SlotCallbackResult Scavenger::EvacuateThinString(Map* map, HeapObject** slot,
     base::AsAtomicPointer::Relaxed_Store(
         reinterpret_cast<Map**>(object->address()),
         MapWord::FromForwardingAddress(actual).ToMap());
-    return REMOVE_SLOT;
+    return;
   }
 
-  return EvacuateObjectDefault(
-      map, reinterpret_cast<HeapObjectReference**>(slot), object, object_size);
+  EvacuateObjectDefault(map, reinterpret_cast<HeapObjectReference**>(slot),
+                        object, object_size);
 }
 
-SlotCallbackResult Scavenger::EvacuateShortcutCandidate(Map* map,
-                                                        HeapObject** slot,
-                                                        ConsString* object,
-                                                        int object_size) {
+void Scavenger::EvacuateShortcutCandidate(Map* map, HeapObject** slot,
+                                          ConsString* object, int object_size) {
   DCHECK(IsShortcutCandidate(map->instance_type()));
   if (!is_incremental_marking_ &&
       object->unchecked_second() == ReadOnlyRoots(heap()).empty_string()) {
@@ -183,10 +177,10 @@ SlotCallbackResult Scavenger::EvacuateShortcutCandidate(Map* map,
       base::AsAtomicPointer::Relaxed_Store(
           reinterpret_cast<Map**>(object->address()),
           MapWord::FromForwardingAddress(first).ToMap());
-      return REMOVE_SLOT;
+      return;
     }
 
-    MapWord first_word = first->synchronized_map_word();
+    MapWord first_word = first->map_word();
     if (first_word.IsForwardingAddress()) {
       HeapObject* target = first_word.ToForwardingAddress();
@@ -194,24 +188,23 @@ SlotCallbackResult Scavenger::EvacuateShortcutCandidate(Map* map,
       base::AsAtomicPointer::Relaxed_Store(
           reinterpret_cast<Map**>(object->address()),
           MapWord::FromForwardingAddress(target).ToMap());
-      return Heap::InToSpace(target) ? KEEP_SLOT : REMOVE_SLOT;
+      return;
     }
 
     Map* map = first_word.ToMap();
-    SlotCallbackResult result = EvacuateObjectDefault(
-        map, reinterpret_cast<HeapObjectReference**>(slot), first,
-        first->SizeFromMap(map));
+    EvacuateObjectDefault(map, reinterpret_cast<HeapObjectReference**>(slot),
+                          first, first->SizeFromMap(map));
     base::AsAtomicPointer::Relaxed_Store(
         reinterpret_cast<Map**>(object->address()),
         MapWord::FromForwardingAddress(*slot).ToMap());
-    return result;
+    return;
   }
 
-  return EvacuateObjectDefault(
-      map, reinterpret_cast<HeapObjectReference**>(slot), object, object_size);
+  EvacuateObjectDefault(map, reinterpret_cast<HeapObjectReference**>(slot),
+                        object, object_size);
 }
 
-SlotCallbackResult Scavenger::EvacuateObject(HeapObjectReference** slot,
-                                             Map* map, HeapObject* source) {
+void Scavenger::EvacuateObject(HeapObjectReference** slot, Map* map,
+                               HeapObject* source) {
   SLOW_DCHECK(Heap::InFromSpace(source));
   SLOW_DCHECK(!MapWord::FromMap(map).IsForwardingAddress());
   int size = source->SizeFromMap(map);
@@ -221,21 +214,22 @@ SlotCallbackResult Scavenger::EvacuateObject(HeapObjectReference** slot,
     case kVisitThinString:
      // At the moment we don't allow weak pointers to thin strings.
      DCHECK(!(*slot)->IsWeakHeapObject());
-      return EvacuateThinString(map, reinterpret_cast<HeapObject**>(slot),
-                                reinterpret_cast<ThinString*>(source), size);
+      EvacuateThinString(map, reinterpret_cast<HeapObject**>(slot),
+                         reinterpret_cast<ThinString*>(source), size);
+      break;
     case kVisitShortcutCandidate:
       DCHECK(!(*slot)->IsWeakHeapObject());
       // At the moment we don't allow weak pointers to cons strings.
-      return EvacuateShortcutCandidate(
-          map, reinterpret_cast<HeapObject**>(slot),
-          reinterpret_cast<ConsString*>(source), size);
+      EvacuateShortcutCandidate(map, reinterpret_cast<HeapObject**>(slot),
+                                reinterpret_cast<ConsString*>(source), size);
+      break;
     default:
-      return EvacuateObjectDefault(map, slot, source, size);
+      EvacuateObjectDefault(map, slot, source, size);
+      break;
   }
 }
 
-SlotCallbackResult Scavenger::ScavengeObject(HeapObjectReference** p,
-                                             HeapObject* object) {
+void Scavenger::ScavengeObject(HeapObjectReference** p, HeapObject* object) {
   DCHECK(Heap::InFromSpace(object));
 
   // Synchronized load that consumes the publishing CAS of MigrateObject.
@@ -252,14 +246,14 @@ SlotCallbackResult Scavenger::ScavengeObject(HeapObjectReference** p,
       DCHECK((*p)->IsStrongHeapObject());
       *p = HeapObjectReference::Strong(dest);
     }
-    return Heap::InToSpace(dest) ? KEEP_SLOT : REMOVE_SLOT;
+    return;
   }
 
   Map* map = first_word.ToMap();
   // AllocationMementos are unrooted and shouldn't survive a scavenge
   DCHECK_NE(ReadOnlyRoots(heap()).allocation_memento_map(), map);
   // Call the slow part of scavenge object.
-  return EvacuateObject(p, map, object);
+  EvacuateObject(p, map, object);
 }
 
 SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap,
@@ -273,8 +267,17 @@ SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap,
     DCHECK(success);
     DCHECK(heap_object->IsHeapObject());
 
-    return ScavengeObject(reinterpret_cast<HeapObjectReference**>(slot),
-                          heap_object);
+    ScavengeObject(reinterpret_cast<HeapObjectReference**>(slot), heap_object);
+
+    object = *slot;
+    // If the object was in from space before and is after executing the
+    // callback in to space, the object is still live.
+    // Unfortunately, we do not know about the slot. It could be in a
+    // just freed free space object.
+    PageMemoryFence(object);
+    if (Heap::InToSpace(object)) {
+      return KEEP_SLOT;
+    }
   } else if (Heap::InToSpace(object)) {
     // Already updated slot. This can happen when processing of the work list
     // is interleaved with processing roots.
...
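
The hunk above restores the two-step flow in CheckAndScavengeObject: scavenge first, then reload the slot and classify it, since ScavengeObject no longer reports where the object went. A runnable toy model of that control flow, where all names are simplified stand-ins rather than the real V8 API:

```cpp
#include <atomic>

// Simplified stand-ins; the real code operates on V8's MaybeObject slots.
enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT };
enum Space { kFromSpace, kToSpace, kOldSpace };

struct HeapObject { Space space; };

// Toy "scavenge": publishes the forwarded copy into the slot. Whether it
// lands in to-space or old space stands in for copy-vs-promote.
static void ScavengeObject(std::atomic<HeapObject*>& slot, HeapObject* object,
                           HeapObject* forwarded) {
  (void)object;
  slot.store(forwarded, std::memory_order_release);
}

// Restored control flow: scavenge, then re-derive the slot disposition.
static SlotCallbackResult CheckAndScavengeObject(std::atomic<HeapObject*>& slot,
                                                 HeapObject* forwarded) {
  HeapObject* object = slot.load(std::memory_order_relaxed);
  if (object->space == kFromSpace) {
    ScavengeObject(slot, object, forwarded);
    // Reload the slot: the callback no longer tells us where the object went.
    object = slot.load(std::memory_order_acquire);  // cf. PageMemoryFence
    if (object->space == kToSpace) return KEEP_SLOT;  // still in new space
  } else if (object->space == kToSpace) {
    return KEEP_SLOT;  // slot already updated while draining the work list
  }
  return REMOVE_SLOT;
}

int main() {
  HeapObject from{kFromSpace}, to{kToSpace}, old_obj{kOldSpace};
  std::atomic<HeapObject*> slot{&from};
  SlotCallbackResult kept = CheckAndScavengeObject(slot, &to);      // KEEP_SLOT
  slot.store(&from);
  SlotCallbackResult removed = CheckAndScavengeObject(slot, &old_obj);  // REMOVE_SLOT
  return (kept == KEEP_SLOT && removed == REMOVE_SLOT) ? 0 : 1;
}
```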
@@ -53,13 +53,15 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
       scavenger_->PageMemoryFence(reinterpret_cast<MaybeObject*>(target));
 
       if (Heap::InFromSpace(target)) {
-        SlotCallbackResult result = scavenger_->ScavengeObject(slot, target);
+        scavenger_->ScavengeObject(slot, target);
         bool success = (*slot)->ToStrongOrWeakHeapObject(&target);
         USE(success);
         DCHECK(success);
+        scavenger_->PageMemoryFence(reinterpret_cast<MaybeObject*>(target));
 
-        if (result == KEEP_SLOT) {
+        if (Heap::InNewSpace(target)) {
           SLOW_DCHECK(target->IsHeapObject());
+          SLOW_DCHECK(Heap::InToSpace(target));
           RememberedSet<OLD_TO_NEW>::Insert(Page::FromAddress(slot_address),
                                             slot_address);
         }
...
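
In the visitor hunk above, any slot of a promoted object that still points into new space is re-registered in the OLD_TO_NEW remembered set. A simplified, self-contained stand-in for that bookkeeping, assuming page-bucketed slot storage and a hypothetical 512 KB power-of-two page size:

```cpp
#include <cstdint>
#include <map>
#include <set>

using Address = std::uintptr_t;
constexpr Address kPageSize = Address{1} << 19;  // assumed page granularity

struct OldToNewRememberedSet {
  // Slots bucketed by the page that contains them, mirroring the shape of
  // RememberedSet<OLD_TO_NEW>::Insert(Page::FromAddress(slot), slot).
  std::map<Address, std::set<Address>> slots_by_page;

  // Mask off the low bits to find the containing page (assumes power-of-two
  // page alignment, as Page::FromAddress does in the real code).
  static Address PageFromAddress(Address a) { return a & ~(kPageSize - 1); }

  void Insert(Address slot_address) {
    slots_by_page[PageFromAddress(slot_address)].insert(slot_address);
  }
};

int main() {
  OldToNewRememberedSet remembered_set;
  remembered_set.Insert(0x80042);  // two slots on the same page ...
  remembered_set.Insert(0x80048);
  return static_cast<int>(remembered_set.slots_by_page.size());  // ... one bucket
}
```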
@@ -61,8 +61,7 @@ class Scavenger {
   // Scavenges an object |object| referenced from slot |p|. |object| is required
   // to be in from space.
-  inline SlotCallbackResult ScavengeObject(HeapObjectReference** p,
-                                           HeapObject* object);
+  inline void ScavengeObject(HeapObjectReference** p, HeapObject* object);
 
   // Copies |source| to |target| and sets the forwarding pointer in |source|.
   V8_INLINE bool MigrateObject(Map* map, HeapObject* source, HeapObject* target,
@@ -74,24 +73,22 @@ class Scavenger {
   V8_INLINE bool PromoteObject(Map* map, HeapObjectReference** slot,
                                HeapObject* object, int object_size);
 
-  V8_INLINE SlotCallbackResult EvacuateObject(HeapObjectReference** slot,
-                                              Map* map, HeapObject* source);
+  V8_INLINE void EvacuateObject(HeapObjectReference** slot, Map* map,
+                                HeapObject* source);
 
   // Different cases for object evacuation.
-  V8_INLINE SlotCallbackResult EvacuateObjectDefault(Map* map,
-                                                     HeapObjectReference** slot,
-                                                     HeapObject* object,
-                                                     int object_size);
+  V8_INLINE void EvacuateObjectDefault(Map* map, HeapObjectReference** slot,
+                                       HeapObject* object, int object_size);
 
-  inline SlotCallbackResult EvacuateThinString(Map* map, HeapObject** slot,
-                                               ThinString* object,
-                                               int object_size);
+  V8_INLINE void EvacuateJSFunction(Map* map, HeapObject** slot,
+                                    JSFunction* object, int object_size);
 
-  inline SlotCallbackResult EvacuateShortcutCandidate(Map* map,
-                                                      HeapObject** slot,
-                                                      ConsString* object,
-                                                      int object_size);
+  inline void EvacuateThinString(Map* map, HeapObject** slot,
+                                 ThinString* object, int object_size);
+
+  inline void EvacuateShortcutCandidate(Map* map, HeapObject** slot,
+                                        ConsString* object, int object_size);
 
   void IterateAndScavengePromotedObject(HeapObject* target, int size);
...
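
For completeness: CheckAndScavengeObject still returns a SlotCallbackResult, which the remembered-set iteration uses to decide whether each slot survives. A sketch of that consuming contract as a filter-style iterator under assumed semantics, not the actual RememberedSet API:

```cpp
#include <algorithm>
#include <cstdint>
#include <functional>
#include <vector>

enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT };

// Drop every slot whose callback answers REMOVE_SLOT; keep the rest.
void IterateWithFilter(
    std::vector<std::uintptr_t>& slots,
    const std::function<SlotCallbackResult(std::uintptr_t)>& callback) {
  slots.erase(
      std::remove_if(slots.begin(), slots.end(),
                     [&](std::uintptr_t s) { return callback(s) == REMOVE_SLOT; }),
      slots.end());
}

int main() {
  std::vector<std::uintptr_t> slots{0x1000, 0x2000, 0x3000};
  // Keep slots at multiples of 0x2000, standing in for "target still in
  // new space, slot must survive until the next scavenge".
  IterateWithFilter(slots, [](std::uintptr_t s) {
    return (s % 0x2000) == 0 ? KEEP_SLOT : REMOVE_SLOT;
  });
  return static_cast<int>(slots.size());  // 1: only 0x2000 is kept
}
```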