Commit 067afa54 authored by Hannes Payer, committed by Commit Bot

[heap] Reland: Reuse object evacuation information for slot recording in Scavenger.

This reverts commit d6de4af5.

Bug: chromium:852420
Change-Id: Ife02a0e47ddb4a136c552965d9e01c6a0ef18e02
Reviewed-on: https://chromium-review.googlesource.com/1194363
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Hannes Payer <hpayer@chromium.org>
Cr-Commit-Position: refs/heads/master@{#55470}
parent 4b0f5711
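
In short, the change threads the evacuation outcome through to the slot callback: the copy/promotion routines now report where an object landed, and that answer is translated directly into KEEP_SLOT or REMOVE_SLOT instead of re-reading the slot afterwards. A minimal, self-contained sketch of the pattern (simplified: assert in place of DCHECK, no V8 types):

#include <cassert>

enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT };

// Where did the evacuated object end up?
enum class CopyAndForwardResult {
  SUCCESS_YOUNG_GENERATION,  // copied within the young generation
  SUCCESS_OLD_GENERATION,    // promoted to the old generation
  FAILURE                    // allocation failed; the caller tries a fallback
};

// Only slots that still point into the young generation need a
// remembered-set entry.
SlotCallbackResult RememberedSetEntryNeeded(CopyAndForwardResult result) {
  assert(result != CopyAndForwardResult::FAILURE);
  return result == CopyAndForwardResult::SUCCESS_YOUNG_GENERATION
             ? KEEP_SLOT
             : REMOVE_SLOT;
}
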
@@ -71,8 +71,10 @@ bool Scavenger::MigrateObject(Map* map, HeapObject* source, HeapObject* target,
return true;
}
bool Scavenger::SemiSpaceCopyObject(Map* map, HeapObjectReference** slot,
HeapObject* object, int object_size) {
CopyAndForwardResult Scavenger::SemiSpaceCopyObject(Map* map,
HeapObjectReference** slot,
HeapObject* object,
int object_size) {
DCHECK(heap()->AllowedToBeMigrated(object, NEW_SPACE));
AllocationAlignment alignment = HeapObject::RequiredAlignment(map);
AllocationResult allocation =
@@ -85,21 +87,26 @@ bool Scavenger::SemiSpaceCopyObject(Map* map, HeapObjectReference** slot,
const bool self_success = MigrateObject(map, object, target, object_size);
if (!self_success) {
allocator_.FreeLast(NEW_SPACE, target, object_size);
MapWord map_word = object->map_word();
MapWord map_word = object->synchronized_map_word();
HeapObjectReference::Update(slot, map_word.ToForwardingAddress());
return true;
DCHECK(!Heap::InFromSpace(*slot));
return Heap::InToSpace(*slot)
? CopyAndForwardResult::SUCCESS_YOUNG_GENERATION
: CopyAndForwardResult::SUCCESS_OLD_GENERATION;
}
HeapObjectReference::Update(slot, target);
copied_list_.Push(ObjectAndSize(target, object_size));
copied_size_ += object_size;
return true;
return CopyAndForwardResult::SUCCESS_YOUNG_GENERATION;
}
return false;
return CopyAndForwardResult::FAILURE;
}
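
A note on the map_word() to synchronized_map_word() and Relaxed_Store to Release_Store switches in this diff: when two scavenger threads race to evacuate the same object, the losing thread reads the winning thread's forwarding pointer, and that read must not be reordered ahead of the winner publishing the copy. A hedged sketch of the required release/acquire pairing using std::atomic (V8 itself uses base::AsAtomicPointer and MapWord, not std::atomic):

#include <atomic>
#include <cstdint>

std::atomic<uintptr_t> map_word_slot;  // stand-in for an object's map word

// Winner: a release store ensures everything written before it (the copied
// payload) is visible to any thread that acquire-loads the forwarding address.
void PublishForwarding(uintptr_t forwarding_address) {
  map_word_slot.store(forwarding_address, std::memory_order_release);
}

// Loser: the acquire load pairs with the winner's release store, so the
// copied object's contents are safe to read afterwards.
uintptr_t ReadForwarding() {
  return map_word_slot.load(std::memory_order_acquire);
}
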
bool Scavenger::PromoteObject(Map* map, HeapObjectReference** slot,
HeapObject* object, int object_size) {
CopyAndForwardResult Scavenger::PromoteObject(Map* map,
HeapObjectReference** slot,
HeapObject* object,
int object_size) {
AllocationAlignment alignment = HeapObject::RequiredAlignment(map);
AllocationResult allocation =
allocator_.Allocate(OLD_SPACE, object_size, alignment);
@@ -111,61 +118,90 @@ bool Scavenger::PromoteObject(Map* map, HeapObjectReference** slot,
const bool self_success = MigrateObject(map, object, target, object_size);
if (!self_success) {
allocator_.FreeLast(OLD_SPACE, target, object_size);
MapWord map_word = object->map_word();
MapWord map_word = object->synchronized_map_word();
HeapObjectReference::Update(slot, map_word.ToForwardingAddress());
return true;
DCHECK(!Heap::InFromSpace(*slot));
return Heap::InToSpace(*slot)
? CopyAndForwardResult::SUCCESS_YOUNG_GENERATION
: CopyAndForwardResult::SUCCESS_OLD_GENERATION;
}
HeapObjectReference::Update(slot, target);
if (!ContainsOnlyData(map->visitor_id())) {
promotion_list_.Push(ObjectAndSize(target, object_size));
}
promoted_size_ += object_size;
return true;
return CopyAndForwardResult::SUCCESS_OLD_GENERATION;
}
return false;
return CopyAndForwardResult::FAILURE;
}
void Scavenger::EvacuateObjectDefault(Map* map, HeapObjectReference** slot,
HeapObject* object, int object_size) {
SlotCallbackResult Scavenger::RememberedSetEntryNeeded(
CopyAndForwardResult result) {
DCHECK_NE(CopyAndForwardResult::FAILURE, result);
return result == CopyAndForwardResult::SUCCESS_YOUNG_GENERATION ? KEEP_SLOT
: REMOVE_SLOT;
}
SlotCallbackResult Scavenger::EvacuateObjectDefault(Map* map,
HeapObjectReference** slot,
HeapObject* object,
int object_size) {
SLOW_DCHECK(object_size <= Page::kAllocatableMemory);
SLOW_DCHECK(object->SizeFromMap(map) == object_size);
CopyAndForwardResult result;
if (!heap()->ShouldBePromoted(object->address())) {
// A semi-space copy may fail due to fragmentation. In that case, we
// try to promote the object.
if (SemiSpaceCopyObject(map, slot, object, object_size)) return;
result = SemiSpaceCopyObject(map, slot, object, object_size);
if (result != CopyAndForwardResult::FAILURE) {
return RememberedSetEntryNeeded(result);
}
}
if (PromoteObject(map, slot, object, object_size)) return;
// We may want to promote this object if the object was already semi-space
// copied in a previous young generation GC or if the semi-space copy above
// failed.
result = PromoteObject(map, slot, object, object_size);
if (result != CopyAndForwardResult::FAILURE) {
return RememberedSetEntryNeeded(result);
}
// If promotion failed, we try to copy the object to the other semi-space
if (SemiSpaceCopyObject(map, slot, object, object_size)) return;
// If promotion failed, we try to copy the object to the other semi-space.
result = SemiSpaceCopyObject(map, slot, object, object_size);
if (result != CopyAndForwardResult::FAILURE) {
return RememberedSetEntryNeeded(result);
}
heap()->FatalProcessOutOfMemory("Scavenger: semi-space copy");
UNREACHABLE();
}
void Scavenger::EvacuateThinString(Map* map, HeapObject** slot,
ThinString* object, int object_size) {
SlotCallbackResult Scavenger::EvacuateThinString(Map* map, HeapObject** slot,
ThinString* object,
int object_size) {
if (!is_incremental_marking_) {
// Loading actual is fine in a parallel setting is there is no write.
// Loading actual is fine in a parallel setting since there is no write.
String* actual = object->actual();
object->set_length(0);
*slot = actual;
// ThinStrings always refer to internalized strings, which are
// always in old space.
DCHECK(!Heap::InNewSpace(actual));
base::AsAtomicPointer::Relaxed_Store(
base::AsAtomicPointer::Release_Store(
reinterpret_cast<Map**>(object->address()),
MapWord::FromForwardingAddress(actual).ToMap());
return;
return REMOVE_SLOT;
}
EvacuateObjectDefault(map, reinterpret_cast<HeapObjectReference**>(slot),
object, object_size);
return EvacuateObjectDefault(
map, reinterpret_cast<HeapObjectReference**>(slot), object, object_size);
}
void Scavenger::EvacuateShortcutCandidate(Map* map, HeapObject** slot,
ConsString* object, int object_size) {
SlotCallbackResult Scavenger::EvacuateShortcutCandidate(Map* map,
HeapObject** slot,
ConsString* object,
int object_size) {
DCHECK(IsShortcutCandidate(map->instance_type()));
if (!is_incremental_marking_ &&
object->unchecked_second() == ReadOnlyRoots(heap()).empty_string()) {
@@ -174,37 +210,38 @@ void Scavenger::EvacuateShortcutCandidate(Map* map, HeapObject** slot,
*slot = first;
if (!Heap::InNewSpace(first)) {
base::AsAtomicPointer::Relaxed_Store(
base::AsAtomicPointer::Release_Store(
reinterpret_cast<Map**>(object->address()),
MapWord::FromForwardingAddress(first).ToMap());
return;
return REMOVE_SLOT;
}
MapWord first_word = first->map_word();
MapWord first_word = first->synchronized_map_word();
if (first_word.IsForwardingAddress()) {
HeapObject* target = first_word.ToForwardingAddress();
*slot = target;
base::AsAtomicPointer::Relaxed_Store(
base::AsAtomicPointer::Release_Store(
reinterpret_cast<Map**>(object->address()),
MapWord::FromForwardingAddress(target).ToMap());
return;
return Heap::InToSpace(target) ? KEEP_SLOT : REMOVE_SLOT;
}
Map* map = first_word.ToMap();
EvacuateObjectDefault(map, reinterpret_cast<HeapObjectReference**>(slot),
first, first->SizeFromMap(map));
base::AsAtomicPointer::Relaxed_Store(
SlotCallbackResult result = EvacuateObjectDefault(
map, reinterpret_cast<HeapObjectReference**>(slot), first,
first->SizeFromMap(map));
base::AsAtomicPointer::Release_Store(
reinterpret_cast<Map**>(object->address()),
MapWord::FromForwardingAddress(*slot).ToMap());
return;
return result;
}
EvacuateObjectDefault(map, reinterpret_cast<HeapObjectReference**>(slot),
object, object_size);
return EvacuateObjectDefault(
map, reinterpret_cast<HeapObjectReference**>(slot), object, object_size);
}
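
Background for the shortcut path above: a ConsString whose second part is the empty string is semantically just its first part, so the scavenger can forward the slot straight to `first` without copying the cons wrapper at all. A toy illustration (hypothetical plain-C++ types, not V8's string classes):

#include <string>

struct ConsString {
  std::string first;
  std::string second;
};

// Degenerate cons ("abc" + "") collapses to its first part.
const std::string* Shortcut(const ConsString& cons) {
  return cons.second.empty() ? &cons.first : nullptr;  // nullptr: no shortcut
}
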
void Scavenger::EvacuateObject(HeapObjectReference** slot, Map* map,
HeapObject* source) {
SlotCallbackResult Scavenger::EvacuateObject(HeapObjectReference** slot,
Map* map, HeapObject* source) {
SLOW_DCHECK(Heap::InFromSpace(source));
SLOW_DCHECK(!MapWord::FromMap(map).IsForwardingAddress());
int size = source->SizeFromMap(map);
@@ -214,22 +251,21 @@ void Scavenger::EvacuateObject(HeapObjectReference** slot, Map* map,
case kVisitThinString:
// At the moment we don't allow weak pointers to thin strings.
DCHECK(!(*slot)->IsWeakHeapObject());
EvacuateThinString(map, reinterpret_cast<HeapObject**>(slot),
reinterpret_cast<ThinString*>(source), size);
break;
return EvacuateThinString(map, reinterpret_cast<HeapObject**>(slot),
reinterpret_cast<ThinString*>(source), size);
case kVisitShortcutCandidate:
DCHECK(!(*slot)->IsWeakHeapObject());
// At the moment we don't allow weak pointers to cons strings.
EvacuateShortcutCandidate(map, reinterpret_cast<HeapObject**>(slot),
reinterpret_cast<ConsString*>(source), size);
break;
return EvacuateShortcutCandidate(
map, reinterpret_cast<HeapObject**>(slot),
reinterpret_cast<ConsString*>(source), size);
default:
EvacuateObjectDefault(map, slot, source, size);
break;
return EvacuateObjectDefault(map, slot, source, size);
}
}
void Scavenger::ScavengeObject(HeapObjectReference** p, HeapObject* object) {
SlotCallbackResult Scavenger::ScavengeObject(HeapObjectReference** p,
HeapObject* object) {
DCHECK(Heap::InFromSpace(object));
// Synchronized load that consumes the publishing CAS of MigrateObject.
@@ -246,14 +282,15 @@ void Scavenger::ScavengeObject(HeapObjectReference** p, HeapObject* object) {
DCHECK((*p)->IsStrongHeapObject());
*p = HeapObjectReference::Strong(dest);
}
return;
DCHECK(Heap::InToSpace(dest) || !Heap::InNewSpace((dest)));
return Heap::InToSpace(dest) ? KEEP_SLOT : REMOVE_SLOT;
}
Map* map = first_word.ToMap();
// AllocationMementos are unrooted and shouldn't survive a scavenge
DCHECK_NE(ReadOnlyRoots(heap()).allocation_memento_map(), map);
// Call the slow part of scavenge object.
EvacuateObject(p, map, object);
return EvacuateObject(p, map, object);
}
SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap,
@@ -267,17 +304,10 @@ SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap,
DCHECK(success);
DCHECK(heap_object->IsHeapObject());
ScavengeObject(reinterpret_cast<HeapObjectReference**>(slot), heap_object);
object = *slot;
// If the object was in from space before and is after executing the
// callback in to space, the object is still live.
// Unfortunately, we do not know about the slot. It could be in a
// just freed free space object.
PageMemoryFence(object);
if (Heap::InToSpace(object)) {
return KEEP_SLOT;
}
SlotCallbackResult result = ScavengeObject(
reinterpret_cast<HeapObjectReference**>(slot), heap_object);
DCHECK_IMPLIES(result == REMOVE_SLOT, !Heap::InNewSpace(*slot));
return result;
} else if (Heap::InToSpace(object)) {
// Already updated slot. This can happen when processing of the work list
// is interleaved with processing roots.
......
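
For context on the simplification above: RememberedSet<OLD_TO_NEW>::Iterate keeps or drops each slot based on the SlotCallbackResult the callback returns, so forwarding ScavengeObject's result replaces the old reload-and-recheck sequence. A simplified sketch of that filtering contract (hypothetical flat container, not V8's bucketed SlotSet):

#include <vector>

enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT };

// Keep exactly the slots whose callback answers KEEP_SLOT, compacting the
// survivors in place.
template <typename Callback>
void IterateSlots(std::vector<void**>& slots, Callback callback) {
  size_t kept = 0;
  for (void** slot : slots) {
    if (callback(slot) == KEEP_SLOT) slots[kept++] = slot;
  }
  slots.resize(kept);
}
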
@@ -53,15 +53,13 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
scavenger_->PageMemoryFence(reinterpret_cast<MaybeObject*>(target));
if (Heap::InFromSpace(target)) {
scavenger_->ScavengeObject(slot, target);
SlotCallbackResult result = scavenger_->ScavengeObject(slot, target);
bool success = (*slot)->ToStrongOrWeakHeapObject(&target);
USE(success);
DCHECK(success);
scavenger_->PageMemoryFence(reinterpret_cast<MaybeObject*>(target));
if (Heap::InNewSpace(target)) {
if (result == KEEP_SLOT) {
SLOW_DCHECK(target->IsHeapObject());
SLOW_DCHECK(Heap::InToSpace(target));
RememberedSet<OLD_TO_NEW>::Insert(Page::FromAddress(slot_address),
slot_address);
}
......
@@ -16,6 +16,12 @@ namespace internal {
class OneshotBarrier;
enum class CopyAndForwardResult {
SUCCESS_YOUNG_GENERATION,
SUCCESS_OLD_GENERATION,
FAILURE
};
class Scavenger {
public:
static const int kCopiedListSegmentSize = 256;
@@ -61,34 +67,43 @@ class Scavenger {
// Scavenges an object |object| referenced from slot |p|. |object| is required
// to be in from space.
inline void ScavengeObject(HeapObjectReference** p, HeapObject* object);
inline SlotCallbackResult ScavengeObject(HeapObjectReference** p,
HeapObject* object);
// Copies |source| to |target| and sets the forwarding pointer in |source|.
V8_INLINE bool MigrateObject(Map* map, HeapObject* source, HeapObject* target,
int size);
V8_INLINE bool SemiSpaceCopyObject(Map* map, HeapObjectReference** slot,
HeapObject* object, int object_size);
V8_INLINE SlotCallbackResult
RememberedSetEntryNeeded(CopyAndForwardResult result);
V8_INLINE bool PromoteObject(Map* map, HeapObjectReference** slot,
HeapObject* object, int object_size);
V8_INLINE CopyAndForwardResult SemiSpaceCopyObject(Map* map,
HeapObjectReference** slot,
HeapObject* object,
int object_size);
V8_INLINE void EvacuateObject(HeapObjectReference** slot, Map* map,
HeapObject* source);
// Different cases for object evacuation.
V8_INLINE CopyAndForwardResult PromoteObject(Map* map,
HeapObjectReference** slot,
HeapObject* object,
int object_size);
V8_INLINE void EvacuateObjectDefault(Map* map, HeapObjectReference** slot,
HeapObject* object, int object_size);
V8_INLINE SlotCallbackResult EvacuateObject(HeapObjectReference** slot,
Map* map, HeapObject* source);
V8_INLINE void EvacuateJSFunction(Map* map, HeapObject** slot,
JSFunction* object, int object_size);
inline void EvacuateThinString(Map* map, HeapObject** slot,
ThinString* object, int object_size);
inline void EvacuateShortcutCandidate(Map* map, HeapObject** slot,
ConsString* object, int object_size);
// Different cases for object evacuation.
V8_INLINE SlotCallbackResult EvacuateObjectDefault(Map* map,
HeapObjectReference** slot,
HeapObject* object,
int object_size);
inline SlotCallbackResult EvacuateThinString(Map* map, HeapObject** slot,
ThinString* object,
int object_size);
inline SlotCallbackResult EvacuateShortcutCandidate(Map* map,
HeapObject** slot,
ConsString* object,
int object_size);
void IterateAndScavengePromotedObject(HeapObject* target, int size);
......