Commit ebc98f7f authored by Michael Lippautz, committed by Commit Bot

[heap] Redo scavenging logic

Replace the second-level visitation with much simpler logic that
dispatches the special cases separately. All other cases use a
single path that evacuates an object based on its size.

This is similar to the logic used in the mark-compact collector. The
goal is to align the behaviors as much as possible, highlighting and
fixing performance issues in the places where they differ.

This CL is kept as mechanical as possible. A follow-up will clean
up the naming scheme and the dispatching.

Bug: chromium:738368
Change-Id: Ia5a426c5ebb25230000b127580c300c97cff8b1b
Reviewed-on: https://chromium-review.googlesource.com/558060
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#46364}
parent 498964cd
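
For orientation, a minimal standalone sketch of the new dispatch shape follows. It is not V8 code: the enum, the struct, and the printf bodies are placeholders, and only the structure and the Evacuate* names mirror the functions added in this CL.

// Standalone model (not V8 code) of the single-switch dispatch that replaces
// the second-level visitor table: a handful of special cases, and a default
// path that evacuates purely by size.
#include <cstdio>

enum VisitorId {
  kVisitThinString,
  kVisitShortcutCandidate,
  kVisitJSFunction,
  kVisitOther  // stands in for every remaining visitor id
};

struct Object {
  VisitorId visitor_id;
  int size;
};

void EvacuateObjectDefault(const Object& o) {
  std::printf("default evacuation of %d bytes\n", o.size);
}

void EvacuateThinString(const Object& o) {
  std::printf("thin string (%d bytes): forward the slot to the actual string\n", o.size);
}

void EvacuateShortcutCandidate(const Object& o) {
  std::printf("cons string (%d bytes): try to short-circuit to the first part\n", o.size);
}

void EvacuateJSFunction(const Object& o) {
  EvacuateObjectDefault(o);
  std::printf("plus: record the code entry slot when marking\n");
}

// Mirrors the shape of Scavenger::EvacuateObject in the diff: one switch, no table.
void EvacuateObject(const Object& o) {
  switch (o.visitor_id) {
    case kVisitThinString:
      EvacuateThinString(o);
      break;
    case kVisitShortcutCandidate:
      EvacuateShortcutCandidate(o);
      break;
    case kVisitJSFunction:
      EvacuateJSFunction(o);
      break;
    default:
      EvacuateObjectDefault(o);
      break;
  }
}

int main() {
  EvacuateObject({kVisitJSFunction, 64});
  EvacuateObject({kVisitOther, 32});
  return 0;
}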
......@@ -1767,7 +1767,7 @@ void Heap::Scavenge() {
// Used for updating survived_since_last_expansion_ at function end.
size_t survived_watermark = PromotedSpaceSizeOfObjects();
scavenge_collector_->SelectScavengingVisitorsTable();
scavenge_collector_->UpdateConstraints();
// Flip the semispaces. After flipping, to space is empty, from space has
// live objects.
......@@ -5732,13 +5732,6 @@ void Heap::DisableInlineAllocation() {
}
}
V8_DECLARE_ONCE(initialize_gc_once);
static void InitializeGCOnce() {
Scavenger::Initialize();
}
bool Heap::SetUp() {
#ifdef DEBUG
allocation_timeout_ = FLAG_gc_interval;
......@@ -5756,8 +5749,6 @@ bool Heap::SetUp() {
if (!ConfigureHeapDefault()) return false;
}
base::CallOnce(&initialize_gc_once, &InitializeGCOnce);
// Set up memory allocator.
memory_allocator_ = new MemoryAllocator(isolate_);
if (!memory_allocator_->SetUp(MaxReserved(), code_range_size_)) return false;
......
......@@ -10,6 +10,226 @@
namespace v8 {
namespace internal {
bool Scavenger::ContainsOnlyData(VisitorId visitor_id) {
switch (visitor_id) {
case kVisitSeqOneByteString:
return true;
case kVisitSeqTwoByteString:
return true;
case kVisitByteArray:
return true;
case kVisitFixedDoubleArray:
return true;
case kVisitDataObject:
return true;
default:
break;
}
return false;
}
// Helper function used by CopyObject to copy a source object to an
// allocated target object and update the forwarding pointer in the source
// object. Returns the target object.
HeapObject* Scavenger::MigrateObject(HeapObject* source, HeapObject* target,
int size) {
// If we migrate into to-space, then the to-space top pointer should be
// right after the target object. Incorporate double alignment
// over-allocation.
DCHECK(!heap()->InToSpace(target) ||
target->address() + size == heap()->new_space()->top() ||
target->address() + size + kPointerSize == heap()->new_space()->top());
// Make sure that we do not overwrite the promotion queue which is at
// the end of to-space.
DCHECK(!heap()->InToSpace(target) ||
heap()->promotion_queue()->IsBelowPromotionQueue(
heap()->new_space()->top()));
// Copy the content of source to target.
heap()->CopyBlock(target->address(), source->address(), size);
// Set the forwarding address.
source->set_map_word(MapWord::FromForwardingAddress(target));
if (V8_UNLIKELY(is_logging_)) {
// Update NewSpace stats if necessary.
RecordCopiedObject(target);
heap()->OnMoveEvent(target, source, size);
}
if (is_incremental_marking_) {
heap()->incremental_marking()->TransferColor(source, target);
}
return target;
}
bool Scavenger::SemiSpaceCopyObject(Map* map, HeapObject** slot,
HeapObject* object, int object_size) {
DCHECK(heap()->AllowedToBeMigrated(object, NEW_SPACE));
AllocationAlignment alignment = object->RequiredAlignment();
AllocationResult allocation =
heap()->new_space()->AllocateRaw(object_size, alignment);
HeapObject* target = NULL; // Initialization to please compiler.
if (allocation.To(&target)) {
// Order is important here: Set the promotion limit before storing a
// filler for double alignment or migrating the object. Otherwise we
// may end up overwriting promotion queue entries when we migrate the
// object.
heap()->promotion_queue()->SetNewLimit(heap()->new_space()->top());
MigrateObject(object, target, object_size);
// Update slot to new target.
*slot = target;
heap()->IncrementSemiSpaceCopiedObjectSize(object_size);
return true;
}
return false;
}
bool Scavenger::PromoteObject(Map* map, HeapObject** slot, HeapObject* object,
int object_size) {
AllocationAlignment alignment = object->RequiredAlignment();
AllocationResult allocation =
heap()->old_space()->AllocateRaw(object_size, alignment);
HeapObject* target = NULL; // Initialization to please compiler.
if (allocation.To(&target)) {
DCHECK(ObjectMarking::IsWhite(
target, heap()->mark_compact_collector()->marking_state(target)));
MigrateObject(object, target, object_size);
// Update the slot to the new target using CAS. A concurrent sweeper thread
// may filter the slot concurrently.
HeapObject* old = *slot;
base::Release_CompareAndSwap(reinterpret_cast<base::AtomicWord*>(slot),
reinterpret_cast<base::AtomicWord>(old),
reinterpret_cast<base::AtomicWord>(target));
if (!ContainsOnlyData(static_cast<VisitorId>(map->visitor_id()))) {
heap()->promotion_queue()->insert(target, object_size);
}
heap()->IncrementPromotedObjectsSize(object_size);
return true;
}
return false;
}
void Scavenger::EvacuateObjectDefault(Map* map, HeapObject** slot,
HeapObject* object, int object_size) {
SLOW_DCHECK(object_size <= Page::kAllocatableMemory);
SLOW_DCHECK(object->Size() == object_size);
if (!heap()->ShouldBePromoted(object->address())) {
// A semi-space copy may fail due to fragmentation. In that case, we
// try to promote the object.
if (SemiSpaceCopyObject(map, slot, object, object_size)) {
return;
}
}
if (PromoteObject(map, slot, object, object_size)) {
return;
}
// If promotion failed, we try to copy the object to the other semi-space.
if (SemiSpaceCopyObject(map, slot, object, object_size)) return;
FatalProcessOutOfMemory("Scavenger: semi-space copy\n");
}
void Scavenger::EvacuateJSFunction(Map* map, HeapObject** slot,
JSFunction* object, int object_size) {
EvacuateObjectDefault(map, slot, object, object_size);
if (!is_incremental_marking_) return;
MapWord map_word = object->map_word();
DCHECK(map_word.IsForwardingAddress());
HeapObject* target = map_word.ToForwardingAddress();
// TODO(mlippautz): Notify collector of this object so we don't have to
// retrieve the state out of thin air.
if (ObjectMarking::IsBlack(target, MarkingState::Internal(target))) {
// This object is black and it might not be rescanned by marker.
// We should explicitly record code entry slot for compaction because
// promotion queue processing (IteratePromotedObjectPointers) will
// miss it as it is not HeapObject-tagged.
Address code_entry_slot = target->address() + JSFunction::kCodeEntryOffset;
Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot));
heap()->mark_compact_collector()->RecordCodeEntrySlot(
target, code_entry_slot, code);
}
}
void Scavenger::EvacuateThinString(Map* map, HeapObject** slot,
ThinString* object, int object_size) {
if (!is_incremental_marking_) {
HeapObject* actual = object->actual();
*slot = actual;
// ThinStrings always refer to internalized strings, which are
// always in old space.
DCHECK(!map->GetHeap()->InNewSpace(actual));
object->set_map_word(MapWord::FromForwardingAddress(actual));
return;
}
EvacuateObjectDefault(map, slot, object, object_size);
}
void Scavenger::EvacuateShortcutCandidate(Map* map, HeapObject** slot,
ConsString* object, int object_size) {
DCHECK(IsShortcutCandidate(map->instance_type()));
if (!is_incremental_marking_ &&
object->unchecked_second() == heap()->empty_string()) {
HeapObject* first = HeapObject::cast(object->unchecked_first());
*slot = first;
if (!heap()->InNewSpace(first)) {
object->set_map_word(MapWord::FromForwardingAddress(first));
return;
}
MapWord first_word = first->map_word();
if (first_word.IsForwardingAddress()) {
HeapObject* target = first_word.ToForwardingAddress();
*slot = target;
object->set_map_word(MapWord::FromForwardingAddress(target));
return;
}
Scavenger::ScavengeObjectSlow(slot, first);
object->set_map_word(MapWord::FromForwardingAddress(*slot));
return;
}
EvacuateObjectDefault(map, slot, object, object_size);
}
void Scavenger::EvacuateObject(HeapObject** slot, Map* map,
HeapObject* source) {
int size = source->SizeFromMap(map);
switch (static_cast<VisitorId>(map->visitor_id())) {
case kVisitThinString:
EvacuateThinString(map, slot, ThinString::cast(source), size);
break;
case kVisitShortcutCandidate:
EvacuateShortcutCandidate(map, slot, ConsString::cast(source), size);
break;
case kVisitJSFunction:
EvacuateJSFunction(map, slot, JSFunction::cast(source), size);
break;
default:
EvacuateObjectDefault(map, slot, source, size);
break;
}
}
void Scavenger::ScavengeObject(HeapObject** p, HeapObject* object) {
DCHECK(object->GetIsolate()->heap()->InFromSpace(object));
......@@ -43,7 +263,7 @@ void Scavenger::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
SLOW_DCHECK(!first_word.IsForwardingAddress());
Map* map = first_word.ToMap();
Scavenger* scavenger = map->GetHeap()->scavenge_collector_;
scavenger->scavenging_visitors_table_.GetVisitor(map)(map, p, object);
scavenger->EvacuateObject(p, map, object);
}
SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap,
......
......@@ -11,15 +11,13 @@
namespace v8 {
namespace internal {
typedef void (*ScavengingCallback)(Map* map, HeapObject** slot,
HeapObject* object);
class Scavenger {
public:
explicit Scavenger(Heap* heap) : heap_(heap) {}
explicit Scavenger(Heap* heap)
: heap_(heap), is_logging_(false), is_incremental_marking_(false) {}
// Initializes static visitor dispatch tables.
static void Initialize();
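// Entry point of the new single-switch dispatch: routes to a special-case
// Evacuate* routine, or to size-based default evacuation, via the map's
// visitor id.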
V8_INLINE void EvacuateObject(HeapObject** slot, Map* map,
HeapObject* source);
// Callback function passed to Heap::Iterate etc. Copies an object if
// necessary; the object might be promoted to an old space. The caller must
......@@ -32,16 +30,43 @@ class Scavenger {
// Slow part of {ScavengeObject} above.
static inline void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
// Chooses an appropriate static visitor table depending on the current state
// of the heap (i.e. incremental marking, logging and profiling).
void SelectScavengingVisitorsTable();
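// Refreshes is_logging_ and is_incremental_marking_ from the current heap
// state (incremental marking, logging and profiling) before scavenging.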
void UpdateConstraints();
Isolate* isolate();
Heap* heap() { return heap_; }
private:
// Whitelist of visitor ids for objects that are known to contain only data.
V8_INLINE static bool ContainsOnlyData(VisitorId visitor_id);
void RecordCopiedObject(HeapObject* obj);
V8_INLINE HeapObject* MigrateObject(HeapObject* source, HeapObject* target,
int size);
V8_INLINE bool SemiSpaceCopyObject(Map* map, HeapObject** slot,
HeapObject* object, int object_size);
V8_INLINE bool PromoteObject(Map* map, HeapObject** slot, HeapObject* object,
int object_size);
V8_INLINE void EvacuateObjectDefault(Map* map, HeapObject** slot,
HeapObject* object, int object_size);
// Special cases.
V8_INLINE void EvacuateJSFunction(Map* map, HeapObject** slot,
JSFunction* object, int object_size);
V8_INLINE void EvacuateThinString(Map* map, HeapObject** slot,
ThinString* object, int object_size);
V8_INLINE void EvacuateShortcutCandidate(Map* map, HeapObject** slot,
ConsString* object, int object_size);
Heap* heap_;
VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;
bool is_logging_;
bool is_incremental_marking_;
};
// Helper class for turning the scavenger into an object visitor that is also
......