Commit 5337b905 authored by Ulan Degenbaev, committed by Commit Bot

[heap] Record slots in concurrent marker and enable compaction.

BUG=chromium:694255

Change-Id: I25ac134ea2e6f9af13f18e2da819b6d368497646
Reviewed-on: https://chromium-review.googlesource.com/593009
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#47051}
parent 0eb28cd5
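With slots to evacuation candidates now recorded during concurrent marking, the DEFINE_IMPLICATION(concurrent_marking, never_compact) workaround removed below is no longer needed. As a rough illustration of why a compacting collector needs slot recording at all, here is a minimal sketch with invented types (not V8 code): slots noted during marking are rewritten after their target objects have been evacuated.

// Toy model of slot recording for a compacting GC. Everything here is a
// simplified assumption for illustration; none of these types exist in V8.
#include <unordered_map>
#include <unordered_set>

struct ToyObject {};

class ToySlotSet {
 public:
  // Called for every pointer slot visited during marking. Only slots whose
  // target sits on a page selected for evacuation need to be remembered.
  void RecordSlot(ToyObject** slot, bool target_on_evacuation_candidate) {
    if (target_on_evacuation_candidate) slots_.insert(slot);
  }

  // After evacuation, every recorded slot is rewritten through the
  // forwarding map so it points at the object's new location.
  void UpdateAfterEvacuation(
      const std::unordered_map<ToyObject*, ToyObject*>& forwarding) {
    for (ToyObject** slot : slots_) {
      auto it = forwarding.find(*slot);
      if (it != forwarding.end()) *slot = it->second;
    }
    slots_.clear();
  }

 private:
  std::unordered_set<ToyObject**> slots_;
};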
@@ -678,9 +678,6 @@ DEFINE_INT(v8_os_page_size, 0, "override OS page size (in KBytes)")
DEFINE_BOOL(always_compact, false, "Perform compaction on every full GC")
DEFINE_BOOL(never_compact, false,
"Never perform compaction on full GC - testing only")
// TODO(ulan): enable compaction for concurrent marking when it correctly
// records slots to evacuation candidates.
DEFINE_IMPLICATION(concurrent_marking, never_compact)
DEFINE_BOOL(compact_code_space, true, "Compact code space on full collections")
DEFINE_BOOL(cleanup_code_caches_at_gc, true,
"Flush code caches in maps during mark compact cycle.")
......
@@ -9,6 +9,8 @@
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/mark-compact.h"
#include "src/heap/marking.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/objects-visiting.h"
@@ -62,19 +64,21 @@ class ConcurrentMarkingVisitor final
}
void VisitPointers(HeapObject* host, Object** start, Object** end) override {
for (Object** p = start; p < end; p++) {
Object* object = reinterpret_cast<Object*>(
base::Relaxed_Load(reinterpret_cast<const base::AtomicWord*>(p)));
for (Object** slot = start; slot < end; slot++) {
Object* object = base::AsAtomicWord::Relaxed_Load(slot);
if (!object->IsHeapObject()) continue;
MarkObject(HeapObject::cast(object));
MarkCompactCollector::RecordSlot(host, slot, object);
}
}
void VisitPointersInSnapshot(const SlotSnapshot& snapshot) {
void VisitPointersInSnapshot(HeapObject* host, const SlotSnapshot& snapshot) {
for (int i = 0; i < snapshot.number_of_slots(); i++) {
Object** slot = snapshot.slot(i);
Object* object = snapshot.value(i);
if (!object->IsHeapObject()) continue;
MarkObject(HeapObject::cast(object));
MarkCompactCollector::RecordSlot(host, slot, object);
}
}
@@ -86,7 +90,7 @@ class ConcurrentMarkingVisitor final
int size = JSObject::BodyDescriptor::SizeOf(map, object);
const SlotSnapshot& snapshot = MakeSlotSnapshot(map, object, size);
if (!ShouldVisit(object)) return 0;
VisitPointersInSnapshot(snapshot);
VisitPointersInSnapshot(object, snapshot);
return size;
}
@@ -101,7 +105,7 @@ class ConcurrentMarkingVisitor final
VisitMapPointer(object, object->map_slot());
// It is OK to iterate the body of JS API objects here because they do not
// have unboxed double fields.
DCHECK(map->HasFastPointerLayout());
DCHECK_IMPLIES(FLAG_unbox_double_fields, map->HasFastPointerLayout());
JSObject::BodyDescriptor::IterateBody(object, size, this);
// The main thread will do wrapper tracing in Blink.
bailout_.Push(object);
@@ -207,7 +211,10 @@ class ConcurrentMarkingVisitor final
HeapObject* value = HeapObject::cast(object->value());
if (ObjectMarking::IsBlackOrGrey<AccessMode::ATOMIC>(
value, marking_state(value))) {
// TODO(ulan): Record slot for value.
// Weak cells with live values are directly processed here to reduce
// the processing time of weak cells during the main GC pause.
Object** slot = HeapObject::RawField(object, WeakCell::kValueOffset);
MarkCompactCollector::RecordSlot(object, slot, value);
} else {
// If we do not know about liveness of values of weak cells, we have to
// process them when we know the liveness of the whole transitive
......
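VisitPointersInSnapshot above now takes the host object so that every slot read from the snapshot can be both marked and recorded. A rough sketch of the snapshot pattern, with invented types and signatures (this is not the V8 implementation):

#include <atomic>
#include <utility>
#include <vector>

struct Obj;  // opaque heap object for the sketch

// A snapshot pairs each slot address with the value read from it, so that
// marking and slot recording see one consistent view even if the mutator
// keeps writing to the object concurrently.
using ToySlotSnapshot = std::vector<std::pair<std::atomic<Obj*>*, Obj*>>;

ToySlotSnapshot TakeSnapshot(std::atomic<Obj*>* start, std::atomic<Obj*>* end) {
  ToySlotSnapshot snapshot;
  for (std::atomic<Obj*>* slot = start; slot < end; ++slot) {
    // Relaxed load, mirroring the Relaxed_Load used by the visitor above.
    snapshot.emplace_back(slot, slot->load(std::memory_order_relaxed));
  }
  return snapshot;
}

void VisitSnapshot(Obj* host, const ToySlotSnapshot& snapshot,
                   void (*mark)(Obj*),
                   void (*record)(Obj*, std::atomic<Obj*>*, Obj*)) {
  for (const auto& entry : snapshot) {
    Obj* value = entry.second;
    if (value == nullptr) continue;     // stands in for the !IsHeapObject() check
    mark(value);                        // grey the target object
    record(host, entry.first, value);   // remember the slot for compaction
  }
}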
@@ -3321,7 +3321,7 @@ HeapObject* Heap::CreateFillerObjectAt(Address addr, int size,
FreeSpace::cast(filler)->relaxed_write_size(size);
}
if (mode == ClearRecordedSlots::kYes) {
ClearRecordedSlotRange(addr, addr + size);
UNREACHABLE();
}
// At this point, we may be deserializing the heap from a snapshot, and
@@ -6402,12 +6402,11 @@ void Heap::CheckHandleCount() {
}
void Heap::ClearRecordedSlot(HeapObject* object, Object** slot) {
if (!InNewSpace(object)) {
Address slot_addr = reinterpret_cast<Address>(slot);
Page* page = Page::FromAddress(slot_addr);
Address slot_addr = reinterpret_cast<Address>(slot);
Page* page = Page::FromAddress(slot_addr);
if (!page->InNewSpace()) {
DCHECK_EQ(page->owner()->identity(), OLD_SPACE);
store_buffer()->DeleteEntry(slot_addr);
RememberedSet<OLD_TO_OLD>::Remove(page, slot_addr);
}
}
@@ -6428,8 +6427,6 @@ void Heap::ClearRecordedSlotRange(Address start, Address end) {
if (!page->InNewSpace()) {
DCHECK_EQ(page->owner()->identity(), OLD_SPACE);
store_buffer()->DeleteEntry(start, end);
RememberedSet<OLD_TO_OLD>::RemoveRange(page, start, end,
SlotSet::FREE_EMPTY_BUCKETS);
}
}
......
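Heap::ClearRecordedSlot above now also removes the slot from the OLD_TO_OLD remembered set, not only from the store buffer. A minimal sketch of the idea, using invented container types in place of V8's page-based slot sets:

#include <cstdint>
#include <set>

using Address = uintptr_t;

struct ToyRememberedSets {
  std::set<Address> old_to_new;  // stands in for the store buffer
  std::set<Address> old_to_old;  // stands in for RememberedSet<OLD_TO_OLD>

  // Clearing a recorded slot must drop it from both sets, otherwise a stale
  // entry could later be "updated" even though the slot no longer holds a
  // heap pointer.
  void ClearRecordedSlot(Address slot_addr) {
    old_to_new.erase(slot_addr);
    old_to_old.erase(slot_addr);
  }
};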
@@ -46,8 +46,9 @@ void MarkCompactCollector::RecordSlot(HeapObject* object, Object** slot,
Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
Page* source_page = Page::FromAddress(reinterpret_cast<Address>(object));
if (target_page->IsEvacuationCandidate() &&
!ShouldSkipEvacuationSlotRecording(object)) {
DCHECK(
!source_page->ShouldSkipEvacuationSlotRecording()) {
DCHECK_IMPLIES(
!FLAG_concurrent_marking,
ObjectMarking::IsBlackOrGrey(object, MarkingState::Internal(object)));
RememberedSet<OLD_TO_OLD>::Insert(source_page,
reinterpret_cast<Address>(slot));
......
@@ -1253,7 +1253,6 @@ class InternalizedStringTableCleaner : public ObjectVisitor {
void VisitPointers(HeapObject* host, Object** start, Object** end) override {
// Visit all HeapObject pointers in [start, end).
MarkCompactCollector* collector = heap_->mark_compact_collector();
Object* the_hole = heap_->the_hole_value();
for (Object** p = start; p < end; p++) {
Object* o = *p;
@@ -1267,7 +1266,7 @@ class InternalizedStringTableCleaner : public ObjectVisitor {
} else {
// StringTable contains only old space strings.
DCHECK(!heap_->InNewSpace(o));
collector->RecordSlot(table_, p, o);
MarkCompactCollector::RecordSlot(table_, p, o);
}
}
}
@@ -3150,7 +3149,7 @@ void MarkCompactCollector::RecordRelocSlot(Code* host, RelocInfo* rinfo,
Page* source_page = Page::FromAddress(reinterpret_cast<Address>(host));
if (target_page->IsEvacuationCandidate() &&
(rinfo->host() == NULL ||
!ShouldSkipEvacuationSlotRecording(rinfo->host()))) {
!source_page->ShouldSkipEvacuationSlotRecording())) {
RelocInfo::Mode rmode = rinfo->rmode();
Address addr = rinfo->pc();
SlotType slot_type = SlotTypeForRelocInfoMode(rmode);
@@ -3824,7 +3823,7 @@ void MarkCompactCollector::InvalidateCode(Code* code) {
RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, start, end);
if (heap_->incremental_marking()->IsCompacting() &&
!ShouldSkipEvacuationSlotRecording(code)) {
!page->ShouldSkipEvacuationSlotRecording()) {
DCHECK(compacting_);
// If the object is white then no slots were recorded on it yet.
......
@@ -594,11 +594,6 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
void AbortCompaction();
INLINE(static bool ShouldSkipEvacuationSlotRecording(Object* host)) {
return Page::FromAddress(reinterpret_cast<Address>(host))
->ShouldSkipEvacuationSlotRecording();
}
static inline bool IsOnEvacuationCandidate(HeapObject* obj) {
return Page::FromAddress(reinterpret_cast<Address>(obj))
->IsEvacuationCandidate();
@@ -606,9 +601,8 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
void RecordRelocSlot(Code* host, RelocInfo* rinfo, Object* target);
void RecordCodeTargetPatch(Address pc, Code* target);
INLINE(void RecordSlot(HeapObject* object, Object** slot, Object* target));
INLINE(void ForceRecordSlot(HeapObject* object, Object** slot,
Object* target));
V8_INLINE static void RecordSlot(HeapObject* object, Object** slot,
Object* target);
void RecordLiveSlotsOnPage(Page* page);
void UpdateSlots(SlotsBuffer* buffer);
......
@@ -30,7 +30,6 @@ Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
Object* undefined = heap->undefined_value();
Object* head = undefined;
T* tail = NULL;
MarkCompactCollector* collector = heap->mark_compact_collector();
bool record_slots = MustRecordSlots(heap);
while (list != undefined) {
@@ -49,7 +48,7 @@ Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
if (record_slots) {
Object** next_slot =
HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
collector->RecordSlot(tail, next_slot, retained);
MarkCompactCollector::RecordSlot(tail, next_slot, retained);
}
}
// Retained object is new tail.
@@ -141,11 +140,10 @@ struct WeakListVisitor<Context> {
if (heap->gc_state() == Heap::MARK_COMPACT) {
// Record the slots of the weak entries in the native context.
MarkCompactCollector* collector = heap->mark_compact_collector();
for (int idx = Context::FIRST_WEAK_SLOT;
idx < Context::NATIVE_CONTEXT_SLOTS; ++idx) {
Object** slot = Context::cast(context)->RawFieldOfElementAt(idx);
collector->RecordSlot(context, slot, *slot);
MarkCompactCollector::RecordSlot(context, slot, *slot);
}
// Code objects are always allocated in Code space; we do not have to
// visit
......