Commit c24ed0a2 authored by jkummerow, committed by Commit bot

Reland^2 "Filter invalid slots out from the SlotsBuffer after marking."

And reland "Use a slot that is located on a heap page when removing
invalid entries from the SlotsBuffer."

This reverts commits de018fbd and
d23a9f7a.

Reason for relanding: looking fine on Canary, let's get these fixes back in.

BUG=chromium:454297,chromium:470801
LOG=y
TBR=ishell@chromium.org

Review URL: https://codereview.chromium.org/1043703003

Cr-Commit-Position: refs/heads/master@{#27507}
parent 256f00c0
@@ -287,6 +287,60 @@ bool MarkCompactCollector::StartCompaction(CompactionMode mode) {
}
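// Drops slots-buffer entries on every page of |space| that no longer point
// into live objects (the actual filtering is done by
// SlotsBuffer::RemoveInvalidSlots).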
void MarkCompactCollector::ClearInvalidSlotsBufferEntries(PagedSpace* space) {
PageIterator it(space);
while (it.has_next()) {
Page* p = it.next();
SlotsBuffer::RemoveInvalidSlots(heap_, p->slots_buffer());
}
}
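// Clears invalid entries from the store buffer and from the slots buffers of
// all paged spaces and of large-object pages.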
void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() {
heap_->store_buffer()->ClearInvalidStoreBufferEntries();
ClearInvalidSlotsBufferEntries(heap_->old_pointer_space());
ClearInvalidSlotsBufferEntries(heap_->old_data_space());
ClearInvalidSlotsBufferEntries(heap_->code_space());
ClearInvalidSlotsBufferEntries(heap_->cell_space());
ClearInvalidSlotsBufferEntries(heap_->map_space());
LargeObjectIterator it(heap_->lo_space());
for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
SlotsBuffer::RemoveInvalidSlots(heap_, chunk->slots_buffer());
}
}
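// Debug-only counterparts of the two functions above: check that no invalid
// entries remain after filtering.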
#ifdef VERIFY_HEAP
static void VerifyValidSlotsBufferEntries(Heap* heap, PagedSpace* space) {
PageIterator it(space);
while (it.has_next()) {
Page* p = it.next();
SlotsBuffer::VerifySlots(heap, p->slots_buffer());
}
}
static void VerifyValidStoreAndSlotsBufferEntries(Heap* heap) {
heap->store_buffer()->VerifyValidStoreBufferEntries();
VerifyValidSlotsBufferEntries(heap, heap->old_pointer_space());
VerifyValidSlotsBufferEntries(heap, heap->old_data_space());
VerifyValidSlotsBufferEntries(heap, heap->code_space());
VerifyValidSlotsBufferEntries(heap, heap->cell_space());
VerifyValidSlotsBufferEntries(heap, heap->map_space());
LargeObjectIterator it(heap->lo_space());
for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
SlotsBuffer::VerifySlots(heap, chunk->slots_buffer());
}
}
#endif
void MarkCompactCollector::CollectGarbage() {
// Make sure that Prepare() has been called. The individual steps below will
// update the state as they proceed.
@@ -312,11 +366,11 @@ void MarkCompactCollector::CollectGarbage() {
}
#endif
heap_->store_buffer()->ClearInvalidStoreBufferEntries();
ClearInvalidStoreAndSlotsBufferEntries();
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
heap_->store_buffer()->VerifyValidStoreBufferEntries();
VerifyValidStoreAndSlotsBufferEntries(heap_);
}
#endif
@@ -3055,10 +3109,14 @@ bool MarkCompactCollector::TryPromoteObject(HeapObject* object,
}
bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot) {
bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot,
HeapObject** out_object) {
// This function does not support large objects right now.
Space* owner = p->owner();
if (owner == heap_->lo_space() || owner == NULL) return true;
if (owner == heap_->lo_space() || owner == NULL) {
*out_object = NULL;
return true;
}
uint32_t mark_bit_index = p->AddressToMarkbitIndex(slot);
unsigned int start_index = mark_bit_index >> Bitmap::kBitsPerCellLog2;
@@ -3112,6 +3170,7 @@ bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot) {
(object->address() + object->Size()) > slot) {
// If the slot is within the last found object in the cell, the slot is
// in a live object.
*out_object = object;
return true;
}
return false;
@@ -3153,28 +3212,38 @@ bool MarkCompactCollector::IsSlotInBlackObjectSlow(Page* p, Address slot) {
}
bool MarkCompactCollector::IsSlotInLiveObject(HeapObject** address,
HeapObject* object) {
// If the target object is not black, the source slot must be part
// of a non-black (dead) object.
if (!Marking::IsBlack(Marking::MarkBitFrom(object))) {
return false;
}
bool MarkCompactCollector::IsSlotInLiveObject(Address slot) {
HeapObject* object = NULL;
// The target object is black but we don't know if the source slot is black.
// The source object could have died and the slot could be part of a free
// space. Find out based on mark bits if the slot is part of a live object.
if (!IsSlotInBlackObject(
Page::FromAddress(reinterpret_cast<Address>(address)),
reinterpret_cast<Address>(address))) {
if (!IsSlotInBlackObject(Page::FromAddress(slot), slot, &object)) {
return false;
}
#if V8_DOUBLE_FIELDS_UNBOXING
// |object| is NULL only when the slot belongs to large object space.
DCHECK(object != NULL ||
Page::FromAnyPointerAddress(heap_, slot)->owner() ==
heap_->lo_space());
// We don't need to check large objects' layout descriptor since it can't
// contain in-object fields anyway.
if (object != NULL) {
// Filter out slots that happen to point to unboxed double fields.
LayoutDescriptorHelper helper(object->map());
bool has_only_tagged_fields = helper.all_fields_tagged();
if (!has_only_tagged_fields &&
!helper.IsTagged(static_cast<int>(slot - object->address()))) {
return false;
}
}
#endif
return true;
}
void MarkCompactCollector::VerifyIsSlotInLiveObject(HeapObject** address,
void MarkCompactCollector::VerifyIsSlotInLiveObject(Address slot,
HeapObject* object) {
// The target object has to be black.
CHECK(Marking::IsBlack(Marking::MarkBitFrom(object)));
@@ -3182,9 +3251,7 @@ void MarkCompactCollector::VerifyIsSlotInLiveObject(HeapObject** address,
// The target object is black but we don't know if the source slot is black.
// The source object could have died and the slot could be part of a free
// space. Use the mark bit iterator to find out about liveness of the slot.
CHECK(IsSlotInBlackObjectSlow(
Page::FromAddress(reinterpret_cast<Address>(address)),
reinterpret_cast<Address>(address)));
CHECK(IsSlotInBlackObjectSlow(Page::FromAddress(slot), slot));
}
@@ -4503,6 +4570,63 @@ bool SlotsBuffer::AddTo(SlotsBufferAllocator* allocator,
}
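// Replaces every untyped slot that points into new space or into a dead object
// with |kRemovedEntry|, a slot on a page that is never evacuated, so that later
// slot updating can process it without harm.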
void SlotsBuffer::RemoveInvalidSlots(Heap* heap, SlotsBuffer* buffer) {
// Remove entries by replacing them with an old-space slot containing a smi
// that is located in an unmovable page.
const ObjectSlot kRemovedEntry = HeapObject::RawField(
heap->empty_fixed_array(), FixedArrayBase::kLengthOffset);
DCHECK(Page::FromAddress(reinterpret_cast<Address>(kRemovedEntry))
->NeverEvacuate());
while (buffer != NULL) {
SlotsBuffer::ObjectSlot* slots = buffer->slots_;
intptr_t slots_count = buffer->idx_;
for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) {
ObjectSlot slot = slots[slot_idx];
if (!IsTypedSlot(slot)) {
Object* object = *slot;
if (object->IsHeapObject()) {
if (heap->InNewSpace(object) ||
!heap->mark_compact_collector()->IsSlotInLiveObject(
reinterpret_cast<Address>(slot))) {
slots[slot_idx] = kRemovedEntry;
}
}
} else {
++slot_idx;
DCHECK(slot_idx < slots_count);
}
}
buffer = buffer->next();
}
}
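// Debug check: every remaining untyped slot must hold either a smi or a
// pointer to a live object outside new space.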
void SlotsBuffer::VerifySlots(Heap* heap, SlotsBuffer* buffer) {
while (buffer != NULL) {
SlotsBuffer::ObjectSlot* slots = buffer->slots_;
intptr_t slots_count = buffer->idx_;
for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) {
ObjectSlot slot = slots[slot_idx];
if (!IsTypedSlot(slot)) {
Object* object = *slot;
if (object->IsHeapObject()) {
CHECK(!heap->InNewSpace(object));
CHECK(heap->mark_compact_collector()->IsSlotInLiveObject(
reinterpret_cast<Address>(slot)));
}
} else {
++slot_idx;
DCHECK(slot_idx < slots_count);
}
}
buffer = buffer->next();
}
}
static inline SlotsBuffer::SlotType SlotTypeForRMode(RelocInfo::Mode rmode) {
if (RelocInfo::IsCodeTarget(rmode)) {
return SlotsBuffer::CODE_TARGET_SLOT;
......
@@ -363,6 +363,15 @@ class SlotsBuffer {
SlotsBuffer** buffer_address, SlotType type, Address addr,
AdditionMode mode);
// Eliminates all stale entries from the slots buffer, i.e., slots that are
// no longer part of live objects. This method must be called after marking,
// when the whole transitive closure is known, and before sweeping, while the
// mark bits are still intact.
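// Typical use, as in MarkCompactCollector::ClearInvalidSlotsBufferEntries:
//   SlotsBuffer::RemoveInvalidSlots(heap, page->slots_buffer());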
static void RemoveInvalidSlots(Heap* heap, SlotsBuffer* buffer);
// Ensures that there are no invalid slots in the chain of slots buffers.
static void VerifySlots(Heap* heap, SlotsBuffer* buffer);
static const int kNumberOfElements = 1021;
private:
@@ -679,10 +688,10 @@ class MarkCompactCollector {
// The following four methods can only be called after marking, when the
// whole transitive closure is known. They must be called before sweeping,
// while mark bits are still intact.
bool IsSlotInBlackObject(Page* p, Address slot);
bool IsSlotInBlackObject(Page* p, Address slot, HeapObject** out_object);
bool IsSlotInBlackObjectSlow(Page* p, Address slot);
bool IsSlotInLiveObject(HeapObject** address, HeapObject* object);
void VerifyIsSlotInLiveObject(HeapObject** address, HeapObject* object);
bool IsSlotInLiveObject(Address slot);
void VerifyIsSlotInLiveObject(Address slot, HeapObject* object);
private:
class SweeperTask;
@@ -694,6 +703,8 @@ class MarkCompactCollector {
bool WillBeDeoptimized(Code* code);
void RemoveDeadInvalidatedCode();
void ProcessInvalidatedCode(ObjectVisitor* visitor);
void ClearInvalidSlotsBufferEntries(PagedSpace* space);
void ClearInvalidStoreAndSlotsBufferEntries();
void StartSweeperThreads();
......
@@ -361,12 +361,14 @@ void StoreBuffer::ClearInvalidStoreBufferEntries() {
Address* new_top = old_start_;
for (Address* current = old_start_; current < old_top_; current++) {
Address addr = *current;
Object** slot = reinterpret_cast<Object**>(*current);
Object** slot = reinterpret_cast<Object**>(addr);
Object* object = *slot;
if (heap_->InNewSpace(object)) {
if (heap_->mark_compact_collector()->IsSlotInLiveObject(
reinterpret_cast<HeapObject**>(slot),
reinterpret_cast<HeapObject*>(object))) {
if (heap_->InNewSpace(object) && object->IsHeapObject()) {
// If the target object is not black, the source slot must be part
// of a non-black (dead) object.
HeapObject* heap_object = HeapObject::cast(object);
if (Marking::IsBlack(Marking::MarkBitFrom(heap_object)) &&
heap_->mark_compact_collector()->IsSlotInLiveObject(addr)) {
*new_top++ = addr;
}
}
@@ -389,10 +391,10 @@ void StoreBuffer::VerifyValidStoreBufferEntries() {
for (Address* current = old_start_; current < old_top_; current++) {
Object** slot = reinterpret_cast<Object**>(*current);
Object* object = *slot;
CHECK(object->IsHeapObject());
CHECK(heap_->InNewSpace(object));
heap_->mark_compact_collector()->VerifyIsSlotInLiveObject(
reinterpret_cast<HeapObject**>(slot),
reinterpret_cast<HeapObject*>(object));
reinterpret_cast<Address>(slot), HeapObject::cast(object));
}
}
......
@@ -48,6 +48,12 @@ static Handle<String> MakeName(const char* str, int suffix) {
}
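// Returns the JSObject bound to |name| on the current global object.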
Handle<JSObject> GetObject(const char* name) {
return v8::Utils::OpenHandle(
*v8::Handle<v8::Object>::Cast(CcTest::global()->Get(v8_str(name))));
}
static double GetDoubleFieldValue(JSObject* obj, FieldIndex field_index) {
if (obj->IsUnboxedDoubleField(field_index)) {
return obj->RawFastDoublePropertyAt(field_index);
@@ -1388,4 +1394,211 @@ TEST(WriteBarriersInCopyJSObject) {
CHECK_EQ(boom_value, clone->RawFastDoublePropertyAt(index));
}
static void TestWriteBarrier(Handle<Map> map, Handle<Map> new_map,
int tagged_descriptor, int double_descriptor,
bool check_tagged_value = true) {
FLAG_stress_compaction = true;
FLAG_manual_evacuation_candidates_selection = true;
Isolate* isolate = CcTest::i_isolate();
Factory* factory = isolate->factory();
Heap* heap = CcTest::heap();
PagedSpace* old_pointer_space = heap->old_pointer_space();
// The plan: create |obj| from |map| in old space, create |obj_value| in
// new space, and ensure that the write barrier is triggered when |obj_value|
// is written to property |tagged_descriptor| of |obj|.
// Then migrate the object to |new_map| and store a proper value in property
// |double_descriptor|. Trigger a GC and ensure that it does not crash while
// updating the store buffer entries.
Handle<JSObject> obj;
Handle<HeapObject> obj_value;
{
AlwaysAllocateScope always_allocate(isolate);
obj = factory->NewJSObjectFromMap(map, TENURED, false);
CHECK(old_pointer_space->Contains(*obj));
obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS);
}
CHECK(heap->InNewSpace(*obj_value));
{
FieldIndex index = FieldIndex::ForDescriptor(*map, tagged_descriptor);
const int n = 153;
for (int i = 0; i < n; i++) {
obj->FastPropertyAtPut(index, *obj_value);
}
}
// Migrate |obj| to |new_map|, which shifts the fields so that |boom_value|
// can be put into the slot that was earlier recorded by the write barrier.
JSObject::MigrateToMap(obj, new_map);
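// |boom_value| is a double whose bit pattern looks like a tagged pointer
// derived from |obj_value|'s address; if the stale store buffer entry for the
// old tagged field were still treated as a slot, the GC would follow it.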
Address fake_object = reinterpret_cast<Address>(*obj_value) + kPointerSize;
double boom_value = bit_cast<double>(fake_object);
FieldIndex double_field_index =
FieldIndex::ForDescriptor(*new_map, double_descriptor);
CHECK(obj->IsUnboxedDoubleField(double_field_index));
obj->RawFastDoublePropertyAtPut(double_field_index, boom_value);
// Trigger GC to evacuate all candidates.
CcTest::heap()->CollectGarbage(NEW_SPACE, "boom");
if (check_tagged_value) {
FieldIndex tagged_field_index =
FieldIndex::ForDescriptor(*new_map, tagged_descriptor);
CHECK_EQ(*obj_value, obj->RawFastPropertyAt(tagged_field_index));
}
CHECK_EQ(boom_value, obj->RawFastDoublePropertyAt(double_field_index));
}
static void TestIncrementalWriteBarrier(Handle<Map> map, Handle<Map> new_map,
int tagged_descriptor,
int double_descriptor,
bool check_tagged_value = true) {
if (FLAG_never_compact || !FLAG_incremental_marking) return;
FLAG_stress_compaction = true;
FLAG_manual_evacuation_candidates_selection = true;
Isolate* isolate = CcTest::i_isolate();
Factory* factory = isolate->factory();
Heap* heap = CcTest::heap();
PagedSpace* old_pointer_space = heap->old_pointer_space();
// The plan: create |obj| from |map| in old space, create |obj_value| in
// old space, and ensure that it ends up on an evacuation candidate page.
// Start incremental marking and ensure that the incremental write barrier is
// triggered when |obj_value| is written to property |tagged_descriptor| of
// |obj|. Then migrate the object to |new_map| and store a proper value in
// property |double_descriptor|. Trigger a GC and ensure that it does not
// crash while updating the slots buffer entries.
Handle<JSObject> obj;
Handle<HeapObject> obj_value;
Page* ec_page;
{
AlwaysAllocateScope always_allocate(isolate);
obj = factory->NewJSObjectFromMap(map, TENURED, false);
CHECK(old_pointer_space->Contains(*obj));
// Make sure |obj_value| is placed on an old-space evacuation candidate.
SimulateFullSpace(old_pointer_space);
obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
ec_page = Page::FromAddress(obj_value->address());
CHECK_NE(ec_page, Page::FromAddress(obj->address()));
}
// Heap is ready, force |ec_page| to become an evacuation candidate and
// simulate incremental marking.
ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
SimulateIncrementalMarking(heap);
// Check that everything is ready for triggering the incremental write barrier
// (i.e. that both |obj| and |obj_value| are black, the marking phase is still
// active, and |obj_value|'s page is indeed an evacuation candidate).
IncrementalMarking* marking = heap->incremental_marking();
CHECK(marking->IsMarking());
CHECK(Marking::IsBlack(Marking::MarkBitFrom(*obj)));
CHECK(Marking::IsBlack(Marking::MarkBitFrom(*obj_value)));
CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
// Trigger the incremental write barrier, which should add slots to
// |ec_page|'s slots buffer.
{
int slots_buffer_len = SlotsBuffer::SizeOfChain(ec_page->slots_buffer());
FieldIndex index = FieldIndex::ForDescriptor(*map, tagged_descriptor);
const int n = SlotsBuffer::kNumberOfElements + 10;
for (int i = 0; i < n; i++) {
obj->FastPropertyAtPut(index, *obj_value);
}
// Ensure that the slots were actually added to |ec_page|'s slots buffer.
CHECK_EQ(slots_buffer_len + n,
SlotsBuffer::SizeOfChain(ec_page->slots_buffer()));
}
// Migrate |obj| to |new_map|, which shifts the fields so that |boom_value|
// can be put into the slot that was earlier recorded by the incremental
// write barrier.
JSObject::MigrateToMap(obj, new_map);
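// |boom_value| is an obviously bogus bit pattern; if the recorded slot were
// still treated as tagged after the migration, evacuation would try to update
// it and crash.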
double boom_value = bit_cast<double>(UINT64_C(0xbaad0176a37c28e1));
FieldIndex double_field_index =
FieldIndex::ForDescriptor(*new_map, double_descriptor);
CHECK(obj->IsUnboxedDoubleField(double_field_index));
obj->RawFastDoublePropertyAtPut(double_field_index, boom_value);
// Trigger GC to evacuate all candidates.
CcTest::heap()->CollectGarbage(OLD_POINTER_SPACE, "boom");
// Ensure that the values are still there and correct.
CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
if (check_tagged_value) {
FieldIndex tagged_field_index =
FieldIndex::ForDescriptor(*new_map, tagged_descriptor);
CHECK_EQ(*obj_value, obj->RawFastPropertyAt(tagged_field_index));
}
CHECK_EQ(boom_value, obj->RawFastDoublePropertyAt(double_field_index));
}
enum WriteBarrierKind { OLD_TO_OLD_WRITE_BARRIER, OLD_TO_NEW_WRITE_BARRIER };
static void TestWriteBarrierObjectShiftFieldsRight(
WriteBarrierKind write_barrier_kind) {
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
v8::HandleScope scope(CcTest::isolate());
Handle<HeapType> any_type = HeapType::Any(isolate);
CompileRun("function func() { return 1; }");
Handle<JSObject> func = GetObject("func");
Handle<Map> map = Map::Create(isolate, 10);
map = Map::CopyWithConstant(map, MakeName("prop", 0), func, NONE,
INSERT_TRANSITION).ToHandleChecked();
map = Map::CopyWithField(map, MakeName("prop", 1), any_type, NONE,
Representation::Double(),
INSERT_TRANSITION).ToHandleChecked();
map = Map::CopyWithField(map, MakeName("prop", 2), any_type, NONE,
Representation::Tagged(),
INSERT_TRANSITION).ToHandleChecked();
// Shift fields right by turning constant property to a field.
Handle<Map> new_map = Map::ReconfigureProperty(
map, 0, kData, NONE, Representation::Tagged(), any_type, FORCE_FIELD);
if (write_barrier_kind == OLD_TO_NEW_WRITE_BARRIER) {
TestWriteBarrier(map, new_map, 2, 1);
} else {
CHECK_EQ(OLD_TO_OLD_WRITE_BARRIER, write_barrier_kind);
TestIncrementalWriteBarrier(map, new_map, 2, 1);
}
}
// TODO(ishell): enable when this issue is fixed.
DISABLED_TEST(WriteBarrierObjectShiftFieldsRight) {
TestWriteBarrierObjectShiftFieldsRight(OLD_TO_NEW_WRITE_BARRIER);
}
TEST(IncrementalWriteBarrierObjectShiftFieldsRight) {
TestWriteBarrierObjectShiftFieldsRight(OLD_TO_OLD_WRITE_BARRIER);
}
// TODO(ishell): add respective tests for property kind reconfiguring from
// accessor field to double, once accessor fields are supported by
// Map::ReconfigureProperty().
// TODO(ishell): add respective tests for fast property removal case once
// Map::ReconfigureProperty() supports that.
#endif