Commit c5142d86 authored by ulan's avatar ulan Committed by Commit bot

Clear recorded slots when creating filler objects.

BUG=chromium:589413
LOG=NO

Review URL: https://codereview.chromium.org/1733333002

Cr-Commit-Position: refs/heads/master@{#34295}
parent 05ae2db7
......@@ -945,7 +945,8 @@ void Heap::EnsureFillerObjectAtTop() {
Page* page = Page::FromAddress(from_top);
if (page->Contains(from_top)) {
int remaining_in_page = static_cast<int>(page->area_end() - from_top);
CreateFillerObjectAt(from_top, remaining_in_page);
CreateFillerObjectAt(from_top, remaining_in_page,
ClearRecordedSlots::kNo);
}
}
}
......@@ -1166,7 +1167,8 @@ bool Heap::ReserveSpace(Reservation* reservations) {
// Mark with a free list node, in case we have a GC before
// deserializing.
Address free_space_address = free_space->address();
CreateFillerObjectAt(free_space_address, size);
CreateFillerObjectAt(free_space_address, size,
ClearRecordedSlots::kNo);
DCHECK(space < Serializer::kNumberOfPreallocatedSpaces);
chunk.start = free_space_address;
chunk.end = free_space_address + size;
......@@ -1999,7 +2001,7 @@ int Heap::GetFillToAlign(Address address, AllocationAlignment alignment) {
HeapObject* Heap::PrecedeWithFiller(HeapObject* object, int filler_size) {
CreateFillerObjectAt(object->address(), filler_size);
CreateFillerObjectAt(object->address(), filler_size, ClearRecordedSlots::kNo);
return HeapObject::FromAddress(object->address() + filler_size);
}
......@@ -2015,7 +2017,8 @@ HeapObject* Heap::AlignWithFiller(HeapObject* object, int object_size,
filler_size -= pre_filler;
}
if (filler_size)
CreateFillerObjectAt(object->address() + object_size, filler_size);
CreateFillerObjectAt(object->address() + object_size, filler_size,
ClearRecordedSlots::kNo);
return object;
}
......@@ -2133,7 +2136,7 @@ AllocationResult Heap::AllocateFillerObject(int size, bool double_align,
MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
DCHECK(chunk->owner()->identity() == space);
#endif
CreateFillerObjectAt(obj->address(), size);
CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
return obj;
}
......@@ -3042,8 +3045,8 @@ AllocationResult Heap::AllocateBytecodeArray(int length,
return result;
}
void Heap::CreateFillerObjectAt(Address addr, int size) {
void Heap::CreateFillerObjectAt(Address addr, int size,
ClearRecordedSlots mode) {
if (size == 0) return;
HeapObject* filler = HeapObject::FromAddress(addr);
if (size == kPointerSize) {
......@@ -3058,6 +3061,9 @@ void Heap::CreateFillerObjectAt(Address addr, int size) {
reinterpret_cast<Map*>(root(kFreeSpaceMapRootIndex)));
FreeSpace::cast(filler)->nobarrier_set_size(size);
}
if (mode == ClearRecordedSlots::kYes) {
ClearRecordedSlotRange(addr, addr + size);
}
// At this point, we may be deserializing the heap from a snapshot, and
// none of the maps have been created yet and are NULL.
DCHECK((filler->map() == NULL && !deserialization_complete_) ||
......@@ -3131,7 +3137,8 @@ FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object,
// Technically in new space this write might be omitted (except for
// debug mode which iterates through the heap), but to play safer
// we still do it.
CreateFillerObjectAt(object->address(), bytes_to_trim);
CreateFillerObjectAt(object->address(), bytes_to_trim,
ClearRecordedSlots::kYes);
// Initialize header of the trimmed array. Since left trimming is only
// performed on pages which are not concurrently swept creating a filler
......@@ -3146,11 +3153,6 @@ FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object,
// Maintain consistency of live bytes during incremental marking
Marking::TransferMark(this, object->address(), new_start);
if (mark_compact_collector()->sweeping_in_progress()) {
// Array trimming during sweeping can add invalid slots in free list.
ClearRecordedSlotRange(object, former_start,
HeapObject::RawField(new_object, 0));
}
AdjustLiveBytes(new_object, -bytes_to_trim, Heap::CONCURRENT_TO_SWEEPER);
// Notify the heap profiler of change in object layout.
......@@ -3210,12 +3212,7 @@ void Heap::RightTrimFixedArray(FixedArrayBase* object, int elements_to_trim) {
// TODO(hpayer): We should shrink the large object page if the size
// of the object changed significantly.
if (!lo_space()->Contains(object)) {
CreateFillerObjectAt(new_end, bytes_to_trim);
if (mark_compact_collector()->sweeping_in_progress()) {
// Array trimming during sweeping can add invalid slots in free list.
ClearRecordedSlotRange(object, reinterpret_cast<Object**>(new_end),
reinterpret_cast<Object**>(old_end));
}
CreateFillerObjectAt(new_end, bytes_to_trim, ClearRecordedSlots::kYes);
}
// Initialize header of the trimmed array. We are storing the new length
......@@ -3319,7 +3316,8 @@ AllocationResult Heap::AllocateCode(int object_size, bool immovable) {
MemoryChunk::FromAddress(address)->owner()->identity() != LO_SPACE) {
// Discard the first code allocation, which was on a page where it could
// be moved.
CreateFillerObjectAt(result->address(), object_size);
CreateFillerObjectAt(result->address(), object_size,
ClearRecordedSlots::kNo);
allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE);
if (!allocation.To(&result)) return allocation;
OnAllocationEvent(result, object_size);
......@@ -5581,15 +5579,12 @@ void Heap::ClearRecordedSlot(HeapObject* object, Object** slot) {
}
}
void Heap::ClearRecordedSlotRange(HeapObject* object, Object** start,
Object** end) {
if (!InNewSpace(object)) {
void Heap::ClearRecordedSlotRange(Address start, Address end) {
Page* page = Page::FromAddress(start);
if (!page->InNewSpace()) {
store_buffer()->MoveEntriesToRememberedSet();
Address start_addr = reinterpret_cast<Address>(start);
Address end_addr = reinterpret_cast<Address>(end);
Page* page = Page::FromAddress(start_addr);
DCHECK_EQ(page->owner()->identity(), OLD_SPACE);
RememberedSet<OLD_TO_NEW>::RemoveRange(page, start_addr, end_addr);
RememberedSet<OLD_TO_NEW>::RemoveRange(page, start, end);
}
}
......
......@@ -403,6 +403,7 @@ enum ArrayStorageAllocationMode {
INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
};
enum class ClearRecordedSlots { kYes, kNo };
class Heap {
public:
......@@ -632,8 +633,10 @@ class Heap {
void MoveElements(FixedArray* array, int dst_index, int src_index, int len);
// Initialize a filler object to keep the ability to iterate over the heap
// when introducing gaps within pages.
void CreateFillerObjectAt(Address addr, int size);
// when introducing gaps within pages. If slots could have been recorded in
// the freed area, then pass ClearRecordedSlots::kYes as the mode. Otherwise,
// pass ClearRecordedSlots::kNo.
void CreateFillerObjectAt(Address addr, int size, ClearRecordedSlots mode);
bool CanMoveObjectStart(HeapObject* object);
......@@ -1060,7 +1063,7 @@ class Heap {
}
void ClearRecordedSlot(HeapObject* object, Object** slot);
void ClearRecordedSlotRange(HeapObject* object, Object** start, Object** end);
void ClearRecordedSlotRange(Address start, Address end);
// ===========================================================================
// Incremental marking API. ==================================================
......
......@@ -1432,7 +1432,8 @@ void LocalAllocationBuffer::Close() {
if (IsValid()) {
heap_->CreateFillerObjectAt(
allocation_info_.top(),
static_cast<int>(allocation_info_.limit() - allocation_info_.top()));
static_cast<int>(allocation_info_.limit() - allocation_info_.top()),
ClearRecordedSlots::kNo);
}
}
......@@ -1443,7 +1444,8 @@ LocalAllocationBuffer::LocalAllocationBuffer(Heap* heap,
if (IsValid()) {
heap_->CreateFillerObjectAt(
allocation_info_.top(),
static_cast<int>(allocation_info_.limit() - allocation_info_.top()));
static_cast<int>(allocation_info_.limit() - allocation_info_.top()),
ClearRecordedSlots::kNo);
}
}
......@@ -1526,7 +1528,7 @@ bool NewSpace::AddFreshPage() {
}
int remaining_in_page = static_cast<int>(limit - top);
heap()->CreateFillerObjectAt(top, remaining_in_page);
heap()->CreateFillerObjectAt(top, remaining_in_page, ClearRecordedSlots::kNo);
pages_used_++;
UpdateAllocationInfo();
......@@ -2362,7 +2364,8 @@ void FreeList::Reset() {
int FreeList::Free(Address start, int size_in_bytes) {
if (size_in_bytes == 0) return 0;
owner()->heap()->CreateFillerObjectAt(start, size_in_bytes);
owner()->heap()->CreateFillerObjectAt(start, size_in_bytes,
ClearRecordedSlots::kNo);
Page* page = Page::FromAddress(start);
......@@ -2660,7 +2663,7 @@ void PagedSpace::RepairFreeListsAfterDeserialization() {
int size = static_cast<int>(page->wasted_memory());
if (size == 0) continue;
Address address = page->OffsetToAddress(Page::kPageSize - size);
heap()->CreateFillerObjectAt(address, size);
heap()->CreateFillerObjectAt(address, size, ClearRecordedSlots::kNo);
}
}
......@@ -2672,7 +2675,8 @@ void PagedSpace::EvictEvacuationCandidatesFromLinearAllocationArea() {
// Create filler object to keep page iterable if it was iterable.
int remaining =
static_cast<int>(allocation_info_.limit() - allocation_info_.top());
heap()->CreateFillerObjectAt(allocation_info_.top(), remaining);
heap()->CreateFillerObjectAt(allocation_info_.top(), remaining,
ClearRecordedSlots::kNo);
allocation_info_.Reset(nullptr, nullptr);
}
}
......
......@@ -1738,7 +1738,8 @@ bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
// Byte size of the external String object.
int new_size = this->SizeFromMap(new_map);
heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
heap->CreateFillerObjectAt(this->address() + new_size, size - new_size,
ClearRecordedSlots::kNo);
// We are storing the new map using release store after creating a filler for
// the left-over space to avoid races with the sweeper thread.
......@@ -1799,7 +1800,8 @@ bool String::MakeExternal(v8::String::ExternalOneByteStringResource* resource) {
// Byte size of the external String object.
int new_size = this->SizeFromMap(new_map);
heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
heap->CreateFillerObjectAt(this->address() + new_size, size - new_size,
ClearRecordedSlots::kNo);
// We are storing the new map using release store after creating a filler for
// the left-over space to avoid races with the sweeper thread.
......@@ -2943,8 +2945,8 @@ void MigrateFastToFast(Handle<JSObject> object, Handle<Map> new_map) {
if (instance_size_delta > 0) {
Address address = object->address();
heap->CreateFillerObjectAt(
address + new_instance_size, instance_size_delta);
heap->CreateFillerObjectAt(address + new_instance_size, instance_size_delta,
ClearRecordedSlots::kYes);
heap->AdjustLiveBytes(*object, -instance_size_delta,
Heap::CONCURRENT_TO_SWEEPER);
}
......@@ -3039,7 +3041,7 @@ void MigrateFastToSlow(Handle<JSObject> object, Handle<Map> new_map,
if (instance_size_delta > 0) {
Heap* heap = isolate->heap();
heap->CreateFillerObjectAt(object->address() + new_instance_size,
instance_size_delta);
instance_size_delta, ClearRecordedSlots::kYes);
heap->AdjustLiveBytes(*object, -instance_size_delta,
Heap::CONCURRENT_TO_SWEEPER);
}
......@@ -12115,7 +12117,8 @@ Handle<String> SeqString::Truncate(Handle<SeqString> string, int new_length) {
Heap* heap = string->GetHeap();
// Sizes are pointer size aligned, so that we can use filler objects
// that are a multiple of pointer size.
heap->CreateFillerObjectAt(start_of_string + new_size, delta);
heap->CreateFillerObjectAt(start_of_string + new_size, delta,
ClearRecordedSlots::kNo);
heap->AdjustLiveBytes(*string, -delta, Heap::CONCURRENT_TO_SWEEPER);
// We are storing the new length using release store after creating a filler
......
......@@ -226,7 +226,7 @@ void AllocationTracker::AllocationEvent(Address addr, int size) {
// Mark the new block as FreeSpace to make sure the heap is iterable
// while we are capturing stack trace.
heap->CreateFillerObjectAt(addr, size);
heap->CreateFillerObjectAt(addr, size, ClearRecordedSlots::kNo);
Isolate* isolate = heap->isolate();
int length = 0;
......
......@@ -99,7 +99,8 @@ void SamplingHeapProfiler::SampleObject(Address soon_object, size_t size) {
// Mark the new block as FreeSpace to make sure the heap is iterable while we
// are taking the sample.
heap()->CreateFillerObjectAt(soon_object, static_cast<int>(size));
heap()->CreateFillerObjectAt(soon_object, static_cast<int>(size),
ClearRecordedSlots::kNo);
Local<v8::Value> loc = v8::Utils::ToLocal(obj);
......
......@@ -642,7 +642,7 @@ MUST_USE_RESULT static Object* StringReplaceGlobalRegExpWithEmptyString(
// TODO(hpayer): We should shrink the large object page if the size
// of the object changed significantly.
if (!heap->lo_space()->Contains(*answer)) {
heap->CreateFillerObjectAt(end_of_string, delta);
heap->CreateFillerObjectAt(end_of_string, delta, ClearRecordedSlots::kNo);
}
heap->AdjustLiveBytes(*answer, -delta, Heap::CONCURRENT_TO_SWEEPER);
return *answer;
......
......@@ -2068,7 +2068,7 @@ static HeapObject* NewSpaceAllocateAligned(int size,
heap->new_space()->AllocateRawAligned(size, alignment);
HeapObject* obj = NULL;
allocation.To(&obj);
heap->CreateFillerObjectAt(obj->address(), size);
heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
return obj;
}
......@@ -2171,7 +2171,7 @@ static HeapObject* OldSpaceAllocateAligned(int size,
heap->old_space()->AllocateRawAligned(size, alignment);
HeapObject* obj = NULL;
allocation.To(&obj);
heap->CreateFillerObjectAt(obj->address(), size);
heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
return obj;
}
......@@ -4285,8 +4285,9 @@ TEST(Regress169928) {
AllocationMemento::kSize + kPointerSize);
CHECK(allocation.To(&obj));
Address addr_obj = obj->address();
CcTest::heap()->CreateFillerObjectAt(
addr_obj, AllocationMemento::kSize + kPointerSize);
CcTest::heap()->CreateFillerObjectAt(addr_obj,
AllocationMemento::kSize + kPointerSize,
ClearRecordedSlots::kNo);
// Give the array a name, making sure not to allocate strings.
v8::Local<v8::Object> array_obj = v8::Utils::ToLocal(array);
......
......@@ -46,7 +46,8 @@ static bool AllocateFromLab(Heap* heap, LocalAllocationBuffer* lab,
AllocationResult result =
lab->AllocateRawAligned(static_cast<int>(size_in_bytes), alignment);
if (result.To(&obj)) {
heap->CreateFillerObjectAt(obj->address(), static_cast<int>(size_in_bytes));
heap->CreateFillerObjectAt(obj->address(), static_cast<int>(size_in_bytes),
ClearRecordedSlots::kNo);
return true;
}
return false;
......
......@@ -519,7 +519,8 @@ static HeapObject* AllocateUnaligned(NewSpace* space, int size) {
CHECK(!allocation.IsRetry());
HeapObject* filler = NULL;
CHECK(allocation.To(&filler));
space->heap()->CreateFillerObjectAt(filler->address(), size);
space->heap()->CreateFillerObjectAt(filler->address(), size,
ClearRecordedSlots::kNo);
return filler;
}
......@@ -528,7 +529,8 @@ static HeapObject* AllocateUnaligned(PagedSpace* space, int size) {
CHECK(!allocation.IsRetry());
HeapObject* filler = NULL;
CHECK(allocation.To(&filler));
space->heap()->CreateFillerObjectAt(filler->address(), size);
space->heap()->CreateFillerObjectAt(filler->address(), size,
ClearRecordedSlots::kNo);
return filler;
}
......
......@@ -49,7 +49,7 @@ static inline std::vector<Handle<FixedArray>> CreatePadding(
if (length <= 0) {
// Not enough room to create another fixed array. Let's create a filler.
heap->CreateFillerObjectAt(*heap->old_space()->allocation_top_address(),
free_memory);
free_memory, ClearRecordedSlots::kNo);
break;
}
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment