Commit 3f9d9ab8 authored by hpayer@chromium.org

Ignore slots buffer overflow when recording entries of the allocation sites scratchpad.

BUG=
R=mstarzinger@chromium.org, ulan@chromium.org

Review URL: https://codereview.chromium.org/181063033

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@19677 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 52fd520c
@@ -490,7 +490,8 @@ void Heap::ScavengePointer(HeapObject** p) {
 }
-void Heap::UpdateAllocationSiteFeedback(HeapObject* object) {
+void Heap::UpdateAllocationSiteFeedback(HeapObject* object,
+                                        ScratchpadSlotMode mode) {
   Heap* heap = object->GetHeap();
   ASSERT(heap->InFromSpace(object));
@@ -518,7 +519,7 @@ void Heap::UpdateAllocationSiteFeedback(HeapObject* object) {
   if (!memento->IsValid()) return;
   if (memento->GetAllocationSite()->IncrementMementoFoundCount()) {
-    heap->AddAllocationSiteToScratchpad(memento->GetAllocationSite());
+    heap->AddAllocationSiteToScratchpad(memento->GetAllocationSite(), mode);
   }
 }
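
The feedback path above relies on an AllocationMemento that trails a freshly allocated object and points back at its AllocationSite; when the GC encounters such an object it bumps the site's found-count and, when the count warrants it, adds the site to the scratchpad. The following minimal sketch only illustrates that shape: the structs and the first-sighting rule are illustrative assumptions, not V8's actual classes.

#include <cstdio>

struct AllocationSiteSketch {
  int memento_found_count;
};

struct AllocationMementoSketch {
  AllocationSiteSketch* site;  // nullptr means "no valid memento trails the object"
};

// Bumps the site's found-count and reports whether the caller should add the
// site to the scratchpad; here, arbitrarily, only on the first sighting.
// (Shape only -- the real IncrementMementoFoundCount() decides this itself.)
bool IncrementMementoFoundCount(AllocationSiteSketch* site) {
  return site->memento_found_count++ == 0;
}

// Simplified analogue of UpdateAllocationSiteFeedback: if a valid memento
// trails the object, credit its allocation site.
bool UpdateFeedbackSketch(const AllocationMementoSketch& memento) {
  if (memento.site == nullptr) return false;
  return IncrementMementoFoundCount(memento.site);
}

int main() {
  AllocationSiteSketch site = {0};
  AllocationMementoSketch memento = {&site};
  std::printf("first sighting -> add to scratchpad: %d\n",
              UpdateFeedbackSketch(memento));
  std::printf("later sighting -> already tracked:   %d\n",
              UpdateFeedbackSketch(memento));
  return 0;
}
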
@@ -541,7 +542,7 @@ void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
     return;
   }
-  UpdateAllocationSiteFeedback(object);
+  UpdateAllocationSiteFeedback(object, IGNORE_SCRATCHPAD_SLOT);
   // AllocationMementos are unrooted and shouldn't survive a scavenge
   ASSERT(object->map() != object->GetHeap()->allocation_memento_map());
@@ -3654,7 +3654,8 @@ void Heap::InitializeAllocationSitesScratchpad() {
 }
-void Heap::AddAllocationSiteToScratchpad(AllocationSite* site) {
+void Heap::AddAllocationSiteToScratchpad(AllocationSite* site,
+                                         ScratchpadSlotMode mode) {
   if (allocation_sites_scratchpad_length_ < kAllocationSiteScratchpadSize) {
     // We cannot use the normal write-barrier because slots need to be
     // recorded with non-incremental marking as well. We have to explicitly
@@ -3663,7 +3664,15 @@ void Heap::AddAllocationSiteToScratchpad(AllocationSite* site) {
         allocation_sites_scratchpad_length_, site, SKIP_WRITE_BARRIER);
     Object** slot = allocation_sites_scratchpad()->RawFieldOfElementAt(
         allocation_sites_scratchpad_length_);
-    mark_compact_collector()->RecordSlot(slot, slot, *slot);
+    if (mode == RECORD_SCRATCHPAD_SLOT) {
+      // We need to allow slots buffer overflow here since the evacuation
+      // candidates are not part of the global list of old space pages and
+      // releasing an evacuation candidate due to a slots buffer overflow
+      // results in lost pages.
+      mark_compact_collector()->RecordSlot(
+          slot, slot, *slot, SlotsBuffer::IGNORE_OVERFLOW);
+    }
     allocation_sites_scratchpad_length_++;
   }
 }
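
The comment in the hunk above is the heart of this change: when a scratchpad slot is recorded, a full slots buffer must not cause the evacuation candidate to be evicted, so the addition is made with IGNORE_OVERFLOW and an overflowing entry is simply dropped. Below is a minimal stand-in for a buffer with the two addition policies; it assumes nothing about V8's real SlotsBuffer beyond the mode names and is only an illustration.

#include <cstddef>
#include <vector>

class SlotsBufferSketch {
 public:
  // FAIL_ON_OVERFLOW reports failure on a full buffer so the caller can evict
  // the evacuation candidate; IGNORE_OVERFLOW silently drops the slot instead.
  enum AdditionMode { FAIL_ON_OVERFLOW, IGNORE_OVERFLOW };

  explicit SlotsBufferSketch(std::size_t capacity) : capacity_(capacity) {}

  // Returns false only in FAIL_ON_OVERFLOW mode when the buffer is full.
  bool AddTo(void** slot, AdditionMode mode) {
    if (slots_.size() >= capacity_) {
      return mode == IGNORE_OVERFLOW;  // full: drop silently or signal failure
    }
    slots_.push_back(slot);
    return true;
  }

 private:
  std::size_t capacity_;
  std::vector<void**> slots_;
};

int main() {
  void* dummy = nullptr;
  SlotsBufferSketch buffer(1);
  buffer.AddTo(&dummy, SlotsBufferSketch::FAIL_ON_OVERFLOW);  // fits
  bool ok_fail = buffer.AddTo(&dummy, SlotsBufferSketch::FAIL_ON_OVERFLOW);
  bool ok_ignore = buffer.AddTo(&dummy, SlotsBufferSketch::IGNORE_OVERFLOW);
  // ok_fail == false (caller would evict the page); ok_ignore == true (dropped).
  return (ok_fail == false && ok_ignore == true) ? 0 : 1;
}
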
@@ -1504,10 +1504,16 @@ class Heap {
   static inline void ScavengePointer(HeapObject** p);
   static inline void ScavengeObject(HeapObject** p, HeapObject* object);
+  enum ScratchpadSlotMode {
+    IGNORE_SCRATCHPAD_SLOT,
+    RECORD_SCRATCHPAD_SLOT
+  };
   // An object may have an AllocationSite associated with it through a trailing
   // AllocationMemento. Its feedback should be updated when objects are found
   // in the heap.
-  static inline void UpdateAllocationSiteFeedback(HeapObject* object);
+  static inline void UpdateAllocationSiteFeedback(
+      HeapObject* object, ScratchpadSlotMode mode);
   // Support for partial snapshots. After calling this we have a linear
   // space to write objects in each space.
@@ -2312,7 +2318,8 @@ class Heap {
   void InitializeAllocationSitesScratchpad();
   // Adds an allocation site to the scratchpad if there is space left.
-  void AddAllocationSiteToScratchpad(AllocationSite* site);
+  void AddAllocationSiteToScratchpad(AllocationSite* site,
+                                     ScratchpadSlotMode mode);
   void UpdateSurvivalRateTrend(int start_new_space_size);
@@ -81,14 +81,15 @@ bool MarkCompactCollector::IsMarked(Object* obj) {
 void MarkCompactCollector::RecordSlot(Object** anchor_slot,
                                       Object** slot,
-                                      Object* object) {
+                                      Object* object,
+                                      SlotsBuffer::AdditionMode mode) {
   Page* object_page = Page::FromAddress(reinterpret_cast<Address>(object));
   if (object_page->IsEvacuationCandidate() &&
       !ShouldSkipEvacuationSlotRecording(anchor_slot)) {
     if (!SlotsBuffer::AddTo(&slots_buffer_allocator_,
                             object_page->slots_buffer_address(),
                             slot,
-                            SlotsBuffer::FAIL_ON_OVERFLOW)) {
+                            mode)) {
       EvictEvacuationCandidate(object_page);
     }
   }
@@ -2053,7 +2053,7 @@ int MarkCompactCollector::DiscoverAndPromoteBlackObjectsOnPage(
       int size = object->Size();
       survivors_size += size;
-      Heap::UpdateAllocationSiteFeedback(object);
+      Heap::UpdateAllocationSiteFeedback(object, Heap::RECORD_SCRATCHPAD_SLOT);
       offset++;
       current_cell >>= 1;
@@ -690,7 +690,11 @@ class MarkCompactCollector {
   void RecordCodeEntrySlot(Address slot, Code* target);
   void RecordCodeTargetPatch(Address pc, Code* target);
-  INLINE(void RecordSlot(Object** anchor_slot, Object** slot, Object* object));
+  INLINE(void RecordSlot(Object** anchor_slot,
+                         Object** slot,
+                         Object* object,
+                         SlotsBuffer::AdditionMode mode =
+                             SlotsBuffer::FAIL_ON_OVERFLOW));
   void MigrateObject(Address dst,
                      Address src,
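
Giving the new parameter a default of SlotsBuffer::FAIL_ON_OVERFLOW keeps every existing RecordSlot call site source-compatible; only the scratchpad path passes IGNORE_OVERFLOW explicitly. A small sketch of that default-argument pattern follows; the function, names, and output are illustrative stand-ins rather than V8 code.

#include <cstdio>

enum AdditionMode { FAIL_ON_OVERFLOW, IGNORE_OVERFLOW };

// Callers that pass no mode keep the old FAIL_ON_OVERFLOW behaviour; new
// callers can opt into IGNORE_OVERFLOW without touching anyone else.
void RecordSlotSketch(const char* caller, AdditionMode mode = FAIL_ON_OVERFLOW) {
  // In the real collector a failed AddTo() triggers EvictEvacuationCandidate;
  // here we only show which policy each caller ends up with.
  std::printf("%s -> %s\n", caller,
              mode == FAIL_ON_OVERFLOW ? "FAIL_ON_OVERFLOW" : "IGNORE_OVERFLOW");
}

int main() {
  RecordSlotSketch("ordinary marking visitor");                      // default, unchanged
  RecordSlotSketch("allocation sites scratchpad", IGNORE_OVERFLOW);  // new explicit mode
  return 0;
}
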