Commit 3e924dd0 authored by hpayer's avatar hpayer Committed by Commit bot

Delete dead store buffer verification code and code that changes the store buffer in debug mode.

BUG=

Review URL: https://codereview.chromium.org/1009323002

Cr-Commit-Position: refs/heads/master@{#27214}
parent 2ffc970b
...@@ -1575,10 +1575,6 @@ void Heap::Scavenge() { ...@@ -1575,10 +1575,6 @@ void Heap::Scavenge() {
Address new_space_front = new_space_.ToSpaceStart(); Address new_space_front = new_space_.ToSpaceStart();
promotion_queue_.Initialize(); promotion_queue_.Initialize();
#ifdef DEBUG
store_buffer()->Clean();
#endif
ScavengeVisitor scavenge_visitor(this); ScavengeVisitor scavenge_visitor(this);
// Copy roots. // Copy roots.
IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE); IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
...@@ -4961,143 +4957,6 @@ void Heap::IterateAndMarkPointersToFromSpace(Address start, Address end, ...@@ -4961,143 +4957,6 @@ void Heap::IterateAndMarkPointersToFromSpace(Address start, Address end,
} }
#ifdef DEBUG
// Signature for predicates that decide whether a given slot address should
// be checked against the store buffer.
typedef bool (*CheckStoreBufferFilter)(Object** addr);

// Accepts only addresses that fall on one of the pointer fields of a Map
// object (assuming Map-size alignment within the page).
bool IsAMapPointerAddress(Object** addr) {
  uintptr_t raw = reinterpret_cast<uintptr_t>(addr);
  int offset_in_map = raw % Map::kSize;
  return offset_in_map >= Map::kPointerFieldsBeginOffset &&
         offset_in_map < Map::kPointerFieldsEndOffset;
}

// Treats every slot as a potential pointer.
bool EverythingsAPointer(Object** addr) { return true; }
// Walks every pointer-sized slot in [current, limit) and verifies that each
// slot holding a new-space pointer (as selected by |filter|) has a matching
// entry in the sorted store buffer.  |store_buffer_position| is advanced in
// lockstep with the scan.  |special_garbage_start|/|special_garbage_end|
// delimit the current linear allocation area, which may contain junk and is
// skipped.  Aborts via UNREACHABLE() on the first missing entry.
static void CheckStoreBuffer(Heap* heap, Object** current, Object** limit,
                             Object**** store_buffer_position,
                             Object*** store_buffer_top,
                             CheckStoreBufferFilter filter,
                             Address special_garbage_start,
                             Address special_garbage_end) {
  Map* free_space_map = heap->free_space_map();
  for (; current < limit; current++) {
    Object* o = *current;
    Address current_address = reinterpret_cast<Address>(current);
    // Skip free space.
    if (o == free_space_map) {
      // NOTE: the original declared a second, shadowing |current_address|
      // here with the identical value; the outer one is reused instead.
      FreeSpace* free_space =
          FreeSpace::cast(HeapObject::FromAddress(current_address));
      int skip = free_space->Size();
      DCHECK(current_address + skip <= reinterpret_cast<Address>(limit));
      DCHECK(skip > 0);
      // Land on the last word of the free space so the loop increment moves
      // |current| to the first word after it.
      current_address += skip - kPointerSize;
      current = reinterpret_cast<Object**>(current_address);
      continue;
    }
    // Skip the current linear allocation space between top and limit which is
    // unmarked with the free space map, but can contain junk.
    if (current_address == special_garbage_start &&
        special_garbage_end != special_garbage_start) {
      current_address = special_garbage_end - kPointerSize;
      current = reinterpret_cast<Object**>(current_address);
      continue;
    }
    if (!(*filter)(current)) continue;
    DCHECK(current_address < special_garbage_start ||
           current_address >= special_garbage_end);
    DCHECK(reinterpret_cast<uintptr_t>(o) != kFreeListZapValue);
    // We have to check that the pointer does not point into new space
    // without trying to cast it to a heap object since the hash field of
    // a string can contain values like 1 and 3 which are tagged null
    // pointers.
    if (!heap->InNewSpace(o)) continue;
    // Advance the (sorted) store buffer cursor until it reaches or passes
    // the slot we are looking at.
    while (**store_buffer_position < current &&
           *store_buffer_position < store_buffer_top) {
      (*store_buffer_position)++;
    }
    if (**store_buffer_position != current ||
        *store_buffer_position == store_buffer_top) {
      // Walk back to the containing object's map word purely as a debugging
      // aid before aborting.
      Object** obj_start = current;
      while (!(*obj_start)->IsMap()) obj_start--;
      UNREACHABLE();
    }
  }
}
// Check that the store buffer contains all intergenerational pointers by
// scanning a page and ensuring that all pointers to young space are in the
// store buffer.
void Heap::OldPointerSpaceCheckStoreBuffer() {
OldSpace* space = old_pointer_space();
PageIterator pages(space);
store_buffer()->SortUniq();
while (pages.has_next()) {
Page* page = pages.next();
Object** current = reinterpret_cast<Object**>(page->area_start());
Address end = page->area_end();
Object*** store_buffer_position = store_buffer()->Start();
Object*** store_buffer_top = store_buffer()->Top();
Object** limit = reinterpret_cast<Object**>(end);
CheckStoreBuffer(this, current, limit, &store_buffer_position,
store_buffer_top, &EverythingsAPointer, space->top(),
space->limit());
}
}
void Heap::MapSpaceCheckStoreBuffer() {
MapSpace* space = map_space();
PageIterator pages(space);
store_buffer()->SortUniq();
while (pages.has_next()) {
Page* page = pages.next();
Object** current = reinterpret_cast<Object**>(page->area_start());
Address end = page->area_end();
Object*** store_buffer_position = store_buffer()->Start();
Object*** store_buffer_top = store_buffer()->Top();
Object** limit = reinterpret_cast<Object**>(end);
CheckStoreBuffer(this, current, limit, &store_buffer_position,
store_buffer_top, &IsAMapPointerAddress, space->top(),
space->limit());
}
}
void Heap::LargeObjectSpaceCheckStoreBuffer() {
LargeObjectIterator it(lo_space());
for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
// We only have code, sequential strings, or fixed arrays in large
// object space, and only fixed arrays can possibly contain pointers to
// the young generation.
if (object->IsFixedArray()) {
Object*** store_buffer_position = store_buffer()->Start();
Object*** store_buffer_top = store_buffer()->Top();
Object** current = reinterpret_cast<Object**>(object->address());
Object** limit =
reinterpret_cast<Object**>(object->address() + object->Size());
CheckStoreBuffer(this, current, limit, &store_buffer_position,
store_buffer_top, &EverythingsAPointer, NULL, NULL);
}
}
}
#endif
void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) { void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
IterateStrongRoots(v, mode); IterateStrongRoots(v, mode);
IterateWeakRoots(v, mode); IterateWeakRoots(v, mode);
......
...@@ -995,10 +995,6 @@ class Heap { ...@@ -995,10 +995,6 @@ class Heap {
void Print(); void Print();
void PrintHandles(); void PrintHandles();
void OldPointerSpaceCheckStoreBuffer();
void MapSpaceCheckStoreBuffer();
void LargeObjectSpaceCheckStoreBuffer();
// Report heap statistics. // Report heap statistics.
void ReportHeapStatistics(const char* title); void ReportHeapStatistics(const char* title);
void ReportCodeStatistics(const char* title); void ReportCodeStatistics(const char* title);
......
...@@ -108,26 +108,6 @@ void StoreBuffer::StoreBufferOverflow(Isolate* isolate) { ...@@ -108,26 +108,6 @@ void StoreBuffer::StoreBufferOverflow(Isolate* isolate) {
} }
void StoreBuffer::Uniq() {
// Remove adjacent duplicates and cells that do not point at new space.
Address previous = NULL;
Address* write = old_start_;
DCHECK(may_move_store_buffer_entries_);
for (Address* read = old_start_; read < old_top_; read++) {
Address current = *read;
if (current != previous) {
Object* object = reinterpret_cast<Object*>(
base::NoBarrier_Load(reinterpret_cast<base::AtomicWord*>(current)));
if (heap_->InNewSpace(object)) {
*write++ = current;
}
}
previous = current;
}
old_top_ = write;
}
bool StoreBuffer::SpaceAvailable(intptr_t space_needed) { bool StoreBuffer::SpaceAvailable(intptr_t space_needed) {
return old_limit_ - old_top_ >= space_needed; return old_limit_ - old_top_ >= space_needed;
} }
...@@ -247,20 +227,6 @@ void StoreBuffer::Filter(int flag) { ...@@ -247,20 +227,6 @@ void StoreBuffer::Filter(int flag) {
} }
// Sorts and deduplicates the old buffer (after compacting the new part
// into it), leaving it in the canonical state the verification scans need.
void StoreBuffer::SortUniq() {
  Compact();
  if (!old_buffer_is_sorted_) {
    std::sort(old_start_, old_top_);
    Uniq();
    old_buffer_is_sorted_ = true;
    // The filtering hash sets no longer agree with the compacted buffer,
    // so reset them.
    ClearFilteringHashSets();
  }
}
bool StoreBuffer::PrepareForIteration() { bool StoreBuffer::PrepareForIteration() {
Compact(); Compact();
PointerChunkIterator it(heap_); PointerChunkIterator it(heap_);
...@@ -285,41 +251,6 @@ bool StoreBuffer::PrepareForIteration() { ...@@ -285,41 +251,6 @@ bool StoreBuffer::PrepareForIteration() {
} }
#ifdef DEBUG
// Debug-only scrub of the store buffer: drop the filtering hash sets,
// purge stale entries (slots that no longer point into new space), and
// make sure at least half of the buffer is free afterwards.
void StoreBuffer::Clean() {
  ClearFilteringHashSets();
  Uniq();  // Also removes things that no longer point to new space.
  const intptr_t kSpaceWanted = kStoreBufferSize / 2;
  EnsureSpace(kSpaceWanted);
}
// One-element cache of the most recently found buffer slot, to speed up
// repeated queries for the same cell.
static Address* in_store_buffer_1_element_cache = NULL;

// Slow linear scan, for asserts only: returns true if |cell_address| is
// recorded in either the new part or the old part of the store buffer.
// Trivially answers true when slow asserts are disabled.
bool StoreBuffer::CellIsInStoreBuffer(Address cell_address) {
  if (!FLAG_enable_slow_asserts) return true;
  if (in_store_buffer_1_element_cache != NULL &&
      *in_store_buffer_1_element_cache == cell_address) {
    return true;
  }
  Address* new_top = reinterpret_cast<Address*>(heap_->store_buffer_top());
  // Scan the new part of the buffer first, then the old part, each from the
  // most recently added entry downwards — same order as the original code.
  Address* const region_begin[] = {start_, old_start_};
  Address* const region_end[] = {new_top, old_top_};
  for (int region = 0; region < 2; region++) {
    for (Address* slot = region_end[region] - 1;
         slot >= region_begin[region]; slot--) {
      if (*slot == cell_address) {
        in_store_buffer_1_element_cache = slot;
        return true;
      }
    }
  }
  return false;
}
#endif
void StoreBuffer::ClearFilteringHashSets() { void StoreBuffer::ClearFilteringHashSets() {
if (!hash_sets_are_empty_) { if (!hash_sets_are_empty_) {
memset(reinterpret_cast<void*>(hash_set_1_), 0, memset(reinterpret_cast<void*>(hash_set_1_), 0,
......
...@@ -83,21 +83,11 @@ class StoreBuffer { ...@@ -83,21 +83,11 @@ class StoreBuffer {
bool old_buffer_is_sorted() { return old_buffer_is_sorted_; } bool old_buffer_is_sorted() { return old_buffer_is_sorted_; }
bool old_buffer_is_filtered() { return old_buffer_is_filtered_; } bool old_buffer_is_filtered() { return old_buffer_is_filtered_; }
// Goes through the store buffer removing pointers to things that have
// been promoted. Rebuilds the store buffer completely if it overflowed.
void SortUniq();
void EnsureSpace(intptr_t space_needed); void EnsureSpace(intptr_t space_needed);
void Verify(); void Verify();
bool PrepareForIteration(); bool PrepareForIteration();
#ifdef DEBUG
void Clean();
// Slow, for asserts only.
bool CellIsInStoreBuffer(Address cell);
#endif
void Filter(int flag); void Filter(int flag);
// Eliminates all stale store buffer entries from the store buffer, i.e., // Eliminates all stale store buffer entries from the store buffer, i.e.,
...@@ -144,7 +134,6 @@ class StoreBuffer { ...@@ -144,7 +134,6 @@ class StoreBuffer {
void ClearFilteringHashSets(); void ClearFilteringHashSets();
bool SpaceAvailable(intptr_t space_needed); bool SpaceAvailable(intptr_t space_needed);
void Uniq();
void ExemptPopularPages(int prime_sample_step, int threshold); void ExemptPopularPages(int prime_sample_step, int threshold);
void ProcessOldToNewSlot(Address slot_address, void ProcessOldToNewSlot(Address slot_address,
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment