Commit f6fef241 authored by erik.corry@gmail.com

Move some heap verification code in under the --verify-heap flag to speed up debug mode tests.

Review URL: http://codereview.chromium.org/8381040

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@9774 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent abeb5a4d
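The change is mechanical across the files touched below: heap-verification calls that previously ran on every debug-mode operation (or were keyed off FLAG_enable_slow_asserts) are now gated on the --verify-heap runtime flag, so ordinary debug test runs skip the expensive checks. A self-contained sketch of the guard pattern follows; the class, the flag parsing, and main() are illustrative stand-ins rather than V8 code, and only the FLAG_verify_heap / #ifdef DEBUG shape mirrors the diff.

#include <cstdio>
#include <cstring>

// Stand-in for V8's FLAG_verify_heap, which mirrors the --verify-heap flag.
static bool FLAG_verify_heap = false;

struct Cache {
  // Stand-in for checkers such as JSFunctionResultCacheVerify() or Code::Verify().
  void Verify() { std::printf("running expensive consistency checks\n"); }

  void Update() {
    // ... normal mutation work would happen here ...
#ifdef DEBUG
    // Compiled into debug builds only, and now also opt-in at runtime.
    if (FLAG_verify_heap) {
      Verify();
    }
#endif
  }
};

int main(int argc, char** argv) {
  for (int i = 1; i < argc; ++i) {
    if (std::strcmp(argv[i], "--verify-heap") == 0) FLAG_verify_heap = true;
  }
  Cache cache;
  cache.Update();  // Verifies only in a debug build run with --verify-heap.
  return 0;
}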
@@ -590,7 +590,9 @@ void ExternalStringTable::AddOldString(String* string) {
void ExternalStringTable::ShrinkNewStrings(int position) {
new_space_strings_.Rewind(position);
if (FLAG_verify_heap) {
Verify();
}
}
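FLAG_verify_heap in the hunk above is the in-code name of the --verify-heap command-line flag. As an assumption about the wiring (V8 declares its flags through macros and exposes each one as a FLAG_<name> variable), here is a simplified stand-in; the macro name, default value, and help text below are guesses for illustration, not the actual definition:

// SKETCH ONLY: a simplified version of a flag-declaration mechanism.
#define DEFINE_BOOL_SKETCH(name, default_value, comment) \
  bool FLAG_##name = (default_value);

DEFINE_BOOL_SKETCH(verify_heap, false,
                   "verify heap pointers before and after GC")

int main() {
  // In a real run the command-line parser would flip this to true for --verify-heap.
  return FLAG_verify_heap ? 0 : 1;
}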
@@ -693,7 +693,9 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
PROFILE(isolate_, CodeMovingGCEvent());
}
if (FLAG_verify_heap) {
VerifySymbolTable();
}
if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
ASSERT(!allocation_allowed_);
GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
@@ -789,7 +791,9 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
global_gc_epilogue_callback_();
}
if (FLAG_verify_heap) {
VerifySymbolTable();
}
return next_gc_likely_to_collect_more;
}
@@ -983,7 +987,7 @@ void StoreBufferRebuilder::Callback(MemoryChunk* page, StoreBufferEvent event) {
void Heap::Scavenge() {
#ifdef DEBUG
if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers();
if (FLAG_verify_heap) VerifyNonPointerSpacePointers();
#endif
gc_state_ = SCAVENGE;
@@ -1112,7 +1116,9 @@ String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
void Heap::UpdateNewSpaceReferencesInExternalStringTable(
ExternalStringTableUpdaterCallback updater_func) {
if (FLAG_verify_heap) {
external_string_table_.Verify();
}
if (external_string_table_.new_space_strings_.is_empty()) return;
@@ -2910,7 +2916,9 @@ MaybeObject* Heap::AllocateSubString(String* buffer,
ASSERT(buffer->IsFlat());
#if DEBUG
if (FLAG_verify_heap) {
buffer->StringVerify();
}
#endif
Object* result;
@@ -3156,7 +3164,9 @@ MaybeObject* Heap::CreateCode(const CodeDesc& desc,
code->CopyFrom(desc);
#ifdef DEBUG
if (FLAG_verify_heap) {
code->Verify();
}
#endif
return code;
}
@@ -3236,7 +3246,9 @@ MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
new_code->Relocate(new_addr - old_addr);
#ifdef DEBUG
if (FLAG_verify_heap) {
code->Verify();
}
#endif
return new_code;
}
@@ -6345,7 +6357,9 @@ void ExternalStringTable::CleanUp() {
old_space_strings_[last++] = old_space_strings_[i];
}
old_space_strings_.Rewind(last);
if (FLAG_verify_heap) {
Verify();
}
}
@@ -473,7 +473,7 @@ void IncrementalMarking::StartMarking(CompactionFlag flag) {
#ifdef DEBUG
// Marking bits are cleared by the sweeper.
if (FLAG_enable_slow_asserts) {
if (FLAG_verify_heap) {
heap_->mark_compact_collector()->VerifyMarkbitsAreClean();
}
#endif
@@ -765,7 +765,10 @@ bool Object::IsJSFunctionResultCache() {
return false;
}
#ifdef DEBUG
reinterpret_cast<JSFunctionResultCache*>(this)->JSFunctionResultCacheVerify();
if (FLAG_verify_heap) {
reinterpret_cast<JSFunctionResultCache*>(this)->
JSFunctionResultCacheVerify();
}
#endif
return true;
}
@@ -777,7 +780,9 @@ bool Object::IsNormalizedMapCache() {
return false;
}
#ifdef DEBUG
if (FLAG_verify_heap) {
reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
}
#endif
return true;
}
@@ -3231,7 +3231,9 @@ MaybeObject* NormalizedMapCache::Get(JSObject* obj,
if (result->IsMap() &&
Map::cast(result)->EquivalentToForNormalization(fast, mode)) {
#ifdef DEBUG
if (FLAG_verify_heap) {
Map::cast(result)->SharedMapVerify();
}
if (FLAG_enable_slow_asserts) {
// The cached map should match newly created normalized map bit-by-bit.
Object* fresh;
@@ -4727,7 +4729,7 @@ MaybeObject* Map::CopyNormalized(PropertyNormalizationMode mode,
Map::cast(result)->set_is_shared(sharing == SHARED_NORMALIZED_MAP);
#ifdef DEBUG
if (Map::cast(result)->is_shared()) {
if (FLAG_verify_heap && Map::cast(result)->is_shared()) {
Map::cast(result)->SharedMapVerify();
}
#endif
@@ -13199,7 +13199,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFromCache) {
}
#ifdef DEBUG
if (FLAG_verify_heap) {
cache_handle->JSFunctionResultCacheVerify();
}
#endif
// Function invocation may have cleared the cache. Reread all the data.
@@ -13228,7 +13230,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFromCache) {
cache_handle->set_finger_index(index);
#ifdef DEBUG
if (FLAG_verify_heap) {
cache_handle->JSFunctionResultCacheVerify();
}
#endif
return *value;
@@ -95,10 +95,6 @@ void HeapObjectIterator::Initialize(PagedSpace* space,
cur_end_ = end;
page_mode_ = mode;
size_func_ = size_f;
#ifdef DEBUG
Verify();
#endif
}
@@ -123,13 +119,6 @@ bool HeapObjectIterator::AdvanceToNextPage() {
}
#ifdef DEBUG
void HeapObjectIterator::Verify() {
// TODO(gc): We should do something here.
}
#endif
// -----------------------------------------------------------------------------
// CodeRange
@@ -1132,11 +1132,6 @@ class HeapObjectIterator: public ObjectIterator {
Address end,
PageMode mode,
HeapObjectCallback size_func);
#ifdef DEBUG
// Verifies whether fields have valid values.
void Verify();
#endif
};
@@ -390,20 +390,20 @@ void StoreBuffer::VerifyPointers(LargeObjectSpace* space) {
void StoreBuffer::Verify() {
#ifdef DEBUG
if (FLAG_enable_slow_asserts || FLAG_verify_heap) {
VerifyPointers(heap_->old_pointer_space(),
&StoreBuffer::FindPointersToNewSpaceInRegion);
VerifyPointers(heap_->map_space(),
&StoreBuffer::FindPointersToNewSpaceInMapsRegion);
VerifyPointers(heap_->lo_space());
}
#endif
}
void StoreBuffer::GCEpilogue() {
during_gc_ = false;
if (FLAG_verify_heap) {
Verify();
}
}