Commit 6e1bafc8 authored by verwaest@chromium.org

Revert "Allow partial scanning of large arrays in order to avoid"

This reverts commit r12619.

BUG=

Review URL: https://chromiumcodereview.appspot.com/11029023

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@12660 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent a92a9c8a
@@ -1359,12 +1359,11 @@ void Heap::UpdateNewSpaceReferencesInExternalStringTable(
   if (external_string_table_.new_space_strings_.is_empty()) return;
-  Object** start_slot = &external_string_table_.new_space_strings_[0];
-  Object** end_slot =
-      start_slot + external_string_table_.new_space_strings_.length();
-  Object** last = start_slot;
-  for (Object** p = start_slot; p < end_slot; ++p) {
+  Object** start = &external_string_table_.new_space_strings_[0];
+  Object** end = start + external_string_table_.new_space_strings_.length();
+  Object** last = start;
+  for (Object** p = start; p < end; ++p) {
     ASSERT(InFromSpace(*p));
     String* target = updater_func(this, p);
@@ -1382,8 +1381,8 @@ void Heap::UpdateNewSpaceReferencesInExternalStringTable(
     }
   }
-  ASSERT(last <= end_slot);
-  external_string_table_.ShrinkNewStrings(static_cast<int>(last - start_slot));
+  ASSERT(last <= end);
+  external_string_table_.ShrinkNewStrings(static_cast<int>(last - start));
 }
@@ -1392,10 +1391,9 @@ void Heap::UpdateReferencesInExternalStringTable(
   // Update old space string references.
   if (external_string_table_.old_space_strings_.length() > 0) {
-    Object** start_slot = &external_string_table_.old_space_strings_[0];
-    Object** end_slot =
-        start_slot + external_string_table_.old_space_strings_.length();
-    for (Object** p = start_slot; p < end_slot; ++p) *p = updater_func(this, p);
+    Object** start = &external_string_table_.old_space_strings_[0];
+    Object** end = start + external_string_table_.old_space_strings_.length();
+    for (Object** p = start; p < end; ++p) *p = updater_func(this, p);
   }
   UpdateNewSpaceReferencesInExternalStringTable(updater_func);
@@ -6792,11 +6790,11 @@ void PathTracer::MarkRecursively(Object** p, MarkVisitor* mark_visitor) {
   // Scan the object body.
   if (is_native_context && (visit_mode_ == VISIT_ONLY_STRONG)) {
     // This is specialized to scan Context's properly.
-    Object** start_slot = reinterpret_cast<Object**>(obj->address() +
-                                                     Context::kHeaderSize);
-    Object** end_slot = reinterpret_cast<Object**>(obj->address() +
-        Context::kHeaderSize + Context::FIRST_WEAK_SLOT * kPointerSize);
-    mark_visitor->VisitPointers(start_slot, end_slot);
+    Object** start = reinterpret_cast<Object**>(obj->address() +
+                                                Context::kHeaderSize);
+    Object** end = reinterpret_cast<Object**>(obj->address() +
+        Context::kHeaderSize + Context::FIRST_WEAK_SLOT * kPointerSize);
+    mark_visitor->VisitPointers(start, end);
   } else {
     obj->IterateBody(map_p->instance_type(),
                      obj->SizeFromMap(map_p),
...
@@ -190,11 +190,8 @@ class IncrementalMarkingMarkingVisitor
   static void VisitJSWeakMap(Map* map, HeapObject* object) {
     Heap* heap = map->GetHeap();
-    Object** start_slot =
-        HeapObject::RawField(object, JSWeakMap::kPropertiesOffset);
     VisitPointers(heap,
-                  start_slot,
-                  start_slot,
+                  HeapObject::RawField(object, JSWeakMap::kPropertiesOffset),
                   HeapObject::RawField(object, JSWeakMap::kSize));
   }
@@ -209,54 +206,15 @@ class IncrementalMarkingMarkingVisitor
                                    void>::Visit(map, object);
   }
-  static const int kScanningChunk = 32 * 1024;
-
-  static int VisitHugeArray(FixedArray* array) {
-    Heap* heap = array->GetHeap();
-    MemoryChunk* chunk = MemoryChunk::FromAddress(array->address());
-    Object** start_slot = array->data_start();
-    int length = array->length();
-    if (chunk->owner()->identity() != LO_SPACE) {
-      VisitPointers(heap, start_slot, start_slot, start_slot + length);
-      return length;
-    }
-    int from =
-        chunk->IsPartiallyScanned() ? chunk->PartiallyScannedProgress() : 0;
-    int to = Min(from + kScanningChunk, length);
-    VisitPointers(heap, start_slot, start_slot + from, start_slot + to);
-    if (to == length) {
-      // If it went from black to grey while it was waiting for the next bit to
-      // be scanned then we have to start the scan again.
-      MarkBit mark_bit = Marking::MarkBitFrom(array);
-      if (!Marking::IsBlack(mark_bit)) {
-        ASSERT(Marking::IsGrey(mark_bit));
-        chunk->SetPartiallyScannedProgress(0);
-      } else {
-        chunk->SetCompletelyScanned();
-      }
-    } else {
-      chunk->SetPartiallyScannedProgress(to);
-    }
-    return to - from;
-  }
-
   static inline void VisitJSFunction(Map* map, HeapObject* object) {
     Heap* heap = map->GetHeap();
     // Iterate over all fields in the body but take care in dealing with
     // the code entry and skip weak fields.
-    Object** start_slot =
-        HeapObject::RawField(object, JSFunction::kPropertiesOffset);
     VisitPointers(heap,
-                  start_slot,
-                  start_slot,
+                  HeapObject::RawField(object, JSFunction::kPropertiesOffset),
                   HeapObject::RawField(object, JSFunction::kCodeEntryOffset));
     VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
     VisitPointers(heap,
-                  start_slot,
                   HeapObject::RawField(object,
                       JSFunction::kCodeEntryOffset + kPointerSize),
                   HeapObject::RawField(object,
@@ -271,14 +229,11 @@ class IncrementalMarkingMarkingVisitor
     }
   }
-  INLINE(static void VisitPointers(Heap* heap,
-                                   Object** anchor,
-                                   Object** start,
-                                   Object** end)) {
+  INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
     for (Object** p = start; p < end; p++) {
       Object* obj = *p;
       if (obj->NonFailureIsHeapObject()) {
-        heap->mark_compact_collector()->RecordSlot(anchor, p, obj);
+        heap->mark_compact_collector()->RecordSlot(start, p, obj);
         MarkObject(heap, obj);
       }
     }
@@ -680,8 +635,7 @@ void IncrementalMarking::UpdateMarkingDequeAfterScavenge() {
 #ifdef DEBUG
       MarkBit mark_bit = Marking::MarkBitFrom(obj);
       ASSERT(Marking::IsGrey(mark_bit) ||
-             (obj->IsFiller() && Marking::IsWhite(mark_bit)) ||
-             MemoryChunk::FromAddress(obj->address())->IsPartiallyScanned());
+             (obj->IsFiller() && Marking::IsWhite(mark_bit)));
 #endif
     }
   }
@@ -704,7 +658,6 @@ void IncrementalMarking::Hurry() {
     // was stopped.
     Map* filler_map = heap_->one_pointer_filler_map();
     Map* native_context_map = heap_->native_context_map();
-    do {
     while (!marking_deque_.IsEmpty()) {
       HeapObject* obj = marking_deque_.Pop();
@@ -716,36 +669,18 @@ void IncrementalMarking::Hurry() {
       } else if (map == native_context_map) {
         // Native contexts have weak fields.
         IncrementalMarkingMarkingVisitor::VisitNativeContext(map, obj);
-        ASSERT(!Marking::IsBlack(Marking::MarkBitFrom(obj)));
-        MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
-      } else if (map->instance_type() == FIXED_ARRAY_TYPE &&
-                 FixedArray::cast(obj)->length() >
-                     IncrementalMarkingMarkingVisitor::kScanningChunk) {
-        MarkBit map_mark_bit = Marking::MarkBitFrom(map);
-        if (Marking::IsWhite(map_mark_bit)) {
-          WhiteToGreyAndPush(map, map_mark_bit);
-        }
-        MarkBit mark_bit = Marking::MarkBitFrom(obj);
-        if (!Marking::IsBlack(mark_bit)) {
-          MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
-        } else {
-          ASSERT(
-              MemoryChunk::FromAddress(obj->address())->IsPartiallyScanned());
-        }
-        IncrementalMarkingMarkingVisitor::VisitHugeArray(
-            FixedArray::cast(obj));
       } else {
         MarkBit map_mark_bit = Marking::MarkBitFrom(map);
         if (Marking::IsWhite(map_mark_bit)) {
           WhiteToGreyAndPush(map, map_mark_bit);
         }
         IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
-        ASSERT(!Marking::IsBlack(Marking::MarkBitFrom(obj)));
-        MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
       }
       MarkBit mark_bit = Marking::MarkBitFrom(obj);
+      ASSERT(!Marking::IsBlack(mark_bit));
       Marking::MarkBlack(mark_bit);
+      MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
     }
     state_ = COMPLETE;
     if (FLAG_trace_incremental_marking) {
@@ -753,8 +688,6 @@ void IncrementalMarking::Hurry() {
       PrintF("[IncrementalMarking] Complete (hurry), spent %d ms.\n",
              static_cast<int>(end - start));
     }
-    MarkCompactCollector::ProcessLargePostponedArrays(heap_, &marking_deque_);
-    } while (!marking_deque_.IsEmpty());
   }

   if (FLAG_cleanup_code_caches_at_gc) {
@@ -889,7 +822,6 @@ void IncrementalMarking::Step(intptr_t allocated_bytes,
   } else if (state_ == MARKING) {
     Map* filler_map = heap_->one_pointer_filler_map();
     Map* native_context_map = heap_->native_context_map();
-    while (true) {
     while (!marking_deque_.IsEmpty() && bytes_to_process > 0) {
       HeapObject* obj = marking_deque_.Pop();
@@ -899,6 +831,7 @@ void IncrementalMarking::Step(intptr_t allocated_bytes,
       if (map == filler_map) continue;

       int size = obj->SizeFromMap(map);
+      bytes_to_process -= size;
       MarkBit map_mark_bit = Marking::MarkBitFrom(map);
       if (Marking::IsWhite(map_mark_bit)) {
         WhiteToGreyAndPush(map, map_mark_bit);
@@ -914,46 +847,17 @@ void IncrementalMarking::Step(intptr_t allocated_bytes,
         MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache());

         IncrementalMarkingMarkingVisitor::VisitNativeContext(map, ctx);
-        bytes_to_process -= size;
-        SLOW_ASSERT(Marking::IsGrey(Marking::MarkBitFrom(obj)));
-        MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
-      } else if (map->instance_type() == FIXED_ARRAY_TYPE &&
-                 FixedArray::cast(obj)->length() >
-                     IncrementalMarkingMarkingVisitor::kScanningChunk) {
-        SLOW_ASSERT(
-            Marking::IsGrey(Marking::MarkBitFrom(obj)) ||
-            MemoryChunk::FromAddress(obj->address())->IsPartiallyScanned());
-        bytes_to_process -=
-            IncrementalMarkingMarkingVisitor::VisitHugeArray(
-                FixedArray::cast(obj));
-        MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
-        if (!Marking::IsBlack(obj_mark_bit)) {
-          MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
-        }
       } else {
         IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
-        bytes_to_process -= size;
-        SLOW_ASSERT(
-            Marking::IsGrey(Marking::MarkBitFrom(obj)) ||
-            (obj->IsFiller() && Marking::IsWhite(Marking::MarkBitFrom(obj))));
-        MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
       }

       MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
+      SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) ||
+                  (obj->IsFiller() && Marking::IsWhite(obj_mark_bit)));
       Marking::MarkBlack(obj_mark_bit);
+      MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
     }
-    if (marking_deque_.IsEmpty()) {
-      MarkCompactCollector::ProcessLargePostponedArrays(heap_,
-                                                        &marking_deque_);
-      if (marking_deque_.IsEmpty()) {
-        MarkingComplete(action);
-        break;
-      }
-    } else {
-      ASSERT(bytes_to_process <= 0);
-      break;
-    }
-    }
+    if (marking_deque_.IsEmpty()) MarkingComplete(action);

     steps_count_++;
...
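For context on what this revert removes: the VisitHugeArray() helper deleted above scanned a large FixedArray at most 32 K slots per marking step and parked the resume offset on the owning MemoryChunk, instead of traversing the whole array in one go. A minimal standalone sketch of that resumable-chunk pattern, using hypothetical plain-C++ types rather than the V8 classes:

#include <algorithm>
#include <cstddef>
#include <vector>

// Hypothetical stand-in for a large array plus its per-chunk resume offset.
struct BigArray {
  std::vector<void*> slots;  // pointer slots the marker has to visit
  size_t scan_progress;      // where the next increment resumes (0 = start)
};

const size_t kScanningChunk = 32 * 1024;  // slots visited per increment

// Visit at most kScanningChunk slots and return how many were processed.
// The caller keeps re-queuing the array until a step reaches the end.
size_t VisitHugeArrayStep(BigArray* array,
                          void (*visit)(void** start, void** end)) {
  size_t from = array->scan_progress;
  size_t to = std::min(from + kScanningChunk, array->slots.size());
  if (to > from) visit(array->slots.data() + from, array->slots.data() + to);
  // Record progress, or reset once the whole array has been covered.
  array->scan_progress = (to == array->slots.size()) ? 0 : to;
  return to - from;
}

In the deleted code the resume offset lived on the MemoryChunk (SetPartiallyScannedProgress() / PartiallyScannedProgress()) and the black/grey mark bit decided whether a finished array had to be rescanned; after the revert every array is again scanned in a single pass.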
@@ -83,9 +83,6 @@ void MarkCompactCollector::RecordSlot(Object** anchor_slot,
                                       Object** slot,
                                       Object* object) {
   Page* object_page = Page::FromAddress(reinterpret_cast<Address>(object));
-  // Ensure the anchor slot is on the first 'page' of a large object.
-  ASSERT(Page::FromAddress(reinterpret_cast<Address>(anchor_slot))->owner() !=
-         NULL);
   if (object_page->IsEvacuationCandidate() &&
       !ShouldSkipEvacuationSlotRecording(anchor_slot)) {
     if (!SlotsBuffer::AddTo(&slots_buffer_allocator_,
...
This diff is collapsed.
@@ -240,35 +240,6 @@ class MarkingDeque {
   int mask() { return mask_; }
   void set_top(int top) { top_ = top; }
-  int space_left() {
-    // If we already overflowed we may as well just say there is lots of
-    // space left.
-    if (overflowed_) return mask_ + 1;
-    if (IsEmpty()) return mask_ + 1;
-    if (IsFull()) return 0;
-    return (bottom_ - top_) & mask_;
-  }
-
-#ifdef DEBUG
-  const char* Status() {
-    if (overflowed_) return "Overflowed";
-    if (IsEmpty()) return "Empty";
-    if (IsFull()) return "Full";
-    int oct = (((top_ - bottom_) & mask_) * 8) / (mask_ + 1);
-    switch (oct) {
-      case 0: return "Almost empty";
-      case 1: return "1/8 full";
-      case 2: return "2/8 full";
-      case 3: return "3/8 full";
-      case 4: return "4/8 full";
-      case 5: return "5/8 full";
-      case 6: return "6/8 full";
-      case 7: return "7/8 full";
-    }
-    return "??";
-  }
-#endif
-
  private:
   HeapObject** array_;
   // array_[(top - 1) & mask_] is the top element in the deque. The Deque is
@@ -596,10 +567,6 @@ class MarkCompactCollector {
   bool is_compacting() const { return compacting_; }
-  // Find the large objects that are not completely scanned, but have been
-  // postponed to later.
-  static void ProcessLargePostponedArrays(Heap* heap, MarkingDeque* deque);
-
  private:
   MarkCompactCollector();
   ~MarkCompactCollector();
...
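The MarkingDeque::space_left() helper removed above relies on the deque capacity being a power of two, so the free-slot count can be computed with a single mask operation instead of a modulo. A small self-contained sketch of that arithmetic, with hypothetical names rather than the V8 class:

#include <cassert>
#include <cstdio>

// Ring-buffer bookkeeping for a power-of-two capacity; mask = capacity - 1.
// Elements occupy [bottom, top) modulo capacity; top is the next push slot.
struct RingIndices {
  int top;
  int bottom;
  int mask;  // capacity - 1

  bool IsEmpty() const { return top == bottom; }
  bool IsFull() const { return ((top + 1) & mask) == bottom; }
  int SpaceLeft() const {
    if (IsEmpty()) return mask + 1;
    if (IsFull()) return 0;
    return (bottom - top) & mask;  // the mask makes the subtraction wrap
  }
};

int main() {
  RingIndices r = {6, 2, 7};   // capacity 8, slots 2..5 occupied
  assert(r.SpaceLeft() == 4);  // (2 - 6) & 7 == 4
  std::printf("space left: %d\n", r.SpaceLeft());
  return 0;
}

The deleted helper also short-circuited the overflowed case ("we may as well just say there is lots of space left"), which the sketch omits.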
@@ -262,11 +262,9 @@ void StaticMarkingVisitor<StaticVisitor>::VisitMap(
       map_object->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
     MarkMapContents(heap, map_object);
   } else {
-    Object** start_slot =
-        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset);
-    Object** end_slot =
-        HeapObject::RawField(object, Map::kPointerFieldsEndOffset);
-    StaticVisitor::VisitPointers(heap, start_slot, start_slot, end_slot);
+    StaticVisitor::VisitPointers(heap,
+        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
+        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
   }
 }
@@ -288,12 +286,9 @@ void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(
     Map* map, HeapObject* object) {
   int last_property_offset =
       JSRegExp::kSize + kPointerSize * map->inobject_properties();
-  Object** start_slot =
-      HeapObject::RawField(object, JSRegExp::kPropertiesOffset);
-  Object** end_slot =
-      HeapObject::RawField(object, last_property_offset);
-  StaticVisitor::VisitPointers(
-      map->GetHeap(), start_slot, start_slot, end_slot);
+  StaticVisitor::VisitPointers(map->GetHeap(),
+      HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
+      HeapObject::RawField(object, last_property_offset));
 }
@@ -320,11 +315,9 @@ void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(
   // Mark the pointer fields of the Map. Since the transitions array has
   // been marked already, it is fine that one of these fields contains a
   // pointer to it.
-  Object** start_slot =
-      HeapObject::RawField(map, Map::kPointerFieldsBeginOffset);
-  Object** end_slot =
-      HeapObject::RawField(map, Map::kPointerFieldsEndOffset);
-  StaticVisitor::VisitPointers(heap, start_slot, start_slot, end_slot);
+  StaticVisitor::VisitPointers(heap,
+      HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
+      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
 }
...
@@ -213,7 +213,7 @@ class BodyVisitorBase : public AllStatic {
                                                          start_offset);
     Object** end_slot = reinterpret_cast<Object**>(object->address() +
                                                    end_offset);
-    StaticVisitor::VisitPointers(heap, start_slot, start_slot, end_slot);
+    StaticVisitor::VisitPointers(heap, start_slot, end_slot);
   }
 };
@@ -283,26 +283,21 @@ class StaticNewSpaceVisitor : public StaticVisitorBase {
     return table_.GetVisitor(map)(map, obj);
   }
-  static inline void VisitPointers(
-      Heap* heap, Object** anchor, Object** start, Object** end) {
+  static inline void VisitPointers(Heap* heap, Object** start, Object** end) {
     for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(heap, p);
   }
  private:
   static inline int VisitJSFunction(Map* map, HeapObject* object) {
     Heap* heap = map->GetHeap();
-    Object** start_slot =
-        HeapObject::RawField(object, JSFunction::kPropertiesOffset);
     VisitPointers(heap,
-                  start_slot,
-                  start_slot,
+                  HeapObject::RawField(object, JSFunction::kPropertiesOffset),
                   HeapObject::RawField(object, JSFunction::kCodeEntryOffset));
     // Don't visit code entry. We are using this visitor only during scavenges.
     VisitPointers(
         heap,
-        start_slot,
         HeapObject::RawField(object,
                              JSFunction::kCodeEntryOffset + kPointerSize),
         HeapObject::RawField(object,
...
@@ -2679,10 +2679,12 @@ MaybeObject* LargeObjectSpace::AllocateRaw(int object_size,
   HeapObject* object = page->GetObject();
-  // Make the object consistent so the large object space can be traversed.
+#ifdef DEBUG
+  // Make the object consistent so the heap can be vefified in OldSpaceStep.
   reinterpret_cast<Object**>(object->address())[0] =
       heap()->fixed_array_map();
   reinterpret_cast<Object**>(object->address())[1] = Smi::FromInt(0);
+#endif
   heap()->incremental_marking()->OldSpaceStep(object_size);
   return object;
...
@@ -400,15 +400,6 @@ class MemoryChunk {
     WAS_SWEPT_PRECISELY,
     WAS_SWEPT_CONSERVATIVELY,
-    // Used for large objects only. Indicates that the object has been
-    // partially scanned by the incremental mark-sweep GC. Objects that have
-    // been partially scanned are marked black so that the write barrier
-    // triggers for them, and they are counted as live bytes. If the mutator
-    // writes to them they may be turned grey and subtracted from the live byte
-    // list. They move back to the marking deque either by an iteration over
-    // the large object space or in the write barrier.
-    IS_PARTIALLY_SCANNED,
-
     // Last flag, keep at bottom.
     NUM_MEMORY_CHUNK_FLAGS
   };
@@ -429,25 +420,6 @@ class MemoryChunk {
       (1 << IN_FROM_SPACE) |
       (1 << IN_TO_SPACE);
-  static const int kIsPartiallyScannedMask = 1 << IS_PARTIALLY_SCANNED;
-
-  void SetPartiallyScannedProgress(int progress) {
-    SetFlag(IS_PARTIALLY_SCANNED);
-    partially_scanned_progress_ = progress;
-  }
-
-  bool IsPartiallyScanned() {
-    return IsFlagSet(IS_PARTIALLY_SCANNED);
-  }
-
-  void SetCompletelyScanned() {
-    ClearFlag(IS_PARTIALLY_SCANNED);
-  }
-
-  int PartiallyScannedProgress() {
-    ASSERT(IsPartiallyScanned());
-    return partially_scanned_progress_;
-  }
-
   void SetFlag(int flag) {
     flags_ |= static_cast<uintptr_t>(1) << flag;
@@ -534,14 +506,8 @@ class MemoryChunk {
   static const size_t kWriteBarrierCounterOffset =
       kSlotsBufferOffset + kPointerSize + kPointerSize;
-  static const size_t kPartiallyScannedProgress =
-      kWriteBarrierCounterOffset + kPointerSize;
-
-  // Actually the partially_scanned_progress_ member is only an int, but on
-  // 64 bit the size of MemoryChunk gets rounded up to a 64 bit size so we
-  // have to have the header start kPointerSize after the
-  // partially_scanned_progress_ member.
-  static const size_t kHeaderSize = kPartiallyScannedProgress + kPointerSize;
+  static const size_t kHeaderSize = kWriteBarrierCounterOffset + kPointerSize;
   static const int kBodyOffset =
       CODE_POINTER_ALIGN(MAP_POINTER_ALIGN(kHeaderSize + Bitmap::kSize));
@@ -678,7 +644,6 @@ class MemoryChunk {
   SlotsBuffer* slots_buffer_;
   SkipList* skip_list_;
   intptr_t write_barrier_counter_;
-  int partially_scanned_progress_;
   static MemoryChunk* Initialize(Heap* heap,
                                  Address base,
...
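The deleted comment above kHeaderSize explains why partially_scanned_progress_, although only an int, cost a full pointer of header space: on 64-bit targets the chunk header is rounded up to pointer alignment anyway. A tiny illustration of that effect with toy structs (not the real MemoryChunk layout):

#include <cstdint>
#include <cstdio>

// Toy layouts only; the real MemoryChunk header has many more fields.
struct WithoutProgress {
  intptr_t write_barrier_counter;
};

struct WithProgress {
  intptr_t write_barrier_counter;
  int partially_scanned_progress;  // 4 bytes of payload...
};

int main() {
  // On a typical LP64 target WithProgress is padded to pointer alignment,
  // so adding the int grows the struct by 8 bytes, not 4.
  std::printf("without: %zu, with: %zu\n",
              sizeof(WithoutProgress), sizeof(WithProgress));
  return 0;
}

With the field gone, kHeaderSize again follows directly after kWriteBarrierCounterOffset.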