Commit ac8b6367 authored by hpayer@chromium.org's avatar hpayer@chromium.org

Reland "Precisely sweep scan on scavenge pages and use heap iterator to iterate over them."

BUG=
R=bmeurer@chromium.org

Review URL: https://codereview.chromium.org/377863003

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@22270 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent d7ecf0e4
...@@ -4145,6 +4145,16 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) { ...@@ -4145,6 +4145,16 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) {
SweepConservatively<SWEEP_SEQUENTIALLY>(space, NULL, p); SweepConservatively<SWEEP_SEQUENTIALLY>(space, NULL, p);
pages_swept++; pages_swept++;
parallel_sweeping_active = true; parallel_sweeping_active = true;
} else {
if (p->scan_on_scavenge()) {
SweepPrecisely<SWEEP_ONLY, IGNORE_SKIP_LIST, IGNORE_FREE_SPACE>(
space, p, NULL);
pages_swept++;
if (FLAG_gc_verbose) {
PrintF("Sweeping 0x%" V8PRIxPTR
" scan on scavenge page precisely.\n",
reinterpret_cast<intptr_t>(p));
}
} else { } else {
if (FLAG_gc_verbose) { if (FLAG_gc_verbose) {
PrintF("Sweeping 0x%" V8PRIxPTR " conservatively in parallel.\n", PrintF("Sweeping 0x%" V8PRIxPTR " conservatively in parallel.\n",
...@@ -4153,6 +4163,7 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) { ...@@ -4153,6 +4163,7 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) {
p->set_parallel_sweeping(MemoryChunk::PARALLEL_SWEEPING_PENDING); p->set_parallel_sweeping(MemoryChunk::PARALLEL_SWEEPING_PENDING);
space->IncreaseUnsweptFreeBytes(p); space->IncreaseUnsweptFreeBytes(p);
} }
}
space->set_end_of_unswept_pages(p); space->set_end_of_unswept_pages(p);
break; break;
} }
......
...@@ -1480,6 +1480,22 @@ int HeapObject::Size() { ...@@ -1480,6 +1480,22 @@ int HeapObject::Size() {
} }
// Returns true if this heap object can hold pointers to other heap
// objects besides its map word; used to decide whether the object's
// body needs to be scanned for references.
bool HeapObject::ContainsPointers() {
  InstanceType type = map()->instance_type();
  if (type > LAST_NAME_TYPE) {
    // Non-name objects: everything beyond the data-only instance types
    // may contain pointers.
    return (type > LAST_DATA_TYPE);
  }
  // Names: symbols always reference other heap objects.
  if (type == SYMBOL_TYPE) return true;
  ASSERT(type < FIRST_NONSTRING_TYPE);
  // Of the four string representations (sequential, external, cons,
  // sliced), only the indirect ones -- cons and sliced strings --
  // contain non-map-word pointers to heap objects.
  return ((type & kIsIndirectStringMask) == kIsIndirectStringTag);
}
void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) { void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)), v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
reinterpret_cast<Object**>(FIELD_ADDR(this, end))); reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
......
...@@ -714,6 +714,7 @@ enum InstanceType { ...@@ -714,6 +714,7 @@ enum InstanceType {
FIXED_UINT8_CLAMPED_ARRAY_TYPE, // LAST_FIXED_TYPED_ARRAY_TYPE FIXED_UINT8_CLAMPED_ARRAY_TYPE, // LAST_FIXED_TYPED_ARRAY_TYPE
FIXED_DOUBLE_ARRAY_TYPE, FIXED_DOUBLE_ARRAY_TYPE,
CONSTANT_POOL_ARRAY_TYPE,
FILLER_TYPE, // LAST_DATA_TYPE FILLER_TYPE, // LAST_DATA_TYPE
// Structs. // Structs.
...@@ -740,7 +741,6 @@ enum InstanceType { ...@@ -740,7 +741,6 @@ enum InstanceType {
BREAK_POINT_INFO_TYPE, BREAK_POINT_INFO_TYPE,
FIXED_ARRAY_TYPE, FIXED_ARRAY_TYPE,
CONSTANT_POOL_ARRAY_TYPE,
SHARED_FUNCTION_INFO_TYPE, SHARED_FUNCTION_INFO_TYPE,
// All the following types are subtypes of JSReceiver, which corresponds to // All the following types are subtypes of JSReceiver, which corresponds to
...@@ -1716,6 +1716,10 @@ class HeapObject: public Object { ...@@ -1716,6 +1716,10 @@ class HeapObject: public Object {
// Returns the heap object's size in bytes // Returns the heap object's size in bytes
inline int Size(); inline int Size();
  // Returns true if this heap object may contain pointers to other
  // heap objects (in addition to its map word).
inline bool ContainsPointers();
// Given a heap object's map pointer, returns the heap size in bytes // Given a heap object's map pointer, returns the heap size in bytes
// Useful when the map pointer field is used for other purposes. // Useful when the map pointer field is used for other purposes.
// GC internal. // GC internal.
......
...@@ -18,6 +18,9 @@ namespace internal { ...@@ -18,6 +18,9 @@ namespace internal {
// HeapObjectIterator // HeapObjectIterator
HeapObjectIterator::HeapObjectIterator(PagedSpace* space) { HeapObjectIterator::HeapObjectIterator(PagedSpace* space) {
// Check that we actually can iterate this space.
ASSERT(space->is_iterable());
// You can't actually iterate over the anchor page. It is not a real page, // You can't actually iterate over the anchor page. It is not a real page,
// just an anchor for the double linked page list. Initialize as if we have // just an anchor for the double linked page list. Initialize as if we have
// reached the end of the anchor page, then the first iteration will move on // reached the end of the anchor page, then the first iteration will move on
...@@ -32,6 +35,9 @@ HeapObjectIterator::HeapObjectIterator(PagedSpace* space) { ...@@ -32,6 +35,9 @@ HeapObjectIterator::HeapObjectIterator(PagedSpace* space) {
HeapObjectIterator::HeapObjectIterator(PagedSpace* space, HeapObjectIterator::HeapObjectIterator(PagedSpace* space,
HeapObjectCallback size_func) { HeapObjectCallback size_func) {
// Check that we actually can iterate this space.
ASSERT(space->is_iterable());
// You can't actually iterate over the anchor page. It is not a real page, // You can't actually iterate over the anchor page. It is not a real page,
// just an anchor for the double linked page list. Initialize the current // just an anchor for the double linked page list. Initialize the current
// address and end as NULL, then the first iteration will move on // address and end as NULL, then the first iteration will move on
...@@ -66,9 +72,6 @@ void HeapObjectIterator::Initialize(PagedSpace* space, ...@@ -66,9 +72,6 @@ void HeapObjectIterator::Initialize(PagedSpace* space,
Address cur, Address end, Address cur, Address end,
HeapObjectIterator::PageMode mode, HeapObjectIterator::PageMode mode,
HeapObjectCallback size_f) { HeapObjectCallback size_f) {
// Check that we actually can iterate this space.
ASSERT(space->is_iterable());
space_ = space; space_ = space;
cur_addr_ = cur; cur_addr_ = cur;
cur_end_ = end; cur_end_ = end;
......
...@@ -519,26 +519,23 @@ void StoreBuffer::IteratePointersToNewSpace(ObjectSlotCallback slot_callback, ...@@ -519,26 +519,23 @@ void StoreBuffer::IteratePointersToNewSpace(ObjectSlotCallback slot_callback,
FindPointersToNewSpaceInRegion(start, end, slot_callback, clear_maps); FindPointersToNewSpaceInRegion(start, end, slot_callback, clear_maps);
} else { } else {
Page* page = reinterpret_cast<Page*>(chunk); Page* page = reinterpret_cast<Page*>(chunk);
PagedSpace* owner = reinterpret_cast<PagedSpace*>(page->owner()); ASSERT(page->owner() == heap_->map_space() ||
Address start = page->area_start(); page->owner() == heap_->old_pointer_space());
Address end = page->area_end(); CHECK(page->WasSweptPrecisely());
if (owner == heap_->map_space()) {
ASSERT(page->WasSweptPrecisely());
HeapObjectIterator iterator(page, NULL); HeapObjectIterator iterator(page, NULL);
for (HeapObject* heap_object = iterator.Next(); heap_object != NULL; for (HeapObject* heap_object = iterator.Next();
heap_object != NULL;
heap_object = iterator.Next()) { heap_object = iterator.Next()) {
// We skip free space objects. // We iterate over objects that contain pointers only.
if (!heap_object->IsFiller()) { if (heap_object->ContainsPointers()) {
FindPointersToNewSpaceInRegion( FindPointersToNewSpaceInRegion(
heap_object->address() + HeapObject::kHeaderSize, heap_object->address() + HeapObject::kHeaderSize,
heap_object->address() + heap_object->Size(), slot_callback, heap_object->address() + heap_object->Size(),
slot_callback,
clear_maps); clear_maps);
} }
} }
} else {
FindPointersToNewSpaceInRegion(
start, end, slot_callback, clear_maps);
}
} }
} }
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment