Unify object group iteration in global handles.

This unifies the object group iteration logic in global handles. The
scavenger as well as the mark-and-compact collector now rely on the same
underlying logic.

R=ulan@chromium.org

Review URL: https://codereview.chromium.org/11299248

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@13128 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent b0128271
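
In outline, both collectors now funnel through a single GlobalHandles entry
point and differ only in the skip predicate they pass. Condensed from the
hunks below (all identifiers come from this patch), the two call shapes are:

    // Scavenger: objects still unscavenged (in from-space) may be skipped.
    isolate()->global_handles()->IterateObjectGroups(
        &scavenge_visitor, &IsUnscavengedHeapObject);

    // Mark-and-compact: objects still unmarked may be skipped.
    heap()->isolate()->global_handles()->IterateObjectGroups(
        visitor, &IsUnmarkedHeapObjectWithHeap);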
@@ -551,6 +551,51 @@ void GlobalHandles::IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v) {
 }


+bool GlobalHandles::IterateObjectGroups(ObjectVisitor* v,
+                                        WeakSlotCallbackWithHeap can_skip) {
+  int last = 0;
+  bool any_group_was_visited = false;
+  for (int i = 0; i < object_groups_.length(); i++) {
+    ObjectGroup* entry = object_groups_.at(i);
+    ASSERT(entry != NULL);
+
+    Object*** objects = entry->objects_;
+    bool group_should_be_visited = false;
+    for (size_t j = 0; j < entry->length_; j++) {
+      Object* object = *objects[j];
+      if (object->IsHeapObject()) {
+        if (!can_skip(isolate_->heap(), &object)) {
+          group_should_be_visited = true;
+          break;
+        }
+      }
+    }
+
+    if (!group_should_be_visited) {
+      object_groups_[last++] = entry;
+      continue;
+    }
+
+    // An object in the group requires visiting, so iterate over all
+    // objects in the group.
+    for (size_t j = 0; j < entry->length_; ++j) {
+      Object* object = *objects[j];
+      if (object->IsHeapObject()) {
+        v->VisitPointer(&object);
+        any_group_was_visited = true;
+      }
+    }
+
+    // Once the entire group has been iterated over, set the object
+    // group to NULL so it won't be processed again.
+    entry->Dispose();
+    object_groups_.at(i) = NULL;
+  }
+  object_groups_.Rewind(last);
+  return any_group_was_visited;
+}
+
+
 bool GlobalHandles::PostGarbageCollectionProcessing(
     GarbageCollector collector) {
   // Process weak global handle callbacks. This must be done after the
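The last/Rewind bookkeeping in IterateObjectGroups above is a stable in-place
filter: groups that were not visited are compacted to the front of the list
and Rewind truncates it, while visited groups are disposed. A minimal
standalone sketch of the same idiom, using std::vector purely for
illustration (V8's List<T> plays that role in the patch, with Rewind in place
of resize):

    #include <vector>

    // Keep only the entries for which should_keep() is true, preserving
    // order; mirrors object_groups_[last++] = entry ... Rewind(last).
    template <typename T, typename Pred>
    void FilterInPlace(std::vector<T>* items, Pred should_keep) {
      size_t last = 0;
      for (size_t i = 0; i < items->size(); i++) {
        if (should_keep((*items)[i])) {
          (*items)[last++] = (*items)[i];  // compact survivors to the front
        }
        // A dropped entry would be released here, as the patch does with
        // entry->Dispose() before nulling the slot.
      }
      items->resize(last);  // analogous to List::Rewind(last)
    }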
@@ -206,6 +206,11 @@ class GlobalHandles {
   // See the note above.
   void IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v);

+  // Iterate over objects in object groups that have at least one object
+  // which requires visiting. The callback has to return true if objects
+  // can be skipped and false otherwise.
+  bool IterateObjectGroups(ObjectVisitor* v, WeakSlotCallbackWithHeap can_skip);
+
   // Add an object group.
   // Should be only used in GC callback function before a collection.
   // All groups are destroyed after a garbage collection.
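The can_skip parameter uses V8's weak-slot callback shape; its typedef is not
part of this diff, but judging from the call sites it is presumably declared
along these lines:

    // Assumed declaration (not in this patch): the callback receives the
    // heap and a slot, and returns true if the slot may be skipped, i.e.
    // the object it points to does not require visiting yet.
    typedef bool (*WeakSlotCallbackWithHeap)(Heap* heap, Object** pointer);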
@@ -1338,7 +1338,8 @@ void Heap::Scavenge() {

   new_space_front = DoScavenge(&scavenge_visitor, new_space_front);

-  while (IterateObjectGroups(&scavenge_visitor)) {
+  while (isolate()->global_handles()->IterateObjectGroups(
+      &scavenge_visitor, &IsUnscavengedHeapObject)) {
     new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
   }
   isolate()->global_handles()->RemoveObjectGroups();
@@ -1383,51 +1384,6 @@ void Heap::Scavenge() {
 }


-// TODO(mstarzinger): Unify this method with
-// MarkCompactCollector::MarkObjectGroups().
-bool Heap::IterateObjectGroups(ObjectVisitor* scavenge_visitor) {
-  List<ObjectGroup*>* object_groups =
-      isolate()->global_handles()->object_groups();
-
-  int last = 0;
-  bool changed = false;
-  for (int i = 0; i < object_groups->length(); i++) {
-    ObjectGroup* entry = object_groups->at(i);
-    ASSERT(entry != NULL);
-
-    Object*** objects = entry->objects_;
-    bool group_marked = false;
-    for (size_t j = 0; j < entry->length_; j++) {
-      Object* object = *objects[j];
-      if (object->IsHeapObject()) {
-        if (!IsUnscavengedHeapObject(this, &object)) {
-          group_marked = true;
-          break;
-        }
-      }
-    }
-
-    if (!group_marked) {
-      (*object_groups)[last++] = entry;
-      continue;
-    }
-
-    for (size_t j = 0; j < entry->length_; ++j) {
-      Object* object = *objects[j];
-      if (object->IsHeapObject()) {
-        scavenge_visitor->VisitPointer(&object);
-        changed = true;
-      }
-    }
-
-    entry->Dispose();
-    object_groups->at(i) = NULL;
-  }
-  object_groups->Rewind(last);
-  return changed;
-}
-
-
 String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
                                                                 Object** p) {
   MapWord first_word = HeapObject::cast(*p)->map_word();
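The removed Heap::IterateObjectGroups carried a TODO(mstarzinger) asking for
exactly this unification, which the patch resolves by moving the logic into
GlobalHandles. The surrounding while loop in Scavenge is a fixed-point
iteration: visiting a group copies objects into to-space, scavenging those
copies can make a previously skippable group visitable on the next pass, and
every visited group is disposed, so the list shrinks and the loop terminates.
The same loop, restated with that invariant spelled out:

    // Each pass either visits (and thereby removes) at least one group,
    // or returns false; the loop runs at most once per object group.
    while (isolate()->global_handles()->IterateObjectGroups(
               &scavenge_visitor, &IsUnscavengedHeapObject)) {
      new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
    }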
@@ -1901,8 +1901,6 @@ class Heap {
   bool PerformGarbageCollection(GarbageCollector collector,
                                 GCTracer* tracer);

-  bool IterateObjectGroups(ObjectVisitor* scavenge_visitor);
-
   inline void UpdateOldSpaceLimits();

   // Allocate an uninitialized object in map space. The behavior is identical
@@ -1745,6 +1745,16 @@ bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {
 }


+bool MarkCompactCollector::IsUnmarkedHeapObjectWithHeap(Heap* heap,
+                                                        Object** p) {
+  Object* o = *p;
+  ASSERT(o->IsHeapObject());
+  HeapObject* heap_object = HeapObject::cast(o);
+  MarkBit mark = Marking::MarkBitFrom(heap_object);
+  return !mark.Get();
+}
+
+
 void MarkCompactCollector::MarkSymbolTable() {
   SymbolTable* symbol_table = heap()->symbol_table();
   // Mark the symbol table itself.
@@ -1773,54 +1783,6 @@ void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
 }


-void MarkCompactCollector::MarkObjectGroups() {
-  List<ObjectGroup*>* object_groups =
-      heap()->isolate()->global_handles()->object_groups();
-
-  int last = 0;
-  for (int i = 0; i < object_groups->length(); i++) {
-    ObjectGroup* entry = object_groups->at(i);
-    ASSERT(entry != NULL);
-
-    Object*** objects = entry->objects_;
-    bool group_marked = false;
-    for (size_t j = 0; j < entry->length_; j++) {
-      Object* object = *objects[j];
-      if (object->IsHeapObject()) {
-        HeapObject* heap_object = HeapObject::cast(object);
-        MarkBit mark = Marking::MarkBitFrom(heap_object);
-        if (mark.Get()) {
-          group_marked = true;
-          break;
-        }
-      }
-    }
-
-    if (!group_marked) {
-      (*object_groups)[last++] = entry;
-      continue;
-    }
-
-    // An object in the group is marked, so mark as grey all white heap
-    // objects in the group.
-    for (size_t j = 0; j < entry->length_; ++j) {
-      Object* object = *objects[j];
-      if (object->IsHeapObject()) {
-        HeapObject* heap_object = HeapObject::cast(object);
-        MarkBit mark = Marking::MarkBitFrom(heap_object);
-        MarkObject(heap_object, mark);
-      }
-    }
-
-    // Once the entire group has been colored grey, set the object group
-    // to NULL so it won't be processed again.
-    entry->Dispose();
-    object_groups->at(i) = NULL;
-  }
-  object_groups->Rewind(last);
-}
-
-
 void MarkCompactCollector::MarkImplicitRefGroups() {
   List<ImplicitRefGroup*>* ref_groups =
       heap()->isolate()->global_handles()->implicit_ref_groups();
@@ -1939,11 +1901,12 @@ void MarkCompactCollector::ProcessMarkingDeque() {
 }


-void MarkCompactCollector::ProcessExternalMarking() {
+void MarkCompactCollector::ProcessExternalMarking(RootMarkingVisitor* visitor) {
   bool work_to_do = true;
   ASSERT(marking_deque_.IsEmpty());
   while (work_to_do) {
-    MarkObjectGroups();
+    heap()->isolate()->global_handles()->IterateObjectGroups(
+        visitor, &IsUnmarkedHeapObjectWithHeap);
     MarkImplicitRefGroups();
     work_to_do = !marking_deque_.IsEmpty();
     ProcessMarkingDeque();
@@ -2022,7 +1985,7 @@ void MarkCompactCollector::MarkLiveObjects() {
   // The objects reachable from the roots are marked, yet unreachable
   // objects are unmarked. Mark objects reachable due to host
   // application specific logic.
-  ProcessExternalMarking();
+  ProcessExternalMarking(&root_visitor);

   // The objects reachable from the roots or object groups are marked,
   // yet unreachable objects are unmarked. Mark objects reachable
@@ -2041,7 +2004,7 @@ void MarkCompactCollector::MarkLiveObjects() {

   // Repeat host application specific marking to mark unmarked objects
   // reachable from the weak roots.
-  ProcessExternalMarking();
+  ProcessExternalMarking(&root_visitor);

   AfterMarking();
 }
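Note the asymmetry between the two callers: the scavenger drives its fixed
point off IterateObjectGroups' boolean result, whereas ProcessExternalMarking
above discards that result and instead re-checks whether group marking pushed
new work onto the marking deque. A condensed side-by-side of the two drivers
(identifiers from this patch; global_handles abbreviates the accessor chains):

    // Scavenger: loop while some group was visited.
    while (global_handles->IterateObjectGroups(&scavenge_visitor,
                                               &IsUnscavengedHeapObject)) {
      new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
    }

    // Mark-compact: loop while marking produced new deque entries.
    while (work_to_do) {
      global_handles->IterateObjectGroups(visitor,
                                          &IsUnmarkedHeapObjectWithHeap);
      MarkImplicitRefGroups();
      work_to_do = !marking_deque_.IsEmpty();
      ProcessMarkingDeque();
    }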
@@ -756,17 +756,13 @@ class MarkCompactCollector {
   // symbol table are weak.
   void MarkSymbolTable();

-  // Mark objects in object groups that have at least one object in the
-  // group marked.
-  void MarkObjectGroups();
-
   // Mark objects in implicit references groups if their parent object
   // is marked.
   void MarkImplicitRefGroups();

   // Mark all objects which are reachable due to host application
   // logic like object groups or implicit references' groups.
-  void ProcessExternalMarking();
+  void ProcessExternalMarking(RootMarkingVisitor* visitor);

   // Mark objects reachable (transitively) from objects in the marking stack
   // or overflowed in the heap.
@@ -790,6 +786,7 @@ class MarkCompactCollector {
   // Callback function for telling whether the object *p is an unmarked
   // heap object.
   static bool IsUnmarkedHeapObject(Object** p);
+  static bool IsUnmarkedHeapObjectWithHeap(Heap* heap, Object** p);

   // Map transitions from a live map to a dead map must be killed.
   // We replace them with a null descriptor, with the same key.