Commit 92510704 authored by vegorov@chromium.org

Extend Handle API with MarkIndependent.

The garbage collector is free to ignore object groups for independent handles and can collect them in minor collections.

Review URL: http://codereview.chromium.org/7031005

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@7915 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent ecc25bfb
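
A minimal usage sketch of the new API from an embedder's point of view (not part of this commit; the wrapper state type and callback name below are illustrative only):

#include <v8.h>

struct WrapperState {
  bool disposed;
};

// Hypothetical weak callback: release the handle and record that it ran.
static void OnWrapperCollected(v8::Persistent<v8::Value> handle, void* data) {
  handle.Dispose();
  handle.Clear();
  static_cast<WrapperState*>(data)->disposed = true;
}

// In a real embedder the persistent handle would be stored on the native
// wrapper object; it is kept local here only to keep the sketch short.
void WrapObject(v8::Handle<v8::Object> object, WrapperState* state) {
  v8::Persistent<v8::Object> wrapper = v8::Persistent<v8::Object>::New(object);
  wrapper.MakeWeak(state, &OnWrapperCollected);
  // An independent handle may be reclaimed by a minor collection, and its
  // weak callback may run without global GC prologue/epilogue callbacks.
  wrapper.MarkIndependent();
}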
......@@ -387,6 +387,15 @@ template <class T> class Persistent : public Handle<T> {
/** Clears the weak reference to this object.*/
inline void ClearWeak();
/**
* Marks the reference to this object as independent. The garbage collector
* is free to ignore any object groups containing this object. A weak
* callback for an independent handle should not assume that it will be
* preceded by a global GC prologue callback or followed by a global GC
* epilogue callback.
*/
inline void MarkIndependent();
/**
* Checks if the handle holds the only reference to an object.
*/
......@@ -3106,6 +3115,7 @@ class V8EXPORT V8 {
void* data,
WeakReferenceCallback);
static void ClearWeak(internal::Object** global_handle);
static void MarkIndependent(internal::Object** global_handle);
static bool IsGlobalNearDeath(internal::Object** global_handle);
static bool IsGlobalWeak(internal::Object** global_handle);
static void SetWrapperClassId(internal::Object** global_handle,
......@@ -3808,6 +3818,11 @@ void Persistent<T>::ClearWeak() {
V8::ClearWeak(reinterpret_cast<internal::Object**>(**this));
}
template <class T>
void Persistent<T>::MarkIndependent() {
V8::MarkIndependent(reinterpret_cast<internal::Object**>(**this));
}
template <class T>
void Persistent<T>::SetWrapperClassId(uint16_t class_id) {
V8::SetWrapperClassId(reinterpret_cast<internal::Object**>(**this), class_id);
......
......@@ -539,6 +539,13 @@ void V8::ClearWeak(i::Object** obj) {
}
void V8::MarkIndependent(i::Object** object) {
i::Isolate* isolate = i::Isolate::Current();
LOG_API(isolate, "MarkIndependent");
isolate->global_handles()->MarkIndependent(object);
}
bool V8::IsGlobalNearDeath(i::Object** obj) {
i::Isolate* isolate = i::Isolate::Current();
LOG_API(isolate, "IsGlobalNearDeath");
......
......@@ -48,6 +48,7 @@ class GlobalHandles::Node : public Malloced {
// Set the initial value of the handle.
object_ = object;
class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
independent_ = false;
state_ = NORMAL;
parameter_or_next_free_.parameter = NULL;
callback_ = NULL;
......@@ -138,6 +139,13 @@ class GlobalHandles::Node : public Malloced {
set_parameter(NULL);
}
void MarkIndependent(GlobalHandles* global_handles) {
LOG(global_handles->isolate(),
HandleEvent("GlobalHandle::MarkIndependent", handle().location()));
ASSERT(state_ != DESTROYED);
independent_ = true;
}
bool IsNearDeath() {
// Check for PENDING to ensure correct answer when processing callbacks.
return state_ == PENDING || state_ == NEAR_DEATH;
......@@ -222,6 +230,8 @@ class GlobalHandles::Node : public Malloced {
};
State state_ : 4; // Need one more bit for MSVC as it treats enums as signed.
bool independent_ : 1;
private:
// Handle specific callback.
WeakReferenceCallback callback_;
......@@ -364,6 +374,11 @@ void GlobalHandles::ClearWeakness(Object** location) {
}
void GlobalHandles::MarkIndependent(Object** location) {
Node::FromLocation(location)->MarkIndependent(this);
}
bool GlobalHandles::IsNearDeath(Object** location) {
return Node::FromLocation(location)->IsNearDeath();
}
......@@ -381,7 +396,7 @@ void GlobalHandles::SetWrapperClassId(Object** location, uint16_t class_id) {
void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) {
// Traversal of GC roots in the global handle list that are marked as
// WEAK or PENDING.
// WEAK, PENDING or NEAR_DEATH.
for (Node* current = head_; current != NULL; current = current->next()) {
if (current->state_ == Node::WEAK
|| current->state_ == Node::PENDING
......@@ -392,6 +407,20 @@ void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) {
}
void GlobalHandles::IterateWeakIndependentRoots(ObjectVisitor* v) {
// Traversal of GC roots in the global handle list that are independent
// and marked as WEAK, PENDING or NEAR_DEATH.
for (Node* current = head_; current != NULL; current = current->next()) {
if (!current->independent_) continue;
if (current->state_ == Node::WEAK
|| current->state_ == Node::PENDING
|| current->state_ == Node::NEAR_DEATH) {
v->VisitPointer(&current->object_);
}
}
}
void GlobalHandles::IterateWeakRoots(WeakReferenceGuest f,
WeakReferenceCallback callback) {
for (Node* current = head_; current != NULL; current = current->next()) {
......@@ -415,7 +444,21 @@ void GlobalHandles::IdentifyWeakHandles(WeakSlotCallback f) {
}
bool GlobalHandles::PostGarbageCollectionProcessing() {
void GlobalHandles::IdentifyWeakIndependentHandles(WeakSlotCallbackWithHeap f) {
for (Node* current = head_; current != NULL; current = current->next()) {
if (current->state_ == Node::WEAK && current->independent_) {
if (f(isolate_->heap(), &current->object_)) {
current->state_ = Node::PENDING;
LOG(isolate_,
HandleEvent("GlobalHandle::Pending", current->handle().location()));
}
}
}
}
bool GlobalHandles::PostGarbageCollectionProcessing(
GarbageCollector collector) {
// Process weak global handle callbacks. This must be done after the
// GC is completely done, because the callbacks may invoke arbitrary
// API functions.
......@@ -425,6 +468,14 @@ bool GlobalHandles::PostGarbageCollectionProcessing() {
bool next_gc_likely_to_collect_more = false;
Node** p = &head_;
while (*p != NULL) {
// Skip dependent handles. Their weak callbacks might expect to be
// called between two global garbage collection callbacks which
// are not called for minor collections.
if (collector == SCAVENGER && !(*p)->independent_) {
p = (*p)->next_addr();
continue;
}
if ((*p)->PostGarbageCollectionProcessing(isolate_, this)) {
if (initial_post_gc_processing_count != post_gc_processing_count_) {
// Weak callback triggered another GC and another round of
......@@ -476,6 +527,16 @@ void GlobalHandles::IterateAllRoots(ObjectVisitor* v) {
}
void GlobalHandles::IterateStrongAndDependentRoots(ObjectVisitor* v) {
for (Node* current = head_; current != NULL; current = current->next()) {
if ((current->independent_ && current->state_ == Node::NORMAL) ||
(!current->independent_ && current->state_ != Node::DESTROYED)) {
v->VisitPointer(&current->object_);
}
}
}
void GlobalHandles::IterateAllRootsWithClassIds(ObjectVisitor* v) {
for (Node* current = head_; current != NULL; current = current->next()) {
if (current->class_id_ != v8::HeapProfiler::kPersistentHandleNoClassId &&
......
......@@ -146,6 +146,9 @@ class GlobalHandles {
// Clear the weakness of a global handle.
void ClearWeakness(Object** location);
// Mark the reference to this object independent.
void MarkIndependent(Object** location);
// Tells whether global handle is near death.
static bool IsNearDeath(Object** location);
......@@ -154,11 +157,14 @@ class GlobalHandles {
// Process pending weak handles.
// Returns true if next major GC is likely to collect more garbage.
bool PostGarbageCollectionProcessing();
bool PostGarbageCollectionProcessing(GarbageCollector collector);
// Iterates over all strong handles.
void IterateStrongRoots(ObjectVisitor* v);
// Iterates over all strong and dependent handles.
void IterateStrongAndDependentRoots(ObjectVisitor* v);
// Iterates over all handles.
void IterateAllRoots(ObjectVisitor* v);
......@@ -168,6 +174,9 @@ class GlobalHandles {
// Iterates over all weak roots in heap.
void IterateWeakRoots(ObjectVisitor* v);
// Iterates over all weak independent roots in heap.
void IterateWeakIndependentRoots(ObjectVisitor* v);
// Iterates over weak roots that are bound to a given callback.
void IterateWeakRoots(WeakReferenceGuest f,
WeakReferenceCallback callback);
......@@ -176,6 +185,10 @@ class GlobalHandles {
// them as pending.
void IdentifyWeakHandles(WeakSlotCallback f);
// Find all weak independent handles satisfying the callback predicate, mark
// them as pending.
void IdentifyWeakIndependentHandles(WeakSlotCallbackWithHeap f);
// Add an object group.
// Should be only used in GC callback function before a collection.
// All groups are destroyed after a mark-compact collection.
......
......@@ -771,11 +771,10 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
isolate_->counters()->objs_since_last_young()->Set(0);
if (collector == MARK_COMPACTOR) {
DisableAssertNoAllocation allow_allocation;
{ DisableAssertNoAllocation allow_allocation;
GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
next_gc_likely_to_collect_more =
isolate_->global_handles()->PostGarbageCollectionProcessing();
isolate_->global_handles()->PostGarbageCollectionProcessing(collector);
}
// Update relocatables.
......@@ -935,6 +934,12 @@ void Heap::CheckNewSpaceExpansionCriteria() {
}
static bool IsUnscavengedHeapObject(Heap* heap, Object** p) {
return heap->InNewSpace(*p) &&
!HeapObject::cast(*p)->map_word().IsForwardingAddress();
}
void Heap::Scavenge() {
#ifdef DEBUG
if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers();
......@@ -1029,6 +1034,11 @@ void Heap::Scavenge() {
scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_));
new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
isolate_->global_handles()->IdentifyWeakIndependentHandles(
&IsUnscavengedHeapObject);
isolate_->global_handles()->IterateWeakIndependentRoots(&scavenge_visitor);
new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
UpdateNewSpaceReferencesInExternalStringTable(
&UpdateNewSpaceReferenceInExternalStringTableEntry);
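
The scavenge hunk above works together with the new GlobalHandles methods: dead independent weak handles are identified and marked pending, the pending objects are visited so they stay reachable for their callbacks, and PostGarbageCollectionProcessing skips dependent handles during minor collections. A simplified, standalone model of that flow (toy types and names, not V8 code) is:

#include <cstdio>
#include <vector>

enum State { NORMAL, WEAK, PENDING, NEAR_DEATH };
enum Collector { SCAVENGER, MARK_COMPACTOR };

struct Node {
  State state;
  bool independent;
  bool object_is_unreachable;  // stand-in for IsUnscavengedHeapObject()
  void (*callback)(Node* self);
};

// Analogue of IdentifyWeakIndependentHandles: a weak, independent handle whose
// object did not survive the minor collection becomes PENDING.
void IdentifyWeakIndependent(std::vector<Node>& nodes) {
  for (Node& n : nodes) {
    if (n.state == WEAK && n.independent && n.object_is_unreachable) {
      n.state = PENDING;
    }
  }
}

// Analogue of PostGarbageCollectionProcessing(collector): during a scavenge,
// dependent handles are skipped, so their callbacks only run after a full GC
// that is surrounded by the global prologue/epilogue callbacks.
void PostGC(std::vector<Node>& nodes, Collector collector) {
  for (Node& n : nodes) {
    if (collector == SCAVENGER && !n.independent) continue;
    if (n.state == PENDING) {
      n.state = NEAR_DEATH;
      n.callback(&n);
    }
  }
}

int main() {
  std::vector<Node> nodes(2);
  for (Node& n : nodes) {
    n.state = WEAK;
    n.object_is_unreachable = true;
    n.callback = [](Node*) { std::puts("weak callback ran"); };
  }
  nodes[0].independent = true;   // eligible for the minor GC
  nodes[1].independent = false;  // left alone until the next full GC

  IdentifyWeakIndependent(nodes);
  PostGC(nodes, SCAVENGER);  // prints once: only the independent handle
  return 0;
}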
......@@ -4492,7 +4502,8 @@ void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) {
v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex]));
v->Synchronize("symbol_table");
if (mode != VISIT_ALL_IN_SCAVENGE) {
if (mode != VISIT_ALL_IN_SCAVENGE &&
mode != VISIT_ALL_IN_SWEEP_NEWSPACE) {
// Scavenge collections have special processing for this.
external_string_table_.Iterate(v);
}
......@@ -4528,16 +4539,24 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
// Iterate over the builtin code objects and code stubs in the
// heap. Note that it is not necessary to iterate over code objects
// on scavenge collections.
if (mode != VISIT_ALL_IN_SCAVENGE) {
if (mode != VISIT_ALL_IN_SCAVENGE &&
mode != VISIT_ALL_IN_SWEEP_NEWSPACE) {
isolate_->builtins()->IterateBuiltins(v);
}
v->Synchronize("builtins");
// Iterate over global handles.
if (mode == VISIT_ONLY_STRONG) {
isolate_->global_handles()->IterateStrongRoots(v);
} else {
isolate_->global_handles()->IterateAllRoots(v);
switch (mode) {
case VISIT_ONLY_STRONG:
isolate_->global_handles()->IterateStrongRoots(v);
break;
case VISIT_ALL_IN_SCAVENGE:
isolate_->global_handles()->IterateStrongAndDependentRoots(v);
break;
case VISIT_ALL_IN_SWEEP_NEWSPACE:
case VISIT_ALL:
isolate_->global_handles()->IterateAllRoots(v);
break;
}
v->Synchronize("globalhandles");
......
......@@ -2053,7 +2053,7 @@ static void SweepNewSpace(Heap* heap, NewSpace* space) {
}
// Update roots.
heap->IterateRoots(&updating_visitor, VISIT_ALL_IN_SCAVENGE);
heap->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE);
LiveObjectList::IterateElements(&updating_visitor);
// Update pointers in old spaces.
......
......@@ -185,6 +185,8 @@ class Mutex;
typedef bool (*WeakSlotCallback)(Object** pointer);
typedef bool (*WeakSlotCallbackWithHeap)(Heap* heap, Object** pointer);
// -----------------------------------------------------------------------------
// Miscellaneous
......@@ -218,7 +220,12 @@ enum GarbageCollector { SCAVENGER, MARK_COMPACTOR };
enum Executability { NOT_EXECUTABLE, EXECUTABLE };
enum VisitMode { VISIT_ALL, VISIT_ALL_IN_SCAVENGE, VISIT_ONLY_STRONG };
enum VisitMode {
VISIT_ALL,
VISIT_ALL_IN_SCAVENGE,
VISIT_ALL_IN_SWEEP_NEWSPACE,
VISIT_ONLY_STRONG
};
// Flag indicating whether code is built into the VM (one of the natives files).
enum NativesFlag { NOT_NATIVES_CODE, NATIVES_CODE };
......
......@@ -4432,55 +4432,116 @@ THREADED_TEST(WeakReference) {
}
static bool in_scavenge = false;
static int last = -1;
static void ForceScavenge(v8::Persistent<v8::Value> obj, void* data) {
CHECK_EQ(-1, last);
last = 0;
static void DisposeAndSetFlag(v8::Persistent<v8::Value> obj, void* data) {
obj.Dispose();
obj.Clear();
in_scavenge = true;
HEAP->PerformScavenge();
in_scavenge = false;
*(reinterpret_cast<bool*>(data)) = true;
}
static void CheckIsNotInvokedInScavenge(v8::Persistent<v8::Value> obj,
void* data) {
CHECK_EQ(0, last);
last = 1;
*(reinterpret_cast<bool*>(data)) = in_scavenge;
obj.Dispose();
obj.Clear();
}
THREADED_TEST(NoWeakRefCallbacksInScavenge) {
// Test verifies that scavenge cannot invoke WeakReferenceCallbacks.
// Calling callbacks from scavenges is unsafe as objects held by those
// handlers might have become strongly reachable, but scavenge doesn't
// check that.
THREADED_TEST(IndependentWeakHandle) {
v8::Persistent<Context> context = Context::New();
Context::Scope context_scope(context);
v8::Persistent<v8::Object> object_a;
v8::Persistent<v8::Object> object_b;
{
v8::HandleScope handle_scope;
object_b = v8::Persistent<v8::Object>::New(v8::Object::New());
object_a = v8::Persistent<v8::Object>::New(v8::Object::New());
}
bool object_a_disposed = false;
object_a.MakeWeak(&object_a_disposed, &ForceScavenge);
bool released_in_scavenge = false;
object_b.MakeWeak(&released_in_scavenge, &CheckIsNotInvokedInScavenge);
object_a.MakeWeak(&object_a_disposed, &DisposeAndSetFlag);
object_a.MarkIndependent();
HEAP->PerformScavenge();
CHECK(object_a_disposed);
}
while (!object_a_disposed) {
HEAP->CollectAllGarbage(false);
static void InvokeScavenge() {
HEAP->PerformScavenge();
}
static void InvokeMarkSweep() {
HEAP->CollectAllGarbage(false);
}
static void ForceScavenge(v8::Persistent<v8::Value> obj, void* data) {
obj.Dispose();
obj.Clear();
*(reinterpret_cast<bool*>(data)) = true;
InvokeScavenge();
}
static void ForceMarkSweep(v8::Persistent<v8::Value> obj, void* data) {
obj.Dispose();
obj.Clear();
*(reinterpret_cast<bool*>(data)) = true;
InvokeMarkSweep();
}
THREADED_TEST(GCFromWeakCallbacks) {
v8::Persistent<Context> context = Context::New();
Context::Scope context_scope(context);
static const int kNumberOfGCTypes = 2;
v8::WeakReferenceCallback gc_forcing_callback[kNumberOfGCTypes] =
{&ForceScavenge, &ForceMarkSweep};
typedef void (*GCInvoker)();
GCInvoker invoke_gc[kNumberOfGCTypes] = {&InvokeScavenge, &InvokeMarkSweep};
for (int outer_gc = 0; outer_gc < kNumberOfGCTypes; outer_gc++) {
for (int inner_gc = 0; inner_gc < kNumberOfGCTypes; inner_gc++) {
v8::Persistent<v8::Object> object;
{
v8::HandleScope handle_scope;
object = v8::Persistent<v8::Object>::New(v8::Object::New());
}
bool disposed = false;
object.MakeWeak(&disposed, gc_forcing_callback[inner_gc]);
object.MarkIndependent();
invoke_gc[outer_gc]();
CHECK(disposed);
}
}
}
static void RevivingCallback(v8::Persistent<v8::Value> obj, void* data) {
obj.ClearWeak();
*(reinterpret_cast<bool*>(data)) = true;
}
THREADED_TEST(IndependentHandleRevival) {
v8::Persistent<Context> context = Context::New();
Context::Scope context_scope(context);
v8::Persistent<v8::Object> object;
{
v8::HandleScope handle_scope;
object = v8::Persistent<v8::Object>::New(v8::Object::New());
object->Set(v8_str("x"), v8::Integer::New(1));
v8::Local<String> y_str = v8_str("y");
object->Set(y_str, y_str);
}
bool revived = false;
object.MakeWeak(&revived, &RevivingCallback);
object.MarkIndependent();
HEAP->PerformScavenge();
CHECK(revived);
HEAP->CollectAllGarbage(true);
{
v8::HandleScope handle_scope;
v8::Local<String> y_str = v8_str("y");
CHECK_EQ(v8::Integer::New(1), object->Get(v8_str("x")));
CHECK(object->Get(y_str)->Equals(y_str));
}
CHECK(!released_in_scavenge);
}
......