Commit 78867ad8 authored by Michael Lippautz, committed by Commit Bot

Remove object grouping

Embedders should switch to the EmbedderHeapTracer API.
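
As a reference for migrating embedders, a minimal sketch of the replacement follows. It is illustrative only and not part of this change: the class name MyWrapperTracer and its bookkeeping are hypothetical, and the hook set is assumed from the EmbedderHeapTracer interface of this API version.

#include <utility>
#include <vector>

#include "v8.h"

// Hypothetical embedder-side tracer: wrapper liveness is reported to V8
// through EmbedderHeapTracer hooks instead of object groups.
class MyWrapperTracer : public v8::EmbedderHeapTracer {
 public:
  // V8 reports the embedder fields of traceable wrappers it encountered
  // while marking; remember them for the embedder-side graph walk.
  void RegisterV8References(
      const std::vector<std::pair<void*, void*> >& embedder_fields) override {
    pending_.insert(pending_.end(), embedder_fields.begin(),
                    embedder_fields.end());
  }

  void TracePrologue() override { pending_.clear(); }

  // Walk the embedder object graph starting from the pending wrappers and
  // keep the V8 handles of reachable wrappers alive (the exact mechanism is
  // embedder-specific). Return false once no tracing work is left.
  bool AdvanceTracing(double deadline_in_ms,
                      AdvanceTracingActions actions) override {
    // ... embedder-specific graph traversal elided ...
    pending_.clear();
    return false;
  }

  void TraceEpilogue() override {}
  void EnterFinalPause() override {}
  void AbortTracing() override { pending_.clear(); }

 private:
  std::vector<std::pair<void*, void*> > pending_;
};

// Registration, typically once per isolate:
//   MyWrapperTracer tracer;
//   isolate->SetEmbedderHeapTracer(&tracer);
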

BUG=v8:5828

Change-Id: I82f2bc583d246617865a17f5904e02cd35f92fec
Reviewed-on: https://chromium-review.googlesource.com/448539
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#43551}
parent 2e000250
......@@ -812,11 +812,6 @@ class V8_EXPORT HeapProfiler {
/** Returns memory used for profiler internal data and snapshots. */
size_t GetProfilerMemorySize();
/**
* Sets a RetainedObjectInfo for an object group (see V8::SetObjectGroupId).
*/
void SetRetainedObjectInfo(UniqueId id, RetainedObjectInfo* info);
private:
HeapProfiler();
~HeapProfiler();
......
......@@ -6826,45 +6826,6 @@ class V8_EXPORT Isolate {
*/
Local<Value> ThrowException(Local<Value> exception);
/**
* Allows the host application to group objects together. If one
* object in the group is alive, all objects in the group are alive.
* After each garbage collection, object groups are removed. It is
* intended to be used in the before-garbage-collection callback
* function, for instance to simulate DOM tree connections among JS
* wrapper objects. Object groups for all dependent handles need to
* be provided for kGCTypeMarkSweepCompact collections, for all other
* garbage collection types it is sufficient to provide object groups
* for partially dependent handles only.
*/
template <typename T>
V8_DEPRECATED("Use EmbedderHeapTracer",
void SetObjectGroupId(const Persistent<T>& object,
UniqueId id));
/**
* Allows the host application to declare implicit references from an object
* group to an object. If the objects of the object group are alive, the child
* object is alive too. After each garbage collection, all implicit references
* are removed. It is intended to be used in the before-garbage-collection
* callback function.
*/
template <typename T>
V8_DEPRECATED("Use EmbedderHeapTracer",
void SetReferenceFromGroup(UniqueId id,
const Persistent<T>& child));
/**
* Allows the host application to declare implicit references from an object
* to another object. If the parent object is alive, the child object is alive
* too. After each garbage collection, all implicit references are removed. It
* is intended to be used in the before-garbage-collection callback function.
*/
template <typename T, typename S>
V8_DEPRECATED("Use EmbedderHeapTracer",
void SetReference(const Persistent<T>& parent,
const Persistent<S>& child));
typedef void (*GCCallback)(Isolate* isolate, GCType type,
GCCallbackFlags flags);
......@@ -7330,9 +7291,6 @@ class V8_EXPORT Isolate {
template <class K, class V, class Traits>
friend class PersistentValueMapBase;
void SetObjectGroupId(internal::Object** object, UniqueId id);
void SetReferenceFromGroup(UniqueId id, internal::Object** object);
void SetReference(internal::Object** parent, internal::Object** child);
void ReportExternalAllocationLimitReached();
};
......@@ -9742,33 +9700,6 @@ int64_t Isolate::AdjustAmountOfExternalAllocatedMemory(
return *external_memory;
}
template<typename T>
void Isolate::SetObjectGroupId(const Persistent<T>& object,
UniqueId id) {
TYPE_CHECK(Value, T);
SetObjectGroupId(reinterpret_cast<internal::Object**>(object.val_), id);
}
template<typename T>
void Isolate::SetReferenceFromGroup(UniqueId id,
const Persistent<T>& object) {
TYPE_CHECK(Value, T);
SetReferenceFromGroup(id, reinterpret_cast<internal::Object**>(object.val_));
}
template<typename T, typename S>
void Isolate::SetReference(const Persistent<T>& parent,
const Persistent<S>& child) {
TYPE_CHECK(Object, T);
TYPE_CHECK(Value, S);
SetReference(reinterpret_cast<internal::Object**>(parent.val_),
reinterpret_cast<internal::Object**>(child.val_));
}
Local<Value> Context::GetEmbedderData(int index) {
#ifndef V8_ENABLE_CHECKS
typedef internal::Object O;
......
......@@ -8055,31 +8055,6 @@ v8::Local<Value> Isolate::ThrowException(v8::Local<v8::Value> value) {
return v8::Undefined(reinterpret_cast<v8::Isolate*>(isolate));
}
void Isolate::SetObjectGroupId(internal::Object** object, UniqueId id) {
i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(this);
internal_isolate->global_handles()->SetObjectGroupId(
i::Handle<i::Object>(object).location(), id);
}
void Isolate::SetReferenceFromGroup(UniqueId id, internal::Object** object) {
i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(this);
internal_isolate->global_handles()->SetReferenceFromGroup(
id, i::Handle<i::Object>(object).location());
}
void Isolate::SetReference(internal::Object** parent,
internal::Object** child) {
i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(this);
i::Object** parent_location = i::Handle<i::Object>(parent).location();
internal_isolate->global_handles()->SetReference(
reinterpret_cast<i::HeapObject**>(parent_location),
i::Handle<i::Object>(child).location());
}
void Isolate::AddGCPrologueCallback(GCCallback callback, GCType gc_type) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
isolate->heap()->AddGCPrologueCallback(callback, gc_type);
......@@ -10032,13 +10007,6 @@ size_t HeapProfiler::GetProfilerMemorySize() {
GetMemorySizeUsedByProfiler();
}
void HeapProfiler::SetRetainedObjectInfo(UniqueId id,
RetainedObjectInfo* info) {
reinterpret_cast<i::HeapProfiler*>(this)->SetRetainedObjectInfo(id, info);
}
v8::Testing::StressType internal::Testing::stress_type_ =
v8::Testing::kStressTypeOpt;
......
......@@ -24,78 +24,6 @@ class ObjectVisitor;
// At GC the destroyed global handles are removed from the free list
// and deallocated.
// Data structures for tracking object groups and implicit references.
// An object group is treated like a single JS object: if one of object in
// the group is alive, all objects in the same group are considered alive.
// An object group is used to simulate object relationship in a DOM tree.
// An implicit references group consists of two parts: a parent object and a
// list of children objects. If the parent is alive, all the children are alive
// too.
struct ObjectGroup {
explicit ObjectGroup(size_t length)
: info(NULL), length(length) {
DCHECK(length > 0);
objects = new Object**[length];
}
~ObjectGroup();
v8::RetainedObjectInfo* info;
Object*** objects;
size_t length;
};
struct ImplicitRefGroup {
ImplicitRefGroup(HeapObject** parent, size_t length)
: parent(parent), length(length) {
DCHECK(length > 0);
children = new Object**[length];
}
~ImplicitRefGroup();
HeapObject** parent;
Object*** children;
size_t length;
};
// For internal bookkeeping.
struct ObjectGroupConnection {
ObjectGroupConnection(UniqueId id, Object** object)
: id(id), object(object) {}
bool operator==(const ObjectGroupConnection& other) const {
return id == other.id;
}
bool operator<(const ObjectGroupConnection& other) const {
return id < other.id;
}
UniqueId id;
Object** object;
};
struct ObjectGroupRetainerInfo {
ObjectGroupRetainerInfo(UniqueId id, RetainedObjectInfo* info)
: id(id), info(info) {}
bool operator==(const ObjectGroupRetainerInfo& other) const {
return id == other.id;
}
bool operator<(const ObjectGroupRetainerInfo& other) const {
return id < other.id;
}
UniqueId id;
RetainedObjectInfo* info;
};
enum WeaknessType {
// Embedder gets a handle to the dying object.
FINALIZER_WEAK,
......@@ -170,7 +98,7 @@ class GlobalHandles {
// Clear the weakness of a global handle.
static void* ClearWeakness(Object** location);
// Mark the reference to this object independent of any object group.
// Mark the reference to this object independent.
static void MarkIndependent(Object** location);
static bool IsIndependent(Object** location);
......@@ -240,55 +168,6 @@ class GlobalHandles {
// unmodified
void IdentifyWeakUnmodifiedObjects(WeakSlotCallback is_unmodified);
// Iterate over objects in object groups that have at least one object
// which requires visiting. The callback has to return true if objects
// can be skipped and false otherwise.
bool IterateObjectGroups(ObjectVisitor* v, WeakSlotCallbackWithHeap can_skip);
// Print all objects in object groups
void PrintObjectGroups();
// Add an object group.
// Should be only used in GC callback function before a collection.
// All groups are destroyed after a garbage collection.
void AddObjectGroup(Object*** handles,
size_t length,
v8::RetainedObjectInfo* info);
// Associates handle with the object group represented by id.
// Should be only used in GC callback function before a collection.
// All groups are destroyed after a garbage collection.
void SetObjectGroupId(Object** handle, UniqueId id);
// Set RetainedObjectInfo for an object group. Should not be called more than
// once for a group. Should not be called for a group which contains no
// handles.
void SetRetainedObjectInfo(UniqueId id, RetainedObjectInfo* info);
// Adds an implicit reference from a group to an object. Should be only used
// in GC callback function before a collection. All implicit references are
// destroyed after a mark-compact collection.
void SetReferenceFromGroup(UniqueId id, Object** child);
// Adds an implicit reference from a parent object to a child object. Should
// be only used in GC callback function before a collection. All implicit
// references are destroyed after a mark-compact collection.
void SetReference(HeapObject** parent, Object** child);
List<ObjectGroup*>* object_groups() {
ComputeObjectGroupsAndImplicitReferences();
return &object_groups_;
}
List<ImplicitRefGroup*>* implicit_ref_groups() {
ComputeObjectGroupsAndImplicitReferences();
return &implicit_ref_groups_;
}
// Remove bags, this should only happen after GC.
void RemoveObjectGroups();
void RemoveImplicitRefGroups();
// Tear down the global handle structure.
void TearDown();
......@@ -302,15 +181,6 @@ class GlobalHandles {
private:
explicit GlobalHandles(Isolate* isolate);
// Migrates data from the internal representation (object_group_connections_,
// retainer_infos_ and implicit_ref_connections_) to the public and more
// efficient representation (object_groups_ and implicit_ref_groups_).
void ComputeObjectGroupsAndImplicitReferences();
// v8::internal::List is inefficient even for small number of elements, if we
// don't assign any initial capacity.
static const int kObjectGroupConnectionsCapacity = 20;
class PendingPhantomCallback;
// Helpers for PostGarbageCollectionProcessing.
......@@ -349,17 +219,6 @@ class GlobalHandles {
size_t number_of_phantom_handle_resets_;
// Object groups and implicit references, public and more efficient
// representation.
List<ObjectGroup*> object_groups_;
List<ImplicitRefGroup*> implicit_ref_groups_;
// Object groups and implicit references, temporary representation while
// constructing the groups.
List<ObjectGroupConnection> object_group_connections_;
List<ObjectGroupRetainerInfo> retainer_infos_;
List<ObjectGroupConnection> implicit_ref_connections_;
List<PendingPhantomCallback> pending_phantom_callbacks_;
friend class Isolate;
......
......@@ -537,7 +537,6 @@ void GCTracer::PrintNVP() const {
"clear=%1.f "
"clear.code_flush=%.1f "
"clear.dependent_code=%.1f "
"clear.global_handles=%.1f "
"clear.maps=%.1f "
"clear.slots_buffer=%.1f "
"clear.store_buffer=%.1f "
......@@ -563,7 +562,6 @@ void GCTracer::PrintNVP() const {
"finish=%.1f "
"mark=%.1f "
"mark.finish_incremental=%.1f "
"mark.object_grouping=%.1f "
"mark.prepare_code_flush=%.1f "
"mark.roots=%.1f "
"mark.weak_closure=%.1f "
......@@ -584,7 +582,6 @@ void GCTracer::PrintNVP() const {
"incremental.finalize.body=%.1f "
"incremental.finalize.external.prologue=%.1f "
"incremental.finalize.external.epilogue=%.1f "
"incremental.finalize.object_grouping=%.1f "
"incremental.sweeping=%.1f "
"incremental.wrapper_prologue=%.1f "
"incremental.wrapper_tracing=%.1f "
......@@ -623,7 +620,6 @@ void GCTracer::PrintNVP() const {
current_.reduce_memory, current_.scopes[Scope::MC_CLEAR],
current_.scopes[Scope::MC_CLEAR_CODE_FLUSH],
current_.scopes[Scope::MC_CLEAR_DEPENDENT_CODE],
current_.scopes[Scope::MC_CLEAR_GLOBAL_HANDLES],
current_.scopes[Scope::MC_CLEAR_MAPS],
current_.scopes[Scope::MC_CLEAR_SLOTS_BUFFER],
current_.scopes[Scope::MC_CLEAR_STORE_BUFFER],
......@@ -648,7 +644,6 @@ void GCTracer::PrintNVP() const {
current_.scopes[Scope::EXTERNAL_WEAK_GLOBAL_HANDLES],
current_.scopes[Scope::MC_FINISH], current_.scopes[Scope::MC_MARK],
current_.scopes[Scope::MC_MARK_FINISH_INCREMENTAL],
current_.scopes[Scope::MC_MARK_OBJECT_GROUPING],
current_.scopes[Scope::MC_MARK_PREPARE_CODE_FLUSH],
current_.scopes[Scope::MC_MARK_ROOTS],
current_.scopes[Scope::MC_MARK_WEAK_CLOSURE],
......@@ -668,7 +663,6 @@ void GCTracer::PrintNVP() const {
current_.scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY],
current_.scopes[Scope::MC_INCREMENTAL_EXTERNAL_PROLOGUE],
current_.scopes[Scope::MC_INCREMENTAL_EXTERNAL_EPILOGUE],
current_.scopes[Scope::MC_INCREMENTAL_FINALIZE_OBJECT_GROUPING],
current_.scopes[Scope::MC_INCREMENTAL_SWEEPING],
current_.scopes[Scope::MC_INCREMENTAL_WRAPPER_PROLOGUE],
current_.scopes[Scope::MC_INCREMENTAL_WRAPPER_TRACING],
......
......@@ -31,7 +31,6 @@ enum ScavengeSpeedMode { kForAllObjects, kForSurvivedObjects };
F(MC_INCREMENTAL_WRAPPER_TRACING) \
F(MC_INCREMENTAL_FINALIZE) \
F(MC_INCREMENTAL_FINALIZE_BODY) \
F(MC_INCREMENTAL_FINALIZE_OBJECT_GROUPING) \
F(MC_INCREMENTAL_EXTERNAL_EPILOGUE) \
F(MC_INCREMENTAL_EXTERNAL_PROLOGUE)
......@@ -43,7 +42,6 @@ enum ScavengeSpeedMode { kForAllObjects, kForSurvivedObjects };
F(MC_CLEAR) \
F(MC_CLEAR_CODE_FLUSH) \
F(MC_CLEAR_DEPENDENT_CODE) \
F(MC_CLEAR_GLOBAL_HANDLES) \
F(MC_CLEAR_MAPS) \
F(MC_CLEAR_SLOTS_BUFFER) \
F(MC_CLEAR_STORE_BUFFER) \
......@@ -76,7 +74,6 @@ enum ScavengeSpeedMode { kForAllObjects, kForSurvivedObjects };
F(MC_MARK_WRAPPER_EPILOGUE) \
F(MC_MARK_WRAPPER_PROLOGUE) \
F(MC_MARK_WRAPPER_TRACING) \
F(MC_MARK_OBJECT_GROUPING) \
F(MC_PROLOGUE) \
F(MC_SWEEP) \
F(MC_SWEEP_CODE) \
......
......@@ -1450,10 +1450,6 @@ void Heap::CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags) {
}
}
}
if (FLAG_trace_object_groups && (gc_type == kGCTypeIncrementalMarking ||
gc_type == kGCTypeMarkSweepCompact)) {
isolate_->global_handles()->PrintObjectGroups();
}
}
......
......@@ -591,24 +591,6 @@ void IncrementalMarking::MarkRoots() {
heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);
}
void IncrementalMarking::MarkObjectGroups() {
TRACE_GC(heap_->tracer(),
GCTracer::Scope::MC_INCREMENTAL_FINALIZE_OBJECT_GROUPING);
DCHECK(!heap_->local_embedder_heap_tracer()->InUse());
DCHECK(!finalize_marking_completed_);
DCHECK(IsMarking());
IncrementalMarkingRootMarkingVisitor visitor(this);
heap_->mark_compact_collector()->MarkImplicitRefGroups(&MarkGrey);
heap_->isolate()->global_handles()->IterateObjectGroups(
&visitor, &MarkCompactCollector::IsUnmarkedHeapObjectWithHeap);
heap_->isolate()->global_handles()->RemoveImplicitRefGroups();
heap_->isolate()->global_handles()->RemoveObjectGroups();
}
void IncrementalMarking::ProcessWeakCells() {
DCHECK(!finalize_marking_completed_);
DCHECK(IsMarking());
......@@ -721,14 +703,11 @@ void IncrementalMarking::FinalizeIncrementally() {
// After finishing incremental marking, we try to discover all unmarked
// objects to reduce the marking load in the final pause.
// 1) We scan and mark the roots again to find all changes to the root set.
// 2) We mark the object groups.
// 3) Age and retain maps embedded in optimized code.
// 4) Remove weak cell with live values from the list of weak cells, they
// 2) Age and retain maps embedded in optimized code.
// 3) Remove weak cell with live values from the list of weak cells, they
// do not need processing during GC.
MarkRoots();
if (!heap_->local_embedder_heap_tracer()->InUse()) {
MarkObjectGroups();
}
if (incremental_marking_finalization_rounds_ == 0) {
// Map retaining is needed for perfromance, not correctness,
// so we can do it only once at the beginning of the finalization.
......
......@@ -244,7 +244,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
void FinishBlackAllocation();
void MarkRoots();
void MarkObjectGroups();
void ProcessWeakCells();
// Retain dying maps for <FLAG_retain_maps_for_n_gc> garbage collections to
// increase chances of reusing of map transition tree in future.
......
......@@ -1941,14 +1941,6 @@ bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {
return ObjectMarking::IsWhite(HeapObject::cast(o));
}
bool MarkCompactCollector::IsUnmarkedHeapObjectWithHeap(Heap* heap,
Object** p) {
Object* o = *p;
DCHECK(o->IsHeapObject());
return ObjectMarking::IsWhite(HeapObject::cast(o));
}
void MarkCompactCollector::MarkStringTable(
RootMarkingVisitor<MarkingMode::FULL>* visitor) {
StringTable* string_table = heap()->string_table();
......@@ -1978,38 +1970,6 @@ void MarkCompactCollector::MarkRoots(
}
}
void MarkCompactCollector::MarkImplicitRefGroups(
MarkObjectFunction mark_object) {
List<ImplicitRefGroup*>* ref_groups =
isolate()->global_handles()->implicit_ref_groups();
int last = 0;
for (int i = 0; i < ref_groups->length(); i++) {
ImplicitRefGroup* entry = ref_groups->at(i);
DCHECK(entry != NULL);
if (ObjectMarking::IsWhite(*entry->parent)) {
(*ref_groups)[last++] = entry;
continue;
}
Object*** children = entry->children;
// A parent object is marked, so mark all child heap objects.
for (size_t j = 0; j < entry->length; ++j) {
if ((*children[j])->IsHeapObject()) {
mark_object(heap(), HeapObject::cast(*children[j]));
}
}
// Once the entire group has been marked, dispose it because it's
// not needed anymore.
delete entry;
}
ref_groups->Rewind(last);
}
// Mark all objects reachable from the objects on the marking stack.
// Before: the marking stack contains zero or more heap object pointers.
// After: the marking stack is empty, and all objects reachable from the
......@@ -2097,11 +2057,6 @@ void MarkCompactCollector::ProcessEphemeralMarking(
0,
EmbedderHeapTracer::AdvanceTracingActions(
EmbedderHeapTracer::ForceCompletionAction::FORCE_COMPLETION));
} else {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_OBJECT_GROUPING);
isolate()->global_handles()->IterateObjectGroups(
visitor, &IsUnmarkedHeapObjectWithHeap);
MarkImplicitRefGroups(&MarkCompactMarkingVisitor::MarkObject);
}
} else {
// TODO(mlippautz): We currently do not trace through blink when
......@@ -2487,14 +2442,6 @@ void MarkCompactCollector::ClearNonLiveReferences() {
heap()->ProcessAllWeakReferences(&mark_compact_object_retainer);
}
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_GLOBAL_HANDLES);
// Remove object groups after marking phase.
heap()->isolate()->global_handles()->RemoveObjectGroups();
heap()->isolate()->global_handles()->RemoveImplicitRefGroups();
}
// Flush code from collected candidates.
if (is_code_flushing_enabled()) {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_CODE_FLUSH);
......
......@@ -530,8 +530,6 @@ class MarkCompactCollector {
static const uint32_t kSingleFreeEncoding = 0;
static const uint32_t kMultiFreeEncoding = 1;
static bool IsUnmarkedHeapObjectWithHeap(Heap* heap, Object** p);
inline Heap* heap() const { return heap_; }
inline Isolate* isolate() const;
......@@ -583,10 +581,6 @@ class MarkCompactCollector {
bool evacuation() const { return evacuation_; }
// Mark objects in implicit references groups if their parent object
// is marked.
void MarkImplicitRefGroups(MarkObjectFunction mark_object);
MarkingDeque* marking_deque() { return &marking_deque_; }
Sweeper& sweeper() { return sweeper_; }
......
......@@ -201,14 +201,6 @@ void HeapProfiler::UpdateObjectSizeEvent(Address addr, int size) {
ids_->UpdateObjectSize(addr, size);
}
void HeapProfiler::SetRetainedObjectInfo(UniqueId id,
RetainedObjectInfo* info) {
// TODO(yurus, marja): Don't route this information through GlobalHandles.
heap()->isolate()->global_handles()->SetRetainedObjectInfo(id, info);
}
Handle<HeapObject> HeapProfiler::FindHeapObjectById(SnapshotObjectId id) {
HeapObject* object = NULL;
HeapIterator iterator(heap(), HeapIterator::kFilterUnreachable);
......
......@@ -64,7 +64,6 @@ class HeapProfiler {
v8::RetainedObjectInfo* ExecuteWrapperClassCallback(uint16_t class_id,
Object** wrapper);
void SetRetainedObjectInfo(UniqueId id, RetainedObjectInfo* info);
void SetGetRetainerInfosCallback(
v8::HeapProfiler::GetRetainerInfosCallback callback);
......
......@@ -227,169 +227,12 @@ TEST(MapCompact) {
}
#endif
static int NumberOfWeakCalls = 0;
static void WeakPointerCallback(const v8::WeakCallbackInfo<void>& data) {
std::pair<v8::Persistent<v8::Value>*, int>* p =
reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
data.GetParameter());
CHECK_EQ(1234, p->second);
NumberOfWeakCalls++;
p->first->Reset();
}
HEAP_TEST(ObjectGroups) {
FLAG_incremental_marking = false;
CcTest::InitializeVM();
GlobalHandles* global_handles = CcTest::i_isolate()->global_handles();
Heap* heap = CcTest::heap();
NumberOfWeakCalls = 0;
v8::HandleScope handle_scope(CcTest::isolate());
Handle<Object> g1s1 =
global_handles->Create(heap->AllocateFixedArray(1).ToObjectChecked());
Handle<Object> g1s2 =
global_handles->Create(heap->AllocateFixedArray(1).ToObjectChecked());
Handle<Object> g1c1 =
global_handles->Create(heap->AllocateFixedArray(1).ToObjectChecked());
std::pair<Handle<Object>*, int> g1s1_and_id(&g1s1, 1234);
GlobalHandles::MakeWeak(
g1s1.location(), reinterpret_cast<void*>(&g1s1_and_id),
&WeakPointerCallback, v8::WeakCallbackType::kParameter);
std::pair<Handle<Object>*, int> g1s2_and_id(&g1s2, 1234);
GlobalHandles::MakeWeak(
g1s2.location(), reinterpret_cast<void*>(&g1s2_and_id),
&WeakPointerCallback, v8::WeakCallbackType::kParameter);
std::pair<Handle<Object>*, int> g1c1_and_id(&g1c1, 1234);
GlobalHandles::MakeWeak(
g1c1.location(), reinterpret_cast<void*>(&g1c1_and_id),
&WeakPointerCallback, v8::WeakCallbackType::kParameter);
Handle<Object> g2s1 =
global_handles->Create(heap->AllocateFixedArray(1).ToObjectChecked());
Handle<Object> g2s2 =
global_handles->Create(heap->AllocateFixedArray(1).ToObjectChecked());
Handle<Object> g2c1 =
global_handles->Create(heap->AllocateFixedArray(1).ToObjectChecked());
std::pair<Handle<Object>*, int> g2s1_and_id(&g2s1, 1234);
GlobalHandles::MakeWeak(
g2s1.location(), reinterpret_cast<void*>(&g2s1_and_id),
&WeakPointerCallback, v8::WeakCallbackType::kParameter);
std::pair<Handle<Object>*, int> g2s2_and_id(&g2s2, 1234);
GlobalHandles::MakeWeak(
g2s2.location(), reinterpret_cast<void*>(&g2s2_and_id),
&WeakPointerCallback, v8::WeakCallbackType::kParameter);
std::pair<Handle<Object>*, int> g2c1_and_id(&g2c1, 1234);
GlobalHandles::MakeWeak(
g2c1.location(), reinterpret_cast<void*>(&g2c1_and_id),
&WeakPointerCallback, v8::WeakCallbackType::kParameter);
Handle<Object> root = global_handles->Create(*g1s1); // make a root.
// Connect group 1 and 2, make a cycle.
Handle<FixedArray>::cast(g1s2)->set(0, *g2s2);
Handle<FixedArray>::cast(g2s1)->set(0, *g1s1);
{
Object** g1_objects[] = { g1s1.location(), g1s2.location() };
Object** g2_objects[] = { g2s1.location(), g2s2.location() };
global_handles->AddObjectGroup(g1_objects, 2, NULL);
global_handles->SetReference(Handle<HeapObject>::cast(g1s1).location(),
g1c1.location());
global_handles->AddObjectGroup(g2_objects, 2, NULL);
global_handles->SetReference(Handle<HeapObject>::cast(g2s1).location(),
g2c1.location());
}
// Do a full GC
CcTest::CollectGarbage(OLD_SPACE);
// All object should be alive.
CHECK_EQ(0, NumberOfWeakCalls);
// Weaken the root.
std::pair<Handle<Object>*, int> root_and_id(&root, 1234);
GlobalHandles::MakeWeak(
root.location(), reinterpret_cast<void*>(&root_and_id),
&WeakPointerCallback, v8::WeakCallbackType::kParameter);
// But make children strong roots---all the objects (except for children)
// should be collectable now.
global_handles->ClearWeakness(g1c1.location());
global_handles->ClearWeakness(g2c1.location());
// Groups are deleted, rebuild groups.
{
Object** g1_objects[] = { g1s1.location(), g1s2.location() };
Object** g2_objects[] = { g2s1.location(), g2s2.location() };
global_handles->AddObjectGroup(g1_objects, 2, NULL);
global_handles->SetReference(Handle<HeapObject>::cast(g1s1).location(),
g1c1.location());
global_handles->AddObjectGroup(g2_objects, 2, NULL);
global_handles->SetReference(Handle<HeapObject>::cast(g2s1).location(),
g2c1.location());
}
CcTest::CollectGarbage(OLD_SPACE);
// All objects should be gone. 5 global handles in total.
CHECK_EQ(5, NumberOfWeakCalls);
// And now make children weak again and collect them.
GlobalHandles::MakeWeak(
g1c1.location(), reinterpret_cast<void*>(&g1c1_and_id),
&WeakPointerCallback, v8::WeakCallbackType::kParameter);
GlobalHandles::MakeWeak(
g2c1.location(), reinterpret_cast<void*>(&g2c1_and_id),
&WeakPointerCallback, v8::WeakCallbackType::kParameter);
CcTest::CollectGarbage(OLD_SPACE);
CHECK_EQ(7, NumberOfWeakCalls);
}
class TestRetainedObjectInfo : public v8::RetainedObjectInfo {
public:
TestRetainedObjectInfo() : has_been_disposed_(false) {}
bool has_been_disposed() { return has_been_disposed_; }
virtual void Dispose() {
CHECK(!has_been_disposed_);
has_been_disposed_ = true;
}
virtual bool IsEquivalent(v8::RetainedObjectInfo* other) {
return other == this;
}
virtual intptr_t GetHash() { return 0; }
virtual const char* GetLabel() { return "whatever"; }
private:
bool has_been_disposed_;
};
TEST(EmptyObjectGroups) {
CcTest::InitializeVM();
GlobalHandles* global_handles = CcTest::i_isolate()->global_handles();
v8::HandleScope handle_scope(CcTest::isolate());
TestRetainedObjectInfo info;
global_handles->AddObjectGroup(NULL, 0, &info);
CHECK(info.has_been_disposed());
}
#if defined(__has_feature)
#if __has_feature(address_sanitizer)
#define V8_WITH_ASAN 1
#endif
#endif
// Here is a memory use test that uses /proc, and is therefore Linux-only. We
// do not care how much memory the simulator uses, since it is only there for
// debugging purposes. Testing with ASAN doesn't make sense, either.
......