Commit 0353c0af authored by Omer Katz, committed by Commit Bot

cppgc: Support weak containers

This CL adds TraceWeakContainer and VisitWeakContainer to the Visitor
api. It also introduces the weak_container_worklist_ used to force
re-tracing of weak containers that are reachable from stack.

Bug: chromium:1056170
Change-Id: I4ba75bd64939b8df9ece7422828a5ac647b03fd1
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2491022
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#70708}
parent 494eef3b
......@@ -160,6 +160,22 @@ class V8_EXPORT Visitor {
Trace(value);
}
/**
 * Traces a weak container (e.g. a weak hash table backing store).
 *
 * \param object pointer to the weak container; nullptr is ignored.
 * \param callback weak callback invoked during weak processing.
 * \param data custom data that is passed through to the callback.
 */
template <typename T>
void TraceWeakContainer(const T* object, WeakCallback callback,
                        const void* data) {
  if (object) {
    VisitWeakContainer(object, TraceTrait<T>::GetTraceDescriptor(object),
                       TraceTrait<T>::GetWeakTraceDescriptor(object), callback,
                       data);
  }
}
/**
* Registers a weak callback that is invoked during garbage collection.
*
......@@ -195,6 +211,9 @@ class V8_EXPORT Visitor {
// Visitation hooks with no-op default implementations; concrete marking and
// verification visitors override these.
virtual void VisitWeakRoot(const void* self, TraceDescriptor, WeakCallback,
                           const void* weak_root, const SourceLocation&) {}
virtual void VisitEphemeron(const void* key, TraceDescriptor value_desc) {}
// Weak containers: `strong_desc` traces the container strongly, `weak_desc`
// traces it weakly (its callback may be nullptr when no tracing is needed),
// and `callback`/`data` perform the final weak processing of the backing
// store.
virtual void VisitWeakContainer(const void* self, TraceDescriptor strong_desc,
                                TraceDescriptor weak_desc,
                                WeakCallback callback, const void* data) {}
private:
template <typename T, void (T::*method)(const LivenessBroker&)>
......
......@@ -446,6 +446,21 @@ class VisiblityVisitor final : public JSVisitor {
const cppgc::SourceLocation&) final {}
void VisitWeakRoot(const void*, cppgc::TraceDescriptor, cppgc::WeakCallback,
const void*, const cppgc::SourceLocation&) final {}
void VisitWeakContainer(const void* object,
cppgc::TraceDescriptor strong_desc,
cppgc::TraceDescriptor weak_desc, cppgc::WeakCallback,
const void*) {
if (!weak_desc.callback) {
// Weak container does not contribute to liveness.
return;
}
// Heap snapshot is always run after a GC so we know there are no dead
// entries in the backing store, thus it safe to trace it strongly.
if (object) {
Visit(object, strong_desc);
}
}
// JS handling.
void Visit(const JSMemberBase& ref) final {}
......
......@@ -32,6 +32,19 @@ class UnifiedHeapVerificationVisitor final : public JSVisitor {
// at this point.
state_.VerifyMarked(desc.base_object_payload);
}
void VisitWeakContainer(const void* object, cppgc::TraceDescriptor,
cppgc::TraceDescriptor weak_desc, cppgc::WeakCallback,
const void*) {
if (!object) return;
// Contents of weak containers are found themselves through page iteration
// and are treated strongly, similar to how they are treated strongly when
// found through stack scanning. The verification here only makes sure that
// the container itself is properly marked.
state_.VerifyMarked(weak_desc.base_object_payload);
}
void Visit(const internal::JSMemberBase& ref) final {
// TODO(chromium:1056170): Verify V8 object is indeed marked.
}
......
......@@ -37,6 +37,12 @@ void UnifiedHeapMarkingVisitorBase::VisitEphemeron(const void* key,
marking_state_.ProcessEphemeron(key, value_desc);
}
// Forwards weak containers to the marking state. Only the weak trace
// descriptor is forwarded; `strong_desc` is unused here — strong re-tracing
// of stack-reachable containers is presumably driven by the weak containers
// worklist (see commit description).
void UnifiedHeapMarkingVisitorBase::VisitWeakContainer(
    const void* self, TraceDescriptor strong_desc, TraceDescriptor weak_desc,
    WeakCallback callback, const void* data) {
  marking_state_.ProcessWeakContainer(self, weak_desc, callback, data);
}
void UnifiedHeapMarkingVisitorBase::RegisterWeakCallback(WeakCallback callback,
const void* object) {
marking_state_.RegisterWeakCallback(callback, object);
......
......@@ -44,6 +44,9 @@ class V8_EXPORT_PRIVATE UnifiedHeapMarkingVisitorBase : public JSVisitor {
void Visit(const void*, TraceDescriptor) final;
void VisitWeak(const void*, TraceDescriptor, WeakCallback, const void*) final;
void VisitEphemeron(const void*, TraceDescriptor) final;
void VisitWeakContainer(const void* self, TraceDescriptor strong_desc,
TraceDescriptor weak_desc, WeakCallback callback,
const void* data) final;
void RegisterWeakCallback(WeakCallback, const void*) final;
// JS handling.
......
......@@ -189,6 +189,8 @@ MarkerBase::~MarkerBase() {
marking_worklists_.discovered_ephemeron_pairs_worklist()->Clear();
#endif
}
marking_worklists_.weak_containers_worklist()->Clear();
}
void MarkerBase::StartMarking() {
......
......@@ -213,7 +213,9 @@ class V8_EXPORT_PRIVATE Marker final : public MarkerBase {
};
// Write barrier for objects still under construction: such objects cannot be
// traced yet and are deferred to the not-fully-constructed worklist.
// NOTE(review): This looks like an unmarked diff — the first, non-templated
// Push call appears to be the removed old line and the templated kAtomic Push
// its replacement; as written the object is pushed twice. Confirm against the
// original change.
void MarkerBase::WriteBarrierForInConstructionObject(HeapObjectHeader& header) {
  mutator_marking_state_.not_fully_constructed_worklist().Push(&header);
  mutator_marking_state_.not_fully_constructed_worklist()
      .Push<MarkingWorklists::NotFullyConstructedWorklist::AccessMode::kAtomic>(
          &header);
}
void MarkerBase::WriteBarrierForObject(HeapObjectHeader& header) {
......
......@@ -11,7 +11,8 @@ namespace internal {
void MutatorMarkingState::FlushNotFullyConstructedObjects() {
std::unordered_set<HeapObjectHeader*> objects =
not_fully_constructed_worklist_.Extract();
not_fully_constructed_worklist_.Extract<
MarkingWorklists::NotFullyConstructedWorklist::AccessMode::kAtomic>();
for (HeapObjectHeader* object : objects) {
if (MarkNoPush(*object))
previously_not_fully_constructed_worklist_.Push(object);
......
......@@ -26,10 +26,19 @@ class MarkingStateBase {
inline void MarkAndPush(const void*, TraceDescriptor);
inline void MarkAndPush(HeapObjectHeader&);
inline void PushMarked(HeapObjectHeader&, TraceDescriptor desc);
inline void RegisterWeakReferenceIfNeeded(const void*, TraceDescriptor,
WeakCallback, const void*);
inline void RegisterWeakCallback(WeakCallback, const void*);
// Weak containers are special in that they may require re-tracing if
// reachable through stack, even if the container was already traced before.
// ProcessWeakContainer records which weak containers were already marked so
// that conservative stack scanning knows to retrace them.
inline void ProcessWeakContainer(const void*, TraceDescriptor, WeakCallback,
const void*);
inline void ProcessEphemeron(const void*, TraceDescriptor);
inline void AccountMarkedBytes(const HeapObjectHeader&);
......@@ -75,12 +84,17 @@ class MarkingStateBase {
ephemeron_pairs_for_processing_worklist() {
return ephemeron_pairs_for_processing_worklist_;
}
// Returns the worklist recording already-marked weak containers, used so
// that conservative stack scanning knows to re-trace them (see
// ProcessWeakContainer above).
MarkingWorklists::WeakContainersWorklist& weak_containers_worklist() {
  return weak_containers_worklist_;
}
protected:
inline void MarkAndPush(HeapObjectHeader&, TraceDescriptor);
inline bool MarkNoPush(HeapObjectHeader&);
inline void RegisterWeakContainer(HeapObjectHeader&);
#ifdef DEBUG
HeapBase& heap_;
#endif // DEBUG
......@@ -98,6 +112,7 @@ class MarkingStateBase {
discovered_ephemeron_pairs_worklist_;
MarkingWorklists::EphemeronPairsWorklist::Local
ephemeron_pairs_for_processing_worklist_;
MarkingWorklists::WeakContainersWorklist& weak_containers_worklist_;
size_t marked_bytes_ = 0;
};
......@@ -120,7 +135,8 @@ MarkingStateBase::MarkingStateBase(HeapBase& heap,
discovered_ephemeron_pairs_worklist_(
marking_worklists.discovered_ephemeron_pairs_worklist()),
ephemeron_pairs_for_processing_worklist_(
marking_worklists.ephemeron_pairs_for_processing_worklist()) {
marking_worklists.ephemeron_pairs_for_processing_worklist()),
weak_containers_worklist_(*marking_worklists.weak_containers_worklist()) {
}
void MarkingStateBase::MarkAndPush(const void* object, TraceDescriptor desc) {
......@@ -135,9 +151,11 @@ void MarkingStateBase::MarkAndPush(HeapObjectHeader& header,
DCHECK_NOT_NULL(desc.callback);
if (header.IsInConstruction<HeapObjectHeader::AccessMode::kAtomic>()) {
not_fully_constructed_worklist_.Push(&header);
not_fully_constructed_worklist_.Push<
MarkingWorklists::NotFullyConstructedWorklist::AccessMode::kAtomic>(
&header);
} else if (MarkNoPush(header)) {
marking_worklist_.Push(desc);
PushMarked(header, desc);
}
}
......@@ -157,6 +175,15 @@ void MarkingStateBase::MarkAndPush(HeapObjectHeader& header) {
GlobalGCInfoTable::GCInfoFromIndex(header.GetGCInfoIndex()).trace});
}
// Enqueues an object that is already marked and fully constructed for
// tracing on the main marking worklist.
void MarkingStateBase::PushMarked(HeapObjectHeader& header,
                                  TraceDescriptor desc) {
  DCHECK_NOT_NULL(desc.callback);
  DCHECK(!header.IsInConstruction<HeapObjectHeader::AccessMode::kAtomic>());
  DCHECK(header.IsMarked<HeapObjectHeader::AccessMode::kAtomic>());
  marking_worklist_.Push(desc);
}
void MarkingStateBase::RegisterWeakReferenceIfNeeded(const void* object,
TraceDescriptor desc,
WeakCallback weak_callback,
......@@ -172,9 +199,47 @@ void MarkingStateBase::RegisterWeakReferenceIfNeeded(const void* object,
// Queues `callback` (with its custom `data`) for weak processing.
void MarkingStateBase::RegisterWeakCallback(WeakCallback callback,
                                            const void* object) {
  DCHECK_NOT_NULL(callback);
  weak_callback_worklist_.Push({callback, object});
}
// Records `header` as a marked weak container so that conservative stack
// scanning can later re-trace it (see weak_containers_worklist()). Uses the
// kAtomic access mode — presumably because this can run concurrently with
// other marking threads; confirm.
void MarkingStateBase::RegisterWeakContainer(HeapObjectHeader& header) {
  weak_containers_worklist_
      .Push<MarkingWorklists::WeakContainersWorklist::AccessMode::kAtomic>(
          &header);
}
// Marks a weak container and schedules its final weak processing. The
// container is only marked here, not traced strongly; strong re-tracing of
// stack-reachable containers goes through the weak containers worklist (see
// RegisterWeakContainer / MutatorMarkingState::PushMarkedWeakContainer).
void MarkingStateBase::ProcessWeakContainer(const void* object,
                                            TraceDescriptor desc,
                                            WeakCallback callback,
                                            const void* data) {
  DCHECK_NOT_NULL(object);
  HeapObjectHeader& header =
      HeapObjectHeader::FromPayload(const_cast<void*>(object));
  // Objects under construction cannot be inspected yet; defer them to the
  // not-fully-constructed worklist.
  if (header.IsInConstruction<HeapObjectHeader::AccessMode::kAtomic>()) {
    not_fully_constructed_worklist_.Push<
        MarkingWorklists::NotFullyConstructedWorklist::AccessMode::kAtomic>(
        &header);
    return;
  }
  // Only mark the container initially. Its buckets will be processed after
  // marking. Bail out if the container was already marked.
  if (!MarkNoPush(header)) return;
  RegisterWeakContainer(header);
  // Register final weak processing of the backing store.
  RegisterWeakCallback(callback, data);
  // Weak containers might not require tracing. In such cases the callback in
  // the TraceDescriptor will be nullptr. For ephemerons the callback will be
  // non-nullptr so that the container is traced and the ephemeron pairs are
  // processed.
  if (desc.callback) PushMarked(header, desc);
}
void MarkingStateBase::ProcessEphemeron(const void* key,
TraceDescriptor value_desc) {
// Filter out already marked keys. The write barrier for WeakMember
......@@ -209,6 +274,8 @@ class MutatorMarkingState : public MarkingStateBase {
return MutatorMarkingState::MarkingStateBase::MarkNoPush(header);
}
inline void PushMarkedWeakContainer(HeapObjectHeader&);
inline void DynamicallyMarkAddress(ConstAddress);
// Moves objects in not_fully_constructed_worklist_ to
......@@ -221,8 +288,19 @@ class MutatorMarkingState : public MarkingStateBase {
inline void InvokeWeakRootsCallbackIfNeeded(const void*, TraceDescriptor,
WeakCallback, const void*);
inline bool IsMarkedWeakContainer(HeapObjectHeader&);
};
// Removes a previously registered (already-marked) weak container from the
// weak containers worklist and re-enqueues it for tracing.
void MutatorMarkingState::PushMarkedWeakContainer(HeapObjectHeader& header) {
  DCHECK(weak_containers_worklist_.Contains(&header));
  weak_containers_worklist_.Erase(&header);
  const auto trace_callback =
      GlobalGCInfoTable::GCInfoFromIndex(header.GetGCInfoIndex()).trace;
  PushMarked(header, {header.Payload(), trace_callback});
}
void MutatorMarkingState::DynamicallyMarkAddress(ConstAddress address) {
HeapObjectHeader& header =
BasePage::FromPayload(address)->ObjectHeaderFromInnerAddress(
......@@ -248,6 +326,12 @@ void MutatorMarkingState::InvokeWeakRootsCallbackIfNeeded(
weak_callback(LivenessBrokerFactory::Create(), parameter);
}
// Returns whether `header` was registered as an already-marked weak
// container. Registered containers are expected to be marked.
bool MutatorMarkingState::IsMarkedWeakContainer(HeapObjectHeader& header) {
  if (!weak_containers_worklist_.Contains(&header)) return false;
  DCHECK(header.IsMarked());
  return true;
}
class ConcurrentMarkingState : public MarkingStateBase {
public:
ConcurrentMarkingState(HeapBase& heap, MarkingWorklists& marking_worklists)
......
......@@ -42,7 +42,7 @@ void VerificationState::VerifyMarked(const void* base_object_payload) const {
}
}
void MarkingVerifierBase::VisitConservatively(
void MarkingVerifierBase::VisitInConstructionConservatively(
HeapObjectHeader& header, TraceConservativelyCallback callback) {
CHECK(header.IsMarked());
in_construction_objects_->insert(&header);
......@@ -90,6 +90,18 @@ class VerificationVisitor final : public cppgc::Visitor {
state_.VerifyMarked(desc.base_object_payload);
}
void VisitWeakContainer(const void* object, TraceDescriptor,
TraceDescriptor weak_desc, WeakCallback,
const void*) {
if (!object) return;
// Contents of weak containers are found themselves through page iteration
// and are treated strongly, similar to how they are treated strongly when
// found through stack scanning. The verification here only makes sure that
// the container itself is properly marked.
state_.VerifyMarked(weak_desc.base_object_payload);
}
private:
VerificationState& state_;
};
......
......@@ -45,8 +45,8 @@ class V8_EXPORT_PRIVATE MarkingVerifierBase
virtual void SetCurrentParent(const HeapObjectHeader*) = 0;
private:
void VisitConservatively(HeapObjectHeader&,
TraceConservativelyCallback) final;
void VisitInConstructionConservatively(HeapObjectHeader&,
TraceConservativelyCallback) final;
void VisitPointer(const void*) final;
bool VisitHeapObjectHeader(HeapObjectHeader*);
......
......@@ -30,6 +30,14 @@ void MarkingVisitorBase::VisitEphemeron(const void* key,
marking_state_.ProcessEphemeron(key, value_desc);
}
// Forwards weak containers to the marking state. Only the weak descriptor is
// used during marking; `strong_desc` is unused here.
void MarkingVisitorBase::VisitWeakContainer(const void* object,
                                            TraceDescriptor strong_desc,
                                            TraceDescriptor weak_desc,
                                            WeakCallback callback,
                                            const void* data) {
  marking_state_.ProcessWeakContainer(object, weak_desc, callback, data);
}
void MarkingVisitorBase::RegisterWeakCallback(WeakCallback callback,
const void* object) {
marking_state_.RegisterWeakCallback(callback, object);
......@@ -40,8 +48,19 @@ ConservativeMarkingVisitor::ConservativeMarkingVisitor(
: ConservativeTracingVisitor(heap, *heap.page_backend(), visitor),
marking_state_(marking_state) {}
// Conservative handling of fully constructed objects: an already-marked weak
// container is re-pushed so its contents are traced strongly; everything else
// defers to the base-class conservative visitation.
// NOTE(review): The two signature lines below look like an unmarked diff
// (old name VisitConservatively next to new name
// VisitFullyConstructedConservatively); as written this does not compile —
// confirm against the original change.
void ConservativeMarkingVisitor::VisitConservatively(
void ConservativeMarkingVisitor::VisitFullyConstructedConservatively(
    HeapObjectHeader& header) {
  if (header.IsMarked()) {
    if (marking_state_.IsMarkedWeakContainer(header))
      marking_state_.PushMarkedWeakContainer(header);
    return;
  }
  ConservativeTracingVisitor::VisitFullyConstructedConservatively(header);
}
void ConservativeMarkingVisitor::VisitInConstructionConservatively(
HeapObjectHeader& header, TraceConservativelyCallback callback) {
DCHECK(!marking_state_.IsMarkedWeakContainer(header));
marking_state_.MarkNoPush(header);
marking_state_.AccountMarkedBytes(header);
callback(this, header);
......
......@@ -29,6 +29,9 @@ class V8_EXPORT_PRIVATE MarkingVisitorBase : public VisitorBase {
void Visit(const void*, TraceDescriptor) final;
void VisitWeak(const void*, TraceDescriptor, WeakCallback, const void*) final;
void VisitEphemeron(const void*, TraceDescriptor) final;
void VisitWeakContainer(const void* self, TraceDescriptor strong_desc,
TraceDescriptor weak_desc, WeakCallback callback,
const void* data) final;
void RegisterWeakCallback(WeakCallback, const void*) final;
MarkingStateBase& marking_state_;
......@@ -71,8 +74,9 @@ class ConservativeMarkingVisitor : public ConservativeTracingVisitor,
~ConservativeMarkingVisitor() override = default;
private:
void VisitConservatively(HeapObjectHeader&,
TraceConservativelyCallback) final;
void VisitFullyConstructedConservatively(HeapObjectHeader&) final;
void VisitInConstructionConservatively(HeapObjectHeader&,
TraceConservativelyCallback) final;
void VisitPointer(const void*) final;
MutatorMarkingState& marking_state_;
......
......@@ -21,41 +21,9 @@ void MarkingWorklists::ClearForTesting() {
ephemeron_pairs_for_processing_worklist_.Clear();
}
// NOTE(review): This region interleaves the removed NotFullyConstructedWorklist
// member definitions with their ExternalMarkingWorklist replacement (unmarked
// diff); the doubled destructor signature below does not compile as-is —
// confirm against the original change.

// Inserts `object` into the set under the lock.
void MarkingWorklists::NotFullyConstructedWorklist::Push(
    HeapObjectHeader* object) {
  DCHECK_NOT_NULL(object);
  v8::base::MutexGuard guard(&lock_);
  objects_.insert(object);
}

// Atomically moves out all entries, leaving the set empty.
std::unordered_set<HeapObjectHeader*>
MarkingWorklists::NotFullyConstructedWorklist::Extract() {
  v8::base::MutexGuard guard(&lock_);
  std::unordered_set<HeapObjectHeader*> extracted;
  std::swap(extracted, objects_);
  DCHECK(objects_.empty());
  return extracted;
}

// Discards all entries under the lock.
void MarkingWorklists::NotFullyConstructedWorklist::Clear() {
  v8::base::MutexGuard guard(&lock_);
  objects_.clear();
}

bool MarkingWorklists::NotFullyConstructedWorklist::IsEmpty() {
  v8::base::MutexGuard guard(&lock_);
  return objects_.empty();
}

// Destructor asserts that no entries leaked past marking.
MarkingWorklists::NotFullyConstructedWorklist::~NotFullyConstructedWorklist() {
MarkingWorklists::ExternalMarkingWorklist::~ExternalMarkingWorklist() {
  DCHECK(IsEmpty());
}

// Test-only membership check, taken under the lock.
bool MarkingWorklists::NotFullyConstructedWorklist::ContainsForTesting(
    HeapObjectHeader* object) {
  v8::base::MutexGuard guard(&lock_);
  return objects_.find(object) != objects_.end();
}
} // namespace internal
} // namespace cppgc
......@@ -10,13 +10,45 @@
#include "include/cppgc/visitor.h"
#include "src/base/platform/mutex.h"
#include "src/heap/base/worklist.h"
#include "src/heap/cppgc/heap-object-header.h"
namespace cppgc {
namespace internal {
class HeapObjectHeader;
class MarkingWorklists {
private:
// Mutex-protected set of HeapObjectHeaders shared beyond the marker. All
// operations are templated on an AccessMode: kAtomic takes the mutex (see the
// ConditionalMutexGuard specializations below), kNonAtomic skips locking and
// is only safe when no concurrent access is possible.
class V8_EXPORT_PRIVATE ExternalMarkingWorklist {
 public:
  using AccessMode = HeapObjectHeader::AccessMode;

  template <AccessMode = AccessMode::kNonAtomic>
  void Push(HeapObjectHeader*);
  template <AccessMode = AccessMode::kNonAtomic>
  void Erase(HeapObjectHeader*);
  template <AccessMode = AccessMode::kNonAtomic>
  bool Contains(HeapObjectHeader*);
  // Moves out all entries, leaving the worklist empty.
  template <AccessMode = AccessMode::kNonAtomic>
  std::unordered_set<HeapObjectHeader*> Extract();
  template <AccessMode = AccessMode::kNonAtomic>
  void Clear();
  template <AccessMode = AccessMode::kNonAtomic>
  bool IsEmpty();

  // Asserts emptiness on destruction (definition in the .cc file).
  ~ExternalMarkingWorklist();

 private:
  // Holds `lock_` for kAtomic; no-op for kNonAtomic.
  template <AccessMode>
  struct ConditionalMutexGuard;

  // Heap allocation is deliberately disallowed; worklists are embedded
  // members.
  void* operator new(size_t) = delete;
  void* operator new[](size_t) = delete;
  void operator delete(void*) = delete;
  void operator delete[](void*) = delete;

  v8::base::Mutex lock_;
  std::unordered_set<HeapObjectHeader*> objects_;
};
public:
static constexpr int kMutatorThreadId = 0;
......@@ -42,6 +74,7 @@ class MarkingWorklists {
// Since the work list is currently a temporary object this is not a problem.
using MarkingWorklist =
heap::base::Worklist<MarkingItem, 512 /* local entries */>;
using NotFullyConstructedWorklist = ExternalMarkingWorklist;
using PreviouslyNotFullyConstructedWorklist =
heap::base::Worklist<HeapObjectHeader*, 16 /* local entries */>;
using WeakCallbackWorklist =
......@@ -53,27 +86,7 @@ class MarkingWorklists {
64 /* local entries */>;
using EphemeronPairsWorklist =
heap::base::Worklist<EphemeronPairItem, 64 /* local entries */>;
class V8_EXPORT_PRIVATE NotFullyConstructedWorklist {
public:
void Push(HeapObjectHeader*);
std::unordered_set<HeapObjectHeader*> Extract();
void Clear();
bool IsEmpty();
~NotFullyConstructedWorklist();
bool ContainsForTesting(HeapObjectHeader*);
private:
void* operator new(size_t) = delete;
void* operator new[](size_t) = delete;
void operator delete(void*) = delete;
void operator delete[](void*) = delete;
v8::base::Mutex lock_;
std::unordered_set<HeapObjectHeader*> objects_;
};
using WeakContainersWorklist = ExternalMarkingWorklist;
MarkingWorklist* marking_worklist() { return &marking_worklist_; }
NotFullyConstructedWorklist* not_fully_constructed_worklist() {
......@@ -98,6 +111,9 @@ class MarkingWorklists {
EphemeronPairsWorklist* ephemeron_pairs_for_processing_worklist() {
return &ephemeron_pairs_for_processing_worklist_;
}
// Accessor for the weak containers worklist (already-marked weak containers
// that may need re-tracing when reachable from stack).
WeakContainersWorklist* weak_containers_worklist() {
  return &weak_containers_worklist_;
}
void ClearForTesting();
......@@ -111,8 +127,68 @@ class MarkingWorklists {
ConcurrentMarkingBailoutWorklist concurrent_marking_bailout_worklist_;
EphemeronPairsWorklist discovered_ephemeron_pairs_worklist_;
EphemeronPairsWorklist ephemeron_pairs_for_processing_worklist_;
WeakContainersWorklist weak_containers_worklist_;
};
// Lock policy for ExternalMarkingWorklist operations: the kNonAtomic
// specialization is a no-op guard, while the kAtomic specialization holds the
// worklist mutex for the duration of the operation.
template <>
struct MarkingWorklists::ExternalMarkingWorklist::ConditionalMutexGuard<
    MarkingWorklists::ExternalMarkingWorklist::AccessMode::kNonAtomic> {
  explicit ConditionalMutexGuard(v8::base::Mutex*) {}
};

template <>
struct MarkingWorklists::ExternalMarkingWorklist::ConditionalMutexGuard<
    MarkingWorklists::ExternalMarkingWorklist::AccessMode::kAtomic> {
  explicit ConditionalMutexGuard(v8::base::Mutex* lock) : guard_(lock) {}

 private:
  v8::base::MutexGuard guard_;
};
// Inserts `object`, taking the lock only in kAtomic mode.
template <MarkingWorklists::ExternalMarkingWorklist::AccessMode mode>
void MarkingWorklists::ExternalMarkingWorklist::Push(HeapObjectHeader* object) {
  DCHECK_NOT_NULL(object);
  ConditionalMutexGuard<mode> guard(&lock_);
  objects_.insert(object);
}

// Removes `object` if present, taking the lock only in kAtomic mode.
template <MarkingWorklists::ExternalMarkingWorklist::AccessMode mode>
void MarkingWorklists::ExternalMarkingWorklist::Erase(
    HeapObjectHeader* object) {
  DCHECK_NOT_NULL(object);
  ConditionalMutexGuard<mode> guard(&lock_);
  objects_.erase(object);
}
// Returns whether `object` is currently held by this worklist.
template <MarkingWorklists::ExternalMarkingWorklist::AccessMode mode>
bool MarkingWorklists::ExternalMarkingWorklist::Contains(
    HeapObjectHeader* object) {
  ConditionalMutexGuard<mode> guard(&lock_);
  return objects_.count(object) > 0;
}
// Moves out all entries via swap, leaving the worklist empty.
template <MarkingWorklists::ExternalMarkingWorklist::AccessMode mode>
std::unordered_set<HeapObjectHeader*>
MarkingWorklists::ExternalMarkingWorklist::Extract() {
  ConditionalMutexGuard<mode> guard(&lock_);
  std::unordered_set<HeapObjectHeader*> extracted;
  std::swap(extracted, objects_);
  DCHECK(objects_.empty());
  return extracted;
}

// Discards all entries.
template <MarkingWorklists::ExternalMarkingWorklist::AccessMode mode>
void MarkingWorklists::ExternalMarkingWorklist::Clear() {
  ConditionalMutexGuard<mode> guard(&lock_);
  objects_.clear();
}

template <MarkingWorklists::ExternalMarkingWorklist::AccessMode mode>
bool MarkingWorklists::ExternalMarkingWorklist::IsEmpty() {
  ConditionalMutexGuard<mode> guard(&lock_);
  return objects_.empty();
}
} // namespace internal
} // namespace cppgc
......
......@@ -69,14 +69,19 @@ void ConservativeTracingVisitor::TraceConservativelyIfNeeded(
// Conservatively traces an object found via stack scanning, dispatching on
// whether the object is fully constructed.
// NOTE(review): This body looks like an unmarked diff — the inline
// visitor_.Visit(...) statement and the VisitConservatively(...) call appear
// to be the removed old lines next to their Visit*Conservatively
// replacements, so work is duplicated as written; confirm against the
// original change.
void ConservativeTracingVisitor::TraceConservativelyIfNeeded(
    HeapObjectHeader& header) {
  if (!header.IsInConstruction<HeapObjectHeader::AccessMode::kNonAtomic>()) {
    visitor_.Visit(
        header.Payload(),
        {header.Payload(),
         GlobalGCInfoTable::GCInfoFromIndex(header.GetGCInfoIndex()).trace});
    VisitFullyConstructedConservatively(header);
  } else {
    VisitConservatively(header, TraceConservatively);
    VisitInConstructionConservatively(header, TraceConservatively);
  }
}
// Dispatches a fully constructed, conservatively found object to the wrapped
// visitor, using the trace callback registered in the global GCInfo table.
void ConservativeTracingVisitor::VisitFullyConstructedConservatively(
    HeapObjectHeader& header) {
  const auto trace =
      GlobalGCInfoTable::GCInfoFromIndex(header.GetGCInfoIndex()).trace;
  visitor_.Visit(header.Payload(), {header.Payload(), trace});
}
} // namespace internal
} // namespace cppgc
......@@ -53,8 +53,10 @@ class ConservativeTracingVisitor {
protected:
using TraceConservativelyCallback = void(ConservativeTracingVisitor*,
const HeapObjectHeader&);
virtual void VisitConservatively(HeapObjectHeader&,
TraceConservativelyCallback) {}
virtual void V8_EXPORT_PRIVATE
VisitFullyConstructedConservatively(HeapObjectHeader&);
virtual void VisitInConstructionConservatively(HeapObjectHeader&,
TraceConservativelyCallback) {}
HeapBase& heap_;
PageBackend& page_backend_;
......
......@@ -115,6 +115,7 @@ v8_source_set("cppgc_unittests_sources") {
"heap/cppgc/tests.cc",
"heap/cppgc/tests.h",
"heap/cppgc/visitor-unittest.cc",
"heap/cppgc/weak-container-unittest.cc",
"heap/cppgc/write-barrier-unittest.cc",
]
......
......@@ -220,9 +220,8 @@ TEST_F(MarkingVisitorTest, MarkMemberInConstruction) {
visitor.Trace(object);
});
HeapObjectHeader& header = HeapObjectHeader::FromPayload(gced);
EXPECT_TRUE(visitor.marking_state()
.not_fully_constructed_worklist()
.ContainsForTesting(&header));
EXPECT_TRUE(visitor.marking_state().not_fully_constructed_worklist().Contains(
&header));
EXPECT_FALSE(header.IsMarked());
}
......@@ -236,9 +235,8 @@ TEST_F(MarkingVisitorTest, MarkMemberMixinInConstruction) {
visitor.Trace(mixin);
});
HeapObjectHeader& header = HeapObjectHeader::FromPayload(gced);
EXPECT_TRUE(visitor.marking_state()
.not_fully_constructed_worklist()
.ContainsForTesting(&header));
EXPECT_TRUE(visitor.marking_state().not_fully_constructed_worklist().Contains(
&header));
EXPECT_FALSE(header.IsMarked());
}
......@@ -252,9 +250,9 @@ TEST_F(MarkingVisitorTest, DontMarkWeakMemberInConstruction) {
visitor.Trace(object);
});
HeapObjectHeader& header = HeapObjectHeader::FromPayload(gced);
EXPECT_FALSE(visitor.marking_state()
.not_fully_constructed_worklist()
.ContainsForTesting(&header));
EXPECT_FALSE(
visitor.marking_state().not_fully_constructed_worklist().Contains(
&header));
EXPECT_FALSE(header.IsMarked());
}
......@@ -268,9 +266,9 @@ TEST_F(MarkingVisitorTest, DontMarkWeakMemberMixinInConstruction) {
visitor.Trace(mixin);
});
HeapObjectHeader& header = HeapObjectHeader::FromPayload(gced);
EXPECT_FALSE(visitor.marking_state()
.not_fully_constructed_worklist()
.ContainsForTesting(&header));
EXPECT_FALSE(
visitor.marking_state().not_fully_constructed_worklist().Contains(
&header));
EXPECT_FALSE(header.IsMarked());
}
......@@ -284,9 +282,8 @@ TEST_F(MarkingVisitorTest, MarkPersistentInConstruction) {
visitor.TraceRootForTesting(object, SourceLocation::Current());
});
HeapObjectHeader& header = HeapObjectHeader::FromPayload(gced);
EXPECT_TRUE(visitor.marking_state()
.not_fully_constructed_worklist()
.ContainsForTesting(&header));
EXPECT_TRUE(visitor.marking_state().not_fully_constructed_worklist().Contains(
&header));
EXPECT_FALSE(header.IsMarked());
}
......@@ -300,9 +297,8 @@ TEST_F(MarkingVisitorTest, MarkPersistentMixinInConstruction) {
visitor.TraceRootForTesting(mixin, SourceLocation::Current());
});
HeapObjectHeader& header = HeapObjectHeader::FromPayload(gced);
EXPECT_TRUE(visitor.marking_state()
.not_fully_constructed_worklist()
.ContainsForTesting(&header));
EXPECT_TRUE(visitor.marking_state().not_fully_constructed_worklist().Contains(
&header));
EXPECT_FALSE(header.IsMarked());
}
......
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "include/cppgc/allocation.h"
#include "src/heap/cppgc/marker.h"
#include "src/heap/cppgc/marking-visitor.h"
#include "src/heap/cppgc/stats-collector.h"
#include "test/unittests/heap/cppgc/tests.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace cppgc {
namespace internal {
namespace {
// Fixture driving incremental marking manually so individual tests can inject
// TraceWeakContainer calls before marking finishes.
class WeakContainerTest : public testing::TestWithHeap {
 public:
  using Config = Marker::MarkingConfig;

  // Starts an incremental major GC assuming no heap pointers on stack.
  void StartMarking() {
    Config config = {Config::CollectionType::kMajor,
                     Config::StackState::kNoHeapPointers,
                     Config::MarkingType::kIncremental};
    GetMarkerRef() = MarkerFactory::CreateAndStartMarking<Marker>(
        Heap::From(GetHeap())->AsBase(), GetPlatformHandle().get(), config);
  }

  // Finishes marking with the given stack state and notifies the stats
  // collector so the next test starts from a clean state.
  void FinishMarking(Config::StackState stack_state) {
    GetMarkerRef()->FinishMarking(stack_state);
    Heap::From(GetHeap())->stats_collector()->NotifySweepingCompleted();
  }
};
// GCed type whose weak trace descriptor carries a real trace callback (see
// the TraceTrait specialization below), i.e. it is traced as a weak
// container.
class TraceableGCed : public GarbageCollected<TraceableGCed> {
 public:
  void Trace(cppgc::Visitor*) const { n_trace_calls++; }
  static size_t n_trace_calls;  // Counts Trace invocations within a test.
};
size_t TraceableGCed::n_trace_calls = 0u;

// GCed type whose weak trace descriptor carries a nullptr callback (see the
// TraceTrait specialization below), i.e. the marker skips weak tracing.
class NonTraceableGCed : public GarbageCollected<NonTraceableGCed> {
 public:
  void Trace(cppgc::Visitor*) const { n_trace_calls++; }
  static size_t n_trace_calls;  // Counts Trace invocations within a test.
};
size_t NonTraceableGCed::n_trace_calls = 0u;

// Weak callback that intentionally does nothing.
void EmptyWeakCallback(const LivenessBroker&, const void*) {}
} // namespace
} // namespace internal
// Gives TraceableGCed a weak trace descriptor with a real trace callback:
// the marker traces such containers weakly.
template <>
struct TraceTrait<internal::TraceableGCed>
    : public internal::TraceTraitBase<internal::TraceableGCed> {
  static TraceDescriptor GetWeakTraceDescriptor(const void* self) {
    return {self, Trace};
  }
};

// Gives NonTraceableGCed a weak trace descriptor with a nullptr callback:
// the marker skips weak tracing for such containers.
template <>
struct TraceTrait<internal::NonTraceableGCed>
    : public internal::TraceTraitBase<internal::NonTraceableGCed> {
  static TraceDescriptor GetWeakTraceDescriptor(const void* self) {
    return {self, nullptr};
  }
};
namespace internal {
// A weak container with a traceable weak descriptor is traced even without
// heap pointers on stack.
TEST_F(WeakContainerTest, TraceableGCedTraced) {
  TraceableGCed* obj =
      MakeGarbageCollected<TraceableGCed>(GetAllocationHandle());
  TraceableGCed::n_trace_calls = 0u;
  StartMarking();
  GetMarkerRef()->VisitorForTesting().TraceWeakContainer(obj, EmptyWeakCallback,
                                                         nullptr);
  FinishMarking(Config::StackState::kNoHeapPointers);
  EXPECT_NE(0u, TraceableGCed::n_trace_calls);
}

// A weak container with a nullptr weak trace callback is not traced when the
// stack holds no heap pointers.
TEST_F(WeakContainerTest, NonTraceableGCedNotTraced) {
  NonTraceableGCed* obj =
      MakeGarbageCollected<NonTraceableGCed>(GetAllocationHandle());
  NonTraceableGCed::n_trace_calls = 0u;
  StartMarking();
  GetMarkerRef()->VisitorForTesting().TraceWeakContainer(obj, EmptyWeakCallback,
                                                         nullptr);
  FinishMarking(Config::StackState::kNoHeapPointers);
  EXPECT_EQ(0u, NonTraceableGCed::n_trace_calls);
}

// With conservative stack scanning even a non-traceable weak container is
// re-traced strongly because it is reachable from stack.
// NOTE(review): The test name says "NotTracedConservatively" but the
// expectation below asserts the container WAS traced — the name appears
// inverted relative to the behavior; confirm intent.
TEST_F(WeakContainerTest, NonTraceableGCedNotTracedConservatively) {
  NonTraceableGCed* obj =
      MakeGarbageCollected<NonTraceableGCed>(GetAllocationHandle());
  NonTraceableGCed::n_trace_calls = 0u;
  StartMarking();
  GetMarkerRef()->VisitorForTesting().TraceWeakContainer(obj, EmptyWeakCallback,
                                                         nullptr);
  FinishMarking(Config::StackState::kMayContainHeapPointers);
  EXPECT_NE(0u, NonTraceableGCed::n_trace_calls);
}
// A conservative GC must trace the weak container strictly more often than a
// pointer-free-stack GC of the same setup (strong re-trace on top of the
// weak tracing).
TEST_F(WeakContainerTest, ConservativeGCTracesWeakContainer) {
  size_t trace_count_without_conservative;
  {
    TraceableGCed* obj =
        MakeGarbageCollected<TraceableGCed>(GetAllocationHandle());
    TraceableGCed::n_trace_calls = 0u;
    StartMarking();
    GetMarkerRef()->VisitorForTesting().TraceWeakContainer(
        obj, EmptyWeakCallback, nullptr);
    FinishMarking(Config::StackState::kNoHeapPointers);
    trace_count_without_conservative = TraceableGCed::n_trace_calls;
  }
  {
    TraceableGCed* obj =
        MakeGarbageCollected<TraceableGCed>(GetAllocationHandle());
    TraceableGCed::n_trace_calls = 0u;
    StartMarking();
    GetMarkerRef()->VisitorForTesting().TraceWeakContainer(
        obj, EmptyWeakCallback, nullptr);
    FinishMarking(Config::StackState::kMayContainHeapPointers);
    EXPECT_LT(trace_count_without_conservative, TraceableGCed::n_trace_calls);
  }
}

// Multiple stack copies of the same container pointer must cause exactly one
// conservative re-trace.
TEST_F(WeakContainerTest, ConservativeGCTracesWeakContainerOnce) {
  NonTraceableGCed* obj =
      MakeGarbageCollected<NonTraceableGCed>(GetAllocationHandle());
  NonTraceableGCed* copy_obj = obj;
  USE(copy_obj);
  NonTraceableGCed* another_copy_obj = obj;
  USE(another_copy_obj);
  NonTraceableGCed::n_trace_calls = 0u;
  StartMarking();
  GetMarkerRef()->VisitorForTesting().TraceWeakContainer(obj, EmptyWeakCallback,
                                                         nullptr);
  FinishMarking(Config::StackState::kMayContainHeapPointers);
  EXPECT_EQ(1u, NonTraceableGCed::n_trace_calls);
}
namespace {

// Records how often the weak callback ran and the data it last received.
struct WeakCallback {
  static void callback(const LivenessBroker&, const void* data) {
    n_callback_called++;
    obj = data;
  }
  static size_t n_callback_called;  // Number of callback() invocations.
  static const void* obj;           // Last `data` argument observed.
};
size_t WeakCallback::n_callback_called = 0u;
const void* WeakCallback::obj = nullptr;

}  // namespace
// The weak callback registered via TraceWeakContainer must run during weak
// processing and receive the data pointer it was registered with.
TEST_F(WeakContainerTest, WeakContainerWeakCallbackCalled) {
  TraceableGCed* obj =
      MakeGarbageCollected<TraceableGCed>(GetAllocationHandle());
  WeakCallback::n_callback_called = 0u;
  WeakCallback::obj = nullptr;
  StartMarking();
  GetMarkerRef()->VisitorForTesting().TraceWeakContainer(
      obj, WeakCallback::callback, obj);
  FinishMarking(Config::StackState::kMayContainHeapPointers);
  EXPECT_NE(0u, WeakCallback::n_callback_called);
  EXPECT_EQ(obj, WeakCallback::obj);
}
} // namespace internal
} // namespace cppgc
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment