Commit 186baea1 authored by Michael Lippautz, committed by V8 LUCI CQ

[heap] Refactorings and cleanups around global handles

Splitting off cosmetics and unrelated test refactorings from a larger
CL reworking traced global handles.

Bug: v8:13141
Change-Id: I675cdbd4898346ab55b0db65d53e992f2eb95744
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3816671
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82295}
parent ddbe3966
......@@ -117,7 +117,7 @@ class V8_EXPORT_PRIVATE GlobalHandles final {
void IterateAllRoots(RootVisitor* v);
void IterateAllYoungRoots(RootVisitor* v);
// Iterates over all traces handles represented by TracedGlobal.
// Iterates over all traced handles represented by `v8::TracedReferenceBase`.
void IterateTracedNodes(
v8::EmbedderHeapTracer::TracedGlobalHandleVisitor* visitor);
......@@ -166,11 +166,11 @@ class V8_EXPORT_PRIVATE GlobalHandles final {
void CleanupOnStackReferencesBelowCurrentStackPosition();
size_t NumberOfOnStackHandlesForTesting();
void IterateAllRootsForTesting(v8::PersistentHandleVisitor* v);
using NodeBounds = std::vector<std::pair<const void*, const void*>>;
NodeBounds GetTracedNodeBounds() const;
void IterateAllRootsForTesting(v8::PersistentHandleVisitor* v);
#ifdef DEBUG
void PrintStats();
void Print();
......
......@@ -12,6 +12,7 @@ namespace base {
// Visitor interface for conservative stack scanning. Implementations are
// handed every stack slot value that may be a pointer; it is up to the
// visitor to decide whether the address is interesting.
class StackVisitor {
public:
virtual ~StackVisitor() = default;
// Called once per candidate pointer found on the scanned stack.
// NOTE(review): `address` is presumably unfiltered (any word-sized stack
// value) — callers must tolerate non-pointer garbage; confirm at call sites.
virtual void VisitPointer(const void* address) = 0;
};
......
......@@ -658,8 +658,9 @@ void CppHeap::StartTracing() {
// Reuse the same local worklist for the mutator marking state which results
// in directly processing the objects by the JS logic. Also avoids
// publishing local objects.
static_cast<UnifiedHeapMarker*>(marker_.get())
->GetMutatorUnifiedHeapMarkingState()
marker_.get()
->To<UnifiedHeapMarker>()
.GetMutatorUnifiedHeapMarkingState()
.Update(isolate_->heap()
->mark_compact_collector()
->local_marking_worklists());
......@@ -697,18 +698,16 @@ bool CppHeap::IsTracingDone() { return marking_done_; }
void CppHeap::EnterFinalPause(cppgc::EmbedderStackState stack_state) {
CHECK(!in_disallow_gc_scope());
in_atomic_pause_ = true;
auto* marker = static_cast<UnifiedHeapMarker*>(marker_.get());
auto& marker = marker_.get()->To<UnifiedHeapMarker>();
// Scan global handles conservatively in case we are attached to an Isolate.
if (isolate_) {
marker->conservative_visitor().SetTracedNodeBounds(
marker.conservative_visitor().SetTracedNodeBounds(
isolate()->global_handles()->GetTracedNodeBounds());
}
marker_->EnterAtomicPause(stack_state);
marker.EnterAtomicPause(stack_state);
if (isolate_ && *collection_type_ == CollectionType::kMinor) {
// Visit V8 -> cppgc references.
TraceV8ToCppGCReferences(isolate_,
static_cast<UnifiedHeapMarker*>(marker_.get())
->GetMutatorMarkingState(),
TraceV8ToCppGCReferences(isolate_, marker.GetMutatorMarkingState(),
wrapper_descriptor_);
}
compactor_.CancelIfShouldNotCompact(MarkingType::kAtomic, stack_state);
......@@ -993,8 +992,7 @@ std::unique_ptr<CppMarkingState> CppHeap::CreateCppMarkingState() {
return std::make_unique<CppMarkingState>(
isolate(), wrapper_descriptor_,
std::make_unique<cppgc::internal::MarkingStateBase>(
AsBase(),
static_cast<UnifiedHeapMarker*>(marker())->GetMarkingWorklists()));
AsBase(), marker()->To<UnifiedHeapMarker>().GetMarkingWorklists()));
}
std::unique_ptr<CppMarkingState>
......@@ -1002,7 +1000,7 @@ CppHeap::CreateCppMarkingStateForMutatorThread() {
DCHECK(IsMarking());
return std::make_unique<CppMarkingState>(
isolate(), wrapper_descriptor_,
static_cast<UnifiedHeapMarker*>(marker())->GetMutatorMarkingState());
marker()->To<UnifiedHeapMarker>().GetMutatorMarkingState());
}
CppHeap::PauseConcurrentMarkingScope::PauseConcurrentMarkingScope(
......
......@@ -80,6 +80,11 @@ class V8_EXPORT_PRIVATE MarkerBase {
MarkerBase(const MarkerBase&) = delete;
MarkerBase& operator=(const MarkerBase&) = delete;
// Downcasts this marker to the concrete `Class` (e.g. UnifiedHeapMarker).
// NOTE(review): this is an unchecked static downcast — there is no DCHECK
// that the dynamic type really is `Class`; callers must guarantee it.
template <typename Class>
Class& To() {
return *static_cast<Class*>(this);
}
// Signals entering the atomic marking pause. The method
// - stops incremental/concurrent marking;
// - flushes back any in-construction worklists if needed;
......
......@@ -24,7 +24,7 @@ TEST_F(TracedReferenceTest, ResetFromLocal) {
v8::HandleScope handles(v8_isolate());
v8::Local<v8::Object> local =
v8::Local<v8::Object>::New(v8_isolate(), v8::Object::New(v8_isolate()));
EXPECT_TRUE(ref.IsEmpty());
ASSERT_TRUE(ref.IsEmpty());
EXPECT_NE(ref, local);
ref.Reset(v8_isolate(), local);
EXPECT_FALSE(ref.IsEmpty());
......@@ -205,7 +205,8 @@ TEST_F(TracedReferenceTest, TracedReferenceTrace) {
}
TEST_F(TracedReferenceTest, NoWriteBarrierOnConstruction) {
if (!FLAG_incremental_marking) return;
if (!FLAG_incremental_marking)
GTEST_SKIP() << "Write barrier tests require incremental marking";
v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
v8::Context::Scope context_scope(context);
......@@ -224,7 +225,8 @@ TEST_F(TracedReferenceTest, NoWriteBarrierOnConstruction) {
}
TEST_F(TracedReferenceTest, WriteBarrierOnHeapReset) {
if (!FLAG_incremental_marking) return;
if (!FLAG_incremental_marking)
GTEST_SKIP() << "Write barrier tests require incremental marking";
v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
v8::Context::Scope context_scope(context);
......@@ -262,7 +264,8 @@ TEST_F(TracedReferenceTest, NoWriteBarrierOnStackReset) {
}
TEST_F(TracedReferenceTest, WriteBarrierOnHeapCopy) {
if (!FLAG_incremental_marking) return;
if (!FLAG_incremental_marking)
GTEST_SKIP() << "Write barrier tests require incremental marking";
v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
v8::Context::Scope context_scope(context);
......@@ -306,7 +309,8 @@ TEST_F(TracedReferenceTest, NoWriteBarrierOnStackCopy) {
}
TEST_F(TracedReferenceTest, WriteBarrierOnMove) {
if (!FLAG_incremental_marking) return;
if (!FLAG_incremental_marking)
GTEST_SKIP() << "Write barrier tests require incremental marking";
v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
v8::Context::Scope context_scope(context);
......@@ -327,7 +331,8 @@ TEST_F(TracedReferenceTest, WriteBarrierOnMove) {
}
TEST_F(TracedReferenceTest, NoWriteBarrierOnStackMove) {
if (!FLAG_incremental_marking) return;
if (!FLAG_incremental_marking)
GTEST_SKIP() << "Write barrier tests require incremental marking";
isolate()->global_handles()->SetStackStart(base::Stack::GetStackStart());
......
......@@ -923,30 +923,6 @@ V8_NOINLINE void OnStackTest(v8::Isolate* v8_isolate,
EXPECT_FALSE(observer.IsEmpty());
}
// Creates a JS API object, points an on-stack TracedReference at it, and
// registers `observer` as a weak global watching the same object.
// V8_NOINLINE keeps this frame (and thus the on-stack reference) at a
// distinct, deeper stack position than the caller, so the reference is
// dead by the time the caller runs.
V8_NOINLINE void CreateTracedReferenceInDeepStack(
v8::Isolate* isolate, v8::Global<v8::Object>* observer) {
v8::TracedReference<v8::Value> stack_ref;
// HandleScope is opened after `stack_ref` so the local below dies with it.
v8::HandleScope scope(isolate);
v8::Local<v8::Object> object(ConstructTraceableJSApiObject(
isolate->GetCurrentContext(), nullptr, nullptr));
stack_ref.Reset(isolate, object);
observer->Reset(isolate, object);
// Weak: `observer` must not itself keep the object alive.
observer->SetWeak();
}
// Verifies that after the embedder declares its stack empty, a stale
// on-stack TracedReference (left behind by the deep-stack helper above)
// does not keep the object alive across a full GC.
V8_NOINLINE void TracedReferenceNotifyEmptyStackTest(
v8::Isolate* v8_isolate, TestEmbedderHeapTracer* tracer) {
v8::Global<v8::Object> observer;
CreateTracedReferenceInDeepStack(v8_isolate, &observer);
// Object is still live immediately after creation.
EXPECT_FALSE(observer.IsEmpty());
// Tell the GC there are no embedder references on the stack, so the
// leftover stack_ref from the helper must be ignored.
reinterpret_cast<i::Isolate*>(v8_isolate)
->heap()
->local_embedder_heap_tracer()
->NotifyEmptyEmbedderStack();
FullGC(v8_isolate);
// The weak observer was cleared: the object was reclaimed.
EXPECT_TRUE(observer.IsEmpty());
}
enum class Operation {
kCopy,
kMove,
......@@ -1179,6 +1155,34 @@ TEST_F(EmbedderTracingTest, TracedReferenceCopy) {
TargetHandling::kInitializedOldGen);
}
namespace {
// Creates a JS API object, points an on-stack TracedReference at it, and
// registers `observer` as a weak global watching the same object.
// V8_NOINLINE keeps this frame (and thus the on-stack reference) at a
// distinct, deeper stack position than the caller, so the reference is
// dead by the time the caller runs.
V8_NOINLINE void CreateTracedReferenceInDeepStack(
v8::Isolate* isolate, v8::Global<v8::Object>* observer) {
v8::TracedReference<v8::Value> stack_ref;
// HandleScope is opened after `stack_ref` so the local below dies with it.
v8::HandleScope scope(isolate);
v8::Local<v8::Object> object(ConstructTraceableJSApiObject(
isolate->GetCurrentContext(), nullptr, nullptr));
stack_ref.Reset(isolate, object);
observer->Reset(isolate, object);
// Weak: `observer` must not itself keep the object alive.
observer->SetWeak();
}
// Verifies that after the embedder declares its stack empty, a stale
// on-stack TracedReference (left behind by the deep-stack helper above)
// does not keep the object alive across a full GC.
V8_NOINLINE void TracedReferenceNotifyEmptyStackTest(
v8::Isolate* v8_isolate, TestEmbedderHeapTracer* tracer) {
v8::Global<v8::Object> observer;
CreateTracedReferenceInDeepStack(v8_isolate, &observer);
// Object is still live immediately after creation.
EXPECT_FALSE(observer.IsEmpty());
// Tell the GC there are no embedder references on the stack, so the
// leftover stack_ref from the helper must be ignored.
reinterpret_cast<i::Isolate*>(v8_isolate)
->heap()
->local_embedder_heap_tracer()
->NotifyEmptyEmbedderStack();
FullGC(v8_isolate);
// The weak observer was cleared: the object was reclaimed.
EXPECT_TRUE(observer.IsEmpty());
}
} // namespace
TEST_F(EmbedderTracingTest, NotifyEmptyStack) {
ManualGCScope manual_gc(i_isolate());
TestEmbedderHeapTracer tracer;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment