Commit 51d2256b authored by Anton Bikineev, committed by V8 LUCI CQ

cppgc: Conservatively scan compressed pointers

Due to collections with inlined storage, Oilpan still supports on-stack
Members, which are always compressed if pointer compression is enabled.
This CL scans halfwords (together with full words) on the stack to find
potential pointers. Since pointers stored on the heap are always compressed
and in-construction objects always reside on the heap, only halfwords need
to be scanned for them.
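
In rough terms, the scanning step now treats every machine word on the stack
as three pointer candidates: the full word plus its two 32-bit halves. The
following is a condensed, illustrative sketch of that logic, assuming a
64-bit build with pointer compression; DecompressHalfword, TryTracePointer
and kSentinel are stand-ins for the CompressedPointer::Decompress,
TryTracePointerConservatively and SentinelPointer::kSentinelValue used in
the diff below:

#include <cstdint>

// Stand-ins for this sketch only; the real implementation lives in
// ConservativeTracingVisitor (see the diff below).
uintptr_t DecompressHalfword(uint32_t half);  // assumed: cage base + offset
void TryTracePointer(uintptr_t candidate);    // assumed: page lookup + tracing
constexpr uintptr_t kSentinel = 1;            // stand-in for the sentinel value

// Considers a single on-stack machine word as a potential pointer: the full
// word itself and, with pointer compression, both of its 32-bit halves.
void TraceStackWord(uintptr_t word) {
  TryTracePointer(word);  // the word may already be a full, uncompressed pointer
  const uint32_t halves[] = {static_cast<uint32_t>(word),
                             static_cast<uint32_t>(word >> 32)};
  for (const uint32_t half : halves) {
    const uintptr_t decompressed = DecompressHalfword(half);
    if (decompressed > kSentinel)  // skip nullptr and the sentinel pointer
      TryTracePointer(decompressed);
  }
}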

Potential follow-up alternatives:
1) Use a separate uncompressed type for pointers in inlined collections;
2) Dynamically register the stack regions that contain compressed pointers.
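
For illustration only, alternative 1) would amount to a member slot type
that never compresses its payload, so that inlined backing storage which may
end up on the stack only ever holds full-width pointers. UncompressedMember
below is an invented name for this sketch, not a type introduced by this CL:

// Hypothetical sketch of alternative 1); not part of this CL.
template <typename T>
class UncompressedMember {
 public:
  UncompressedMember() = default;
  UncompressedMember(T* raw) : raw_(raw) {}
  UncompressedMember& operator=(T* raw) {
    raw_ = raw;
    return *this;
  }
  T* Get() const { return raw_; }

 private:
  T* raw_ = nullptr;  // Always stored as a full pointer, unlike Member<T>.
};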

Bug: chromium:1325007
Change-Id: Ia706fd8e7383d30aff11f4014faa9edd3d289a55
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3644959
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Cr-Commit-Position: refs/heads/main@{#80747}
parent 9e7ada8e
@@ -32,46 +32,83 @@ namespace {
 void TraceConservatively(ConservativeTracingVisitor* conservative_visitor,
                          const HeapObjectHeader& header) {
+#if defined(CPPGC_POINTER_COMPRESSION)
+  using PointerType = uint32_t;
+#else   // !defined(CPPGC_POINTER_COMPRESSION)
+  using PointerType = uintptr_t;
+#endif  // !defined(CPPGC_POINTER_COMPRESSION)
   const auto object_view = ObjectView<>(header);
-  Address* object = reinterpret_cast<Address*>(object_view.Start());
-  for (size_t i = 0; i < (object_view.Size() / sizeof(Address)); ++i) {
-    Address maybe_ptr = object[i];
+  PointerType* object = reinterpret_cast<PointerType*>(object_view.Start());
+  for (size_t i = 0; i < (object_view.Size() / sizeof(PointerType)); ++i) {
+    PointerType maybe_ptr = object[i];
 #if defined(MEMORY_SANITIZER)
     // |object| may be uninitialized by design or just contain padding bytes.
-    // Copy into a local variable that is not poisoned for conservative marking.
+    // Copy into a temporary variable to maintain the original MSAN state.
     MSAN_MEMORY_IS_INITIALIZED(&maybe_ptr, sizeof(maybe_ptr));
 #endif
-    if (maybe_ptr) {
-      conservative_visitor->TraceConservativelyIfNeeded(maybe_ptr);
+    if (maybe_ptr > SentinelPointer::kSentinelValue) {
+#if defined(CPPGC_POINTER_COMPRESSION)
+      // We know that all on-heap pointers are compressed, so don't check full
+      // pointers.
+      Address decompressed_ptr =
+          static_cast<Address>(CompressedPointer::Decompress(maybe_ptr));
+#else   // !defined(CPPGC_POINTER_COMPRESSION)
+      Address decompressed_ptr = reinterpret_cast<Address>(maybe_ptr);
+#endif  // !defined(CPPGC_POINTER_COMPRESSION)
+      conservative_visitor->TraceConservativelyIfNeeded(decompressed_ptr);
     }
   }
 }
 }  // namespace
-void ConservativeTracingVisitor::TraceConservativelyIfNeeded(
-    const void* address) {
+void ConservativeTracingVisitor::TryTracePointerConservatively(
+    Address pointer) {
 #if defined(CPPGC_CAGED_HEAP)
   // TODO(chromium:1056170): Add support for SIMD in stack scanning.
-  if (V8_LIKELY(!heap_.caged_heap().IsOnHeap(address))) return;
-#endif
+  if (V8_LIKELY(!heap_.caged_heap().IsOnHeap(pointer))) return;
+#endif  // defined(CPPGC_CAGED_HEAP)
   const BasePage* page = reinterpret_cast<const BasePage*>(
-      page_backend_.Lookup(static_cast<ConstAddress>(address)));
+      page_backend_.Lookup(const_cast<ConstAddress>(pointer)));
   if (!page) return;
   DCHECK_EQ(&heap_, &page->heap());
-  auto* header = page->TryObjectHeaderFromInnerAddress(
-      const_cast<Address>(reinterpret_cast<ConstAddress>(address)));
+  auto* header = page->TryObjectHeaderFromInnerAddress(pointer);
   if (!header) return;
   TraceConservativelyIfNeeded(*header);
 }
+void ConservativeTracingVisitor::TraceConservativelyIfNeeded(
+    const void* address) {
+  auto pointer = reinterpret_cast<Address>(const_cast<void*>(address));
+  TryTracePointerConservatively(pointer);
+#if defined(CPPGC_POINTER_COMPRESSION)
+  // If pointer compression is enabled, we may have random compressed pointers
+  // on the stack (e.g. due to inlined collections). Extract, decompress and
+  // trace both halfwords.
+  auto decompressed_low =
+      reinterpret_cast<Address>(CompressedPointer::Decompress(
+          static_cast<uint32_t>(reinterpret_cast<uintptr_t>(pointer))));
+  if (decompressed_low >
+      reinterpret_cast<void*>(SentinelPointer::kSentinelValue))
+    TryTracePointerConservatively(decompressed_low);
+  auto decompressed_high =
+      reinterpret_cast<Address>(CompressedPointer::Decompress(
+          static_cast<uint32_t>(reinterpret_cast<uintptr_t>(pointer) >>
+                                (sizeof(uint32_t) * CHAR_BIT))));
+  if (decompressed_high >
+      reinterpret_cast<void*>(SentinelPointer::kSentinelValue))
+    TryTracePointerConservatively(decompressed_high);
+#endif  // defined(CPPGC_POINTER_COMPRESSION)
+}
 void ConservativeTracingVisitor::TraceConservativelyIfNeeded(
     HeapObjectHeader& header) {
   if (!header.IsInConstruction<AccessMode::kNonAtomic>()) {
@@ -57,6 +57,8 @@ class ConservativeTracingVisitor {
   virtual void VisitInConstructionConservatively(HeapObjectHeader&,
                                                  TraceConservativelyCallback) {}
+  void V8_EXPORT_PRIVATE TryTracePointerConservatively(Address address);
+
   HeapBase& heap_;
   PageBackend& page_backend_;
   cppgc::Visitor& visitor_;
@@ -317,5 +317,80 @@ TEST_F(MarkingVisitorTest, StrongTracingMarksWeakMember) {
   EXPECT_TRUE(header.IsMarked());
 }
+namespace {
+struct GCedWithDestructor : GarbageCollected<GCedWithDestructor> {
+  ~GCedWithDestructor() { ++g_finalized; }
+  static size_t g_finalized;
+  void Trace(Visitor* v) const {}
+};
+size_t GCedWithDestructor::g_finalized = 0;
+struct GCedWithInConstructionCallbackWithMember : GCedWithDestructor {
+  template <typename Callback>
+  explicit GCedWithInConstructionCallbackWithMember(Callback callback) {
+    callback(this);
+  }
+  void Trace(Visitor* v) const {
+    GCedWithDestructor::Trace(v);
+    v->Trace(member);
+  }
+  Member<GCed> member;
+};
+struct ConservativeTracerTest : public testing::TestWithHeap {
+  ConservativeTracerTest() { GCedWithDestructor::g_finalized = 0; }
+};
+}  // namespace
+TEST_F(ConservativeTracerTest, TraceConservativelyInConstructionObject) {
+  auto* volatile gced =
+      MakeGarbageCollected<GCedWithInConstructionCallbackWithMember>(
+          GetAllocationHandle(),
+          [this](GCedWithInConstructionCallbackWithMember* obj) V8_NOINLINE {
+            [](GCedWithInConstructionCallbackWithMember* obj,
+               AllocationHandle& handle) V8_NOINLINE {
+              obj->member = MakeGarbageCollected<GCed>(handle);
+            }(obj, GetAllocationHandle());
+            ConservativeGC();
+          });
+  USE(gced);
+  ConservativeGC();
+  EXPECT_EQ(0u, GCedWithDestructor::g_finalized);
+  // Call into HoH::GetGCInfoIndex to prevent the compiler from optimizing away
+  // the stack variable.
+  EXPECT_EQ(HeapObjectHeader::FromObject(gced).GetGCInfoIndex(),
+            GCInfoTrait<GCedWithInConstructionCallbackWithMember>::Index());
+}
+TEST_F(ConservativeTracerTest, TraceConservativelyStack) {
+  volatile std::array<Member<GCedWithDestructor>, 16u> members =
+      [this]() V8_NOINLINE {
+        std::array<Member<GCedWithDestructor>, 16u> members;
+        for (auto& member : members)
+          member =
+              MakeGarbageCollected<GCedWithDestructor>(GetAllocationHandle());
+        return members;
+      }();
+  USE(members);
+  ConservativeGC();
+  EXPECT_EQ(0u, GCedWithDestructor::g_finalized);
+  // Call into HoH::GetGCInfoIndex to prevent the compiler from optimizing away
+  // the stack variable.
+  auto member =
+      const_cast<std::remove_volatile_t<decltype(members)>&>(members)[0];
+  EXPECT_EQ(HeapObjectHeader::FromObject(member.Get()).GetGCInfoIndex(),
+            GCInfoTrait<GCedWithDestructor>::Index());
+}
 }  // namespace internal
 }  // namespace cppgc