Commit d3a72e3c authored by Omer Katz, committed by Commit Bot

cppgc: Stack scanning using ObjectStartBitmap

This CL implements stack scanning for cppgc.
Given a value on the stack, the MarkingVisitor uses
PageBackend::Lookup to check whether that address is on
the heap. If it is, BasePage::TryObjectHeaderFromInnerAddress
(introduced in this CL) is used to get the relevant object
header. Note that random addresses on the heap might point to
free memory, the object-start-bitmap, etc.
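
A condensed sketch of this lookup chain, lifted from the
MarkingVisitor::VisitPointer change below (the actual marking
logic is elided):

  void MarkingVisitor::VisitPointer(const void* address) {
    // PageBackend::Lookup (via BasePage::FromInnerAddress) filters out
    // addresses that are not backed by any heap page.
    const BasePage* page = BasePage::FromInnerAddress(
        marker_->heap(), static_cast<ConstAddress>(address));
    if (!page) return;
    // May still return nullptr: the address can point into a free-list
    // entry or page metadata rather than a live object.
    HeapObjectHeader* const header =
        page->TryObjectHeaderFromInnerAddress(const_cast<void*>(address));
    if (!header || header->IsMarked()) return;
    // ... trace or conservatively scan the object (see below).
  }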

If a valid object header is found:
* If the object is not in construction, the GCInfoIndex is used
to get the relevant Trace method and the object is traced.
* Otherwise, the object is conservatively scanned - i.e. the
payload of the object is iterated word by word and each word is
treated as a possible pointer.

Only addresses pointing to the payload of non-free objects are
traced.
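
The two cases, condensed from the new MarkingVisitor::VisitPointer
below (MSAN handling and marked-bytes accounting elided):

  if (!IsInConstruction(*header)) {
    // Fully constructed: dispatch to the Trace method registered under
    // the object's GCInfoIndex.
    MarkHeader(header,
               {header->Payload(),
                GlobalGCInfoTable::GCInfoFromIndex(header->GetGCInfoIndex())
                    .trace});
  } else {
    // In construction: the vtable may not be set up yet, so mark without
    // tracing and treat every payload word as a possible pointer.
    MarkHeaderNoTracing(header);
    Address* payload = reinterpret_cast<Address*>(header->Payload());
    for (size_t i = 0; i < header->GetSize() / sizeof(Address); ++i) {
      if (payload[i]) VisitPointer(payload[i]);
    }
  }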

BasePage::TryObjectHeaderFromInnerAddress assumes no LAB on the
relevant space, thus all LABs are reset before scanning the stack.
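
The resulting ordering in Marker::VisitRoots (condensed from the
change below):

  void Marker::VisitRoots() {
    // Reset all LABs first so that TryObjectHeaderFromInnerAddress can
    // rely on the ObjectStartBitmap and free-list state alone.
    ResetLocalAllocationBufferVisitor visitor;
    visitor.Traverse(&heap_->raw_heap());
    heap_->GetStrongPersistentRegion().Trace(marking_visitor_.get());
    // Only then conservatively scan the stack.
    if (config_.stack_state_ != MarkingConfig::StackState::kNoHeapPointers)
      heap_->stack()->IteratePointers(marking_visitor_.get());
  }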

Bug: chromium:1056170
Change-Id: I172850f6f1bbb6f0efca8e44ad8fdfe222977b9f
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2190426
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Cr-Commit-Position: refs/heads/master@{#67795}
parent 2b720150
@@ -14,7 +14,7 @@
#include "src/heap/cppgc/heap.h"
#include "src/heap/cppgc/object-start-bitmap-inl.h"
#include "src/heap/cppgc/object-start-bitmap.h"
#include "src/heap/cppgc/page-memory.h"
#include "src/heap/cppgc/page-memory-inl.h"
#include "src/heap/cppgc/raw-heap.h"
namespace cppgc {
@@ -27,6 +27,21 @@ Address AlignAddress(Address address, size_t alignment) {
RoundUp(reinterpret_cast<uintptr_t>(address), alignment));
}
const HeapObjectHeader* ObjectHeaderFromInnerAddressImpl(const BasePage* page,
const void* address) {
if (page->is_large()) {
return LargePage::From(page)->ObjectHeader();
}
const ObjectStartBitmap& bitmap =
NormalPage::From(page)->object_start_bitmap();
const HeapObjectHeader* header =
bitmap.FindHeader(static_cast<ConstAddress>(address));
DCHECK_LT(address,
reinterpret_cast<ConstAddress>(header) +
header->GetSize<HeapObjectHeader::AccessMode::kAtomic>());
return header;
}
} // namespace
STATIC_ASSERT(kPageSize == api_constants::kPageAlignment);
@@ -45,22 +60,58 @@ const BasePage* BasePage::FromPayload(const void* payload) {
kGuardPageSize);
}
HeapObjectHeader* BasePage::ObjectHeaderFromInnerAddress(void* address) {
return const_cast<HeapObjectHeader*>(
// static
BasePage* BasePage::FromInnerAddress(const Heap* heap, void* address) {
return const_cast<BasePage*>(
FromInnerAddress(heap, const_cast<const void*>(address)));
}
// static
const BasePage* BasePage::FromInnerAddress(const Heap* heap,
const void* address) {
return reinterpret_cast<const BasePage*>(
heap->page_backend()->Lookup(static_cast<ConstAddress>(address)));
}
HeapObjectHeader& BasePage::ObjectHeaderFromInnerAddress(void* address) const {
return const_cast<HeapObjectHeader&>(
ObjectHeaderFromInnerAddress(const_cast<const void*>(address)));
}
const HeapObjectHeader* BasePage::ObjectHeaderFromInnerAddress(
const void* address) {
const HeapObjectHeader& BasePage::ObjectHeaderFromInnerAddress(
const void* address) const {
const HeapObjectHeader* header =
ObjectHeaderFromInnerAddressImpl(this, address);
DCHECK_NE(kFreeListGCInfoIndex, header->GetGCInfoIndex());
return *header;
}
HeapObjectHeader* BasePage::TryObjectHeaderFromInnerAddress(
void* address) const {
return const_cast<HeapObjectHeader*>(
TryObjectHeaderFromInnerAddress(const_cast<const void*>(address)));
}
const HeapObjectHeader* BasePage::TryObjectHeaderFromInnerAddress(
const void* address) const {
if (is_large()) {
return LargePage::From(this)->ObjectHeader();
if (!LargePage::From(this)->PayloadContains(
static_cast<ConstAddress>(address)))
return nullptr;
} else {
const NormalPage* normal_page = NormalPage::From(this);
if (!normal_page->PayloadContains(static_cast<ConstAddress>(address)))
return nullptr;
// Check that the space has no linear allocation buffer.
DCHECK(!NormalPageSpace::From(normal_page->space())
->linear_allocation_buffer()
.size());
}
ObjectStartBitmap& bitmap = NormalPage::From(this)->object_start_bitmap();
HeapObjectHeader* header =
bitmap.FindHeader(static_cast<ConstAddress>(address));
DCHECK_LT(address,
reinterpret_cast<ConstAddress>(header) +
header->GetSize<HeapObjectHeader::AccessMode::kAtomic>());
// |address| is on the heap, so ObjectHeaderFromInnerAddressImpl can get
// the header.
const HeapObjectHeader* header =
ObjectHeaderFromInnerAddressImpl(this, address);
if (header->IsFree()) return nullptr;
DCHECK_NE(kFreeListGCInfoIndex, header->GetGCInfoIndex());
return header;
}
......
@@ -25,6 +25,9 @@ class V8_EXPORT_PRIVATE BasePage {
static BasePage* FromPayload(void*);
static const BasePage* FromPayload(const void*);
static BasePage* FromInnerAddress(const Heap*, void*);
static const BasePage* FromInnerAddress(const Heap*, const void*);
BasePage(const BasePage&) = delete;
BasePage& operator=(const BasePage&) = delete;
@@ -38,8 +41,16 @@ class V8_EXPORT_PRIVATE BasePage {
bool is_large() const { return type_ == PageType::kLarge; }
// |address| must refer to real object.
HeapObjectHeader* ObjectHeaderFromInnerAddress(void* address);
const HeapObjectHeader* ObjectHeaderFromInnerAddress(const void* address);
HeapObjectHeader& ObjectHeaderFromInnerAddress(void* address) const;
const HeapObjectHeader& ObjectHeaderFromInnerAddress(
const void* address) const;
// |address| is guaranteed to point into the page, but not necessarily into
// the payload. Returns nullptr when pointing into free-list entries and the
// valid header otherwise.
HeapObjectHeader* TryObjectHeaderFromInnerAddress(void* address) const;
const HeapObjectHeader* TryObjectHeaderFromInnerAddress(
const void* address) const;
protected:
enum class PageType { kNormal, kLarge };
@@ -135,6 +146,10 @@ class V8_EXPORT_PRIVATE NormalPage final : public BasePage {
return object_start_bitmap_;
}
bool PayloadContains(ConstAddress address) const {
return (PayloadStart() <= address) && (address < PayloadEnd());
}
private:
NormalPage(Heap* heap, BaseSpace* space);
~NormalPage();
@@ -168,6 +183,10 @@ class V8_EXPORT_PRIVATE LargePage final : public BasePage {
size_t PayloadSize() const { return payload_size_; }
bool PayloadContains(ConstAddress address) const {
return (PayloadStart() <= address) && (address < PayloadEnd());
}
private:
LargePage(Heap* heap, BaseSpace* space, size_t);
~LargePage();
......
@@ -5,12 +5,25 @@
#include "src/heap/cppgc/marker.h"
#include "src/heap/cppgc/heap-object-header-inl.h"
#include "src/heap/cppgc/heap-visitor.h"
#include "src/heap/cppgc/heap.h"
#include "src/heap/cppgc/marking-visitor.h"
namespace cppgc {
namespace internal {
namespace {
class ResetLocalAllocationBufferVisitor final
: public HeapVisitor<ResetLocalAllocationBufferVisitor> {
public:
bool VisitLargePageSpace(LargePageSpace*) { return true; }
bool VisitNormalPageSpace(NormalPageSpace* space) {
space->ResetLinearAllocationBuffer();
return true;
}
};
} // namespace
namespace {
template <typename Worklist, typename Callback>
bool DrainWorklistWithDeadline(v8::base::TimeTicks deadline, Worklist* worklist,
@@ -89,6 +102,13 @@ void Marker::ProcessWeakness() {
}
void Marker::VisitRoots() {
{
// Reset LABs before scanning roots. LABs are cleared to allow
// ObjectStartBitmap handling without considering LABs.
ResetLocalAllocationBufferVisitor visitor;
visitor.Traverse(&heap_->raw_heap());
}
heap_->GetStrongPersistentRegion().Trace(marking_visitor_.get());
if (config_.stack_state_ != MarkingConfig::StackState::kNoHeapPointers)
heap_->stack()->IteratePointers(marking_visitor_.get());
......
@@ -8,6 +8,7 @@
#include "include/cppgc/internal/accessors.h"
#include "src/heap/cppgc/heap-object-header-inl.h"
#include "src/heap/cppgc/heap.h"
#include "src/heap/cppgc/page-memory-inl.h"
namespace cppgc {
namespace internal {
@@ -126,14 +127,57 @@ void MarkingVisitor::DynamicallyMarkAddress(ConstAddress address) {
}
void MarkingVisitor::VisitPointer(const void* address) {
for (auto* header : marker_->heap()->objects()) {
if (address >= header->Payload() &&
address < (header->Payload() + header->GetSize())) {
header->TryMarkAtomic();
}
// TODO(chromium:1056170): Add page bloom filter
const BasePage* page = BasePage::FromInnerAddress(
marker_->heap(), static_cast<ConstAddress>(address));
if (!page) return;
DCHECK_EQ(marker_->heap(), page->heap());
HeapObjectHeader* const header =
page->TryObjectHeaderFromInnerAddress(const_cast<void*>(address));
if (!header || header->IsMarked()) return;
// Simple case for fully constructed objects. This just adds the object to the
// regular marking worklist.
if (!IsInConstruction(*header)) {
MarkHeader(
header,
{header->Payload(),
GlobalGCInfoTable::GCInfoFromIndex(header->GetGCInfoIndex()).trace});
return;
}
// This case is reached for not-fully-constructed objects with vtables.
// We can differentiate multiple cases:
// 1. No vtable set up. Example:
// class A : public GarbageCollected<A> { virtual void f() = 0; };
// class B : public A { B() : A(foo()) {}; };
// The vtable for A is not set up if foo() allocates and triggers a GC.
//
// 2. Vtables properly set up (non-mixin case).
// 3. Vtables not properly set up (mixin) if GC is allowed during mixin
// construction.
//
// We use a simple conservative approach for these cases as they are not
// performance critical.
MarkHeaderNoTracing(header);
Address* payload = reinterpret_cast<Address*>(header->Payload());
const size_t payload_size = header->GetSize();
for (size_t i = 0; i < (payload_size / sizeof(Address)); ++i) {
Address maybe_ptr = payload[i];
#if defined(MEMORY_SANITIZER)
// |payload| may be uninitialized by design or just contain padding bytes.
// Copy into a local variable that is unpoisoned for conservative marking;
// the copy leaves the original MSAN state of |payload| intact.
__msan_unpoison(&maybe_ptr, sizeof(maybe_ptr));
#endif
if (maybe_ptr) VisitPointer(maybe_ptr);
}
// TODO(chromium:1056170): Implement proper conservative scanning for
// on-stack objects. Requires page bloom filter.
AccountMarkedBytes(*header);
}
MutatorThreadMarkingVisitor::MutatorThreadMarkingVisitor(Marker* marker)
......
@@ -19,6 +19,7 @@ ObjectStartBitmap::ObjectStartBitmap(Address offset) : offset_(offset) {
HeapObjectHeader* ObjectStartBitmap::FindHeader(
ConstAddress address_maybe_pointing_to_the_middle_of_object) const {
DCHECK_LE(offset_, address_maybe_pointing_to_the_middle_of_object);
size_t object_offset =
address_maybe_pointing_to_the_middle_of_object - offset_;
size_t object_start_number = object_offset / kAllocationGranularity;
......
@@ -16,19 +16,19 @@ inline bool SupportsCommittingGuardPages(PageAllocator* allocator) {
return kGuardPageSize % allocator->CommitPageSize() == 0;
}
Address NormalPageMemoryRegion::Lookup(Address address) const {
Address NormalPageMemoryRegion::Lookup(ConstAddress address) const {
size_t index = GetIndex(address);
if (!page_memories_in_use_[index]) return nullptr;
const MemoryRegion writeable_region = GetPageMemory(index).writeable_region();
return writeable_region.Contains(address) ? writeable_region.base() : nullptr;
}
Address LargePageMemoryRegion::Lookup(Address address) const {
Address LargePageMemoryRegion::Lookup(ConstAddress address) const {
const MemoryRegion writeable_region = GetPageMemory().writeable_region();
return writeable_region.Contains(address) ? writeable_region.base() : nullptr;
}
Address PageMemoryRegion::Lookup(Address address) const {
Address PageMemoryRegion::Lookup(ConstAddress address) const {
DCHECK(reserved_region().Contains(address));
return is_large()
? static_cast<const LargePageMemoryRegion*>(this)->Lookup(address)
@@ -36,7 +36,7 @@ Address PageMemoryRegion::Lookup(Address address) const {
address);
}
PageMemoryRegion* PageMemoryRegionTree::Lookup(Address address) const {
PageMemoryRegion* PageMemoryRegionTree::Lookup(ConstAddress address) const {
auto it = set_.upper_bound(address);
// This check also covers set_.size() > 0, since for empty vectors it is
// guaranteed that begin() == end().
@@ -46,7 +46,7 @@ PageMemoryRegion* PageMemoryRegionTree::Lookup(Address address) const {
return nullptr;
}
Address PageBackend::Lookup(Address address) const {
Address PageBackend::Lookup(ConstAddress address) const {
PageMemoryRegion* pmr = page_memory_region_tree_.Lookup(address);
return pmr ? pmr->Lookup(address) : nullptr;
}
......
@@ -30,7 +30,7 @@ class V8_EXPORT_PRIVATE MemoryRegion final {
size_t size() const { return size_; }
Address end() const { return base_ + size_; }
bool Contains(Address addr) const {
bool Contains(ConstAddress addr) const {
return (reinterpret_cast<uintptr_t>(addr) -
reinterpret_cast<uintptr_t>(base_)) < size_;
}
@@ -70,7 +70,7 @@ class V8_EXPORT_PRIVATE PageMemoryRegion {
// Lookup writeable base for an |address| that's contained in
// PageMemoryRegion. Filters out addresses that are contained in non-writeable
// regions (e.g. guard pages).
inline Address Lookup(Address address) const;
inline Address Lookup(ConstAddress address) const;
// Disallow copy/move.
PageMemoryRegion(const PageMemoryRegion&) = delete;
@@ -111,7 +111,7 @@ class V8_EXPORT_PRIVATE NormalPageMemoryRegion final : public PageMemoryRegion {
// protection.
void Free(Address);
inline Address Lookup(Address) const;
inline Address Lookup(ConstAddress) const;
void UnprotectForTesting() final;
@@ -122,7 +122,7 @@ class V8_EXPORT_PRIVATE NormalPageMemoryRegion final : public PageMemoryRegion {
page_memories_in_use_[index] = value;
}
size_t GetIndex(Address address) const {
size_t GetIndex(ConstAddress address) const {
return static_cast<size_t>(address - reserved_region().base()) >>
kPageSizeLog2;
}
@@ -143,7 +143,7 @@ class V8_EXPORT_PRIVATE LargePageMemoryRegion final : public PageMemoryRegion {
reserved_region().size() - 2 * kGuardPageSize));
}
inline Address Lookup(Address) const;
inline Address Lookup(ConstAddress) const;
void UnprotectForTesting() final;
};
@@ -161,10 +161,10 @@ class V8_EXPORT_PRIVATE PageMemoryRegionTree final {
void Add(PageMemoryRegion*);
void Remove(PageMemoryRegion*);
inline PageMemoryRegion* Lookup(Address) const;
inline PageMemoryRegion* Lookup(ConstAddress) const;
private:
std::map<Address, PageMemoryRegion*> set_;
std::map<ConstAddress, PageMemoryRegion*> set_;
};
// A pool of PageMemory objects represented by the writeable base addresses.
@@ -216,7 +216,7 @@ class V8_EXPORT_PRIVATE PageBackend final {
// Returns the writeable base if |address| is contained in a valid page
// memory.
inline Address Lookup(Address) const;
inline Address Lookup(ConstAddress) const;
// Disallow copy/move.
PageBackend(const PageBackend&) = delete;
......
@@ -252,10 +252,10 @@ TEST_F(PageTest, ObjectHeaderFromInnerAddress) {
for (auto* inner_ptr = reinterpret_cast<ConstAddress>(object);
inner_ptr < reinterpret_cast<ConstAddress>(object + 1); ++inner_ptr) {
const HeapObjectHeader* hoh =
const HeapObjectHeader& hoh =
BasePage::FromPayload(object)->ObjectHeaderFromInnerAddress(
inner_ptr);
EXPECT_EQ(&expected, hoh);
EXPECT_EQ(&expected, &hoh);
}
}
{
@@ -263,10 +263,10 @@ TEST_F(PageTest, ObjectHeaderFromInnerAddress) {
MakeGarbageCollected<GCed<2 * kLargeObjectSizeThreshold>>(GetHeap());
const HeapObjectHeader& expected = HeapObjectHeader::FromPayload(object);
const HeapObjectHeader* hoh =
const HeapObjectHeader& hoh =
BasePage::FromPayload(object)->ObjectHeaderFromInnerAddress(
reinterpret_cast<ConstAddress>(object) + kLargeObjectSizeThreshold);
EXPECT_EQ(&expected, hoh);
EXPECT_EQ(&expected, &hoh);
}
}
......
@@ -184,5 +184,15 @@ TEST_F(MarkerTest, DeepHierarchyIsMarked) {
}
}
TEST_F(MarkerTest, NestedObjectsOnStackAreMarked) {
GCed* root = MakeGarbageCollected<GCed>(GetHeap());
root->SetChild(MakeGarbageCollected<GCed>(GetHeap()));
root->child()->SetChild(MakeGarbageCollected<GCed>(GetHeap()));
DoMarking(MarkingConfig(MarkingConfig::StackState::kMayContainHeapPointers));
EXPECT_TRUE(HeapObjectHeader::FromPayload(root).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromPayload(root->child()).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromPayload(root->child()->child()).IsMarked());
}
} // namespace internal
} // namespace cppgc