Commit f8e0b4c6 authored by Anton Bikineev, committed by V8 LUCI CQ

cppgc: Always keep ObjectStartBitmap in consistent state

Currently, the ObjectStartBitmap (OSB) cannot be safely accessed while
sweeping is in progress. This restriction can, however, easily be lifted
with atomic stores.

Keeping the bitmap consistent is needed for the generational barrier for
source objects (to retrieve the beginning of the source object).

Bug: chromium:1029379
Change-Id: I5fb8db579f881ddf240ce68ad51fa8264ee645dc
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3545071
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Cr-Commit-Position: refs/heads/main@{#79606}
parent 0129218b
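For context, the core idea of the change (keeping the object-start bitmap usable under concurrent sweeping by flipping its bits with atomic operations) can be illustrated with a toy model. This is only a sketch: the class name ToyObjectStartBitmap, the 16-byte granularity, and the 128 KiB page size are illustrative assumptions, not cppgc's actual API or constants.

#include <atomic>
#include <climits>
#include <cstddef>
#include <cstdint>

// Toy model of an object-start bitmap: one bit per 16-byte slot of a page.
// Setting/clearing bits with atomic RMW operations means a concurrent reader
// (e.g. a write barrier resolving an inner pointer to the object start)
// never observes a torn or half-updated cell.
class ToyObjectStartBitmap {
 public:
  explicit ToyObjectStartBitmap(uintptr_t page_start)
      : page_start_(page_start) {}

  void SetBit(uintptr_t address) {
    size_t cell, bit;
    Index(address, &cell, &bit);
    cells_[cell].fetch_or(uintptr_t{1} << bit, std::memory_order_relaxed);
  }

  void ClearBit(uintptr_t address) {
    size_t cell, bit;
    Index(address, &cell, &bit);
    cells_[cell].fetch_and(~(uintptr_t{1} << bit), std::memory_order_relaxed);
  }

  bool CheckBit(uintptr_t address) const {
    size_t cell, bit;
    Index(address, &cell, &bit);
    return (cells_[cell].load(std::memory_order_relaxed) >> bit) & 1u;
  }

  // Walks backwards from |inner| to the closest set bit, i.e. the start of
  // the object containing |inner|. Assumes a bit is set at or below |inner|.
  uintptr_t FindObjectStart(uintptr_t inner) const {
    uintptr_t address = inner & ~(kGranularity - 1);
    while (!CheckBit(address)) address -= kGranularity;
    return address;
  }

 private:
  static constexpr size_t kGranularity = 16;            // bytes per bit (assumed)
  static constexpr size_t kPageSize = size_t{1} << 17;  // 128 KiB toy page (assumed)
  static constexpr size_t kBitsPerCell = sizeof(uintptr_t) * CHAR_BIT;
  static constexpr size_t kCells = kPageSize / kGranularity / kBitsPerCell;

  void Index(uintptr_t address, size_t* cell, size_t* bit) const {
    const size_t object_number = (address - page_start_) / kGranularity;
    *cell = object_number / kBitsPerCell;
    *bit = object_number % kBitsPerCell;
  }

  uintptr_t page_start_;
  std::atomic<uintptr_t> cells_[kCells]{};
};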
@@ -57,7 +57,8 @@ class V8_EXPORT_PRIVATE BasePage {
   // |address| is guaranteed to point into the page but not payload. Returns
   // nullptr when pointing into free list entries and the valid header
-  // otherwise.
+  // otherwise. The function is not thread-safe and cannot be called when
+  // e.g. sweeping is in progress.
   HeapObjectHeader* TryObjectHeaderFromInnerAddress(void* address) const;
   const HeapObjectHeader* TryObjectHeaderFromInnerAddress(
       const void* address) const;
......
@@ -64,9 +64,10 @@ void SameThreadEnabledCheckingPolicyBase::CheckPointerImpl(
   const HeapObjectHeader* header = nullptr;
   if (points_to_payload) {
     header = &HeapObjectHeader::FromObject(ptr);
-  } else if (!heap_->sweeper().IsSweepingInProgress()) {
-    // Mixin case.
-    header = &base_page->ObjectHeaderFromInnerAddress(ptr);
+  } else {
+    // Mixin case. Access the ObjectStartBitmap atomically since sweeping can be
+    // in progress.
+    header = &base_page->ObjectHeaderFromInnerAddress<AccessMode::kAtomic>(ptr);
     DCHECK_LE(header->ObjectStart(), ptr);
     DCHECK_GT(header->ObjectEnd(), ptr);
   }
......
@@ -37,7 +37,16 @@ class ObjectStartBitmapVerifier
   friend class HeapVisitor<ObjectStartBitmapVerifier>;
  public:
-  void Verify(RawHeap& heap) { Traverse(heap); }
+  void Verify(RawHeap& heap) {
+#if DEBUG
+    Traverse(heap);
+#endif  // DEBUG
+  }
+  void Verify(NormalPage& page) {
+#if DEBUG
+    Traverse(page);
+#endif  // DEBUG
+  }
  private:
   bool VisitNormalPage(NormalPage& page) {
@@ -51,9 +60,10 @@ class ObjectStartBitmapVerifier
     if (header.IsLargeObject()) return true;
     auto* raw_header = reinterpret_cast<ConstAddress>(&header);
-    CHECK(bitmap_->CheckBit(raw_header));
+    CHECK(bitmap_->CheckBit<AccessMode::kAtomic>(raw_header));
     if (prev_) {
-      CHECK_EQ(prev_, bitmap_->FindHeader(raw_header - 1));
+      // No other bits in the range [prev_, raw_header) should be set.
+      CHECK_EQ(prev_, bitmap_->FindHeader<AccessMode::kAtomic>(raw_header - 1));
     }
     prev_ = &header;
     return true;
@@ -286,14 +296,26 @@ typename FinalizationBuilder::ResultType SweepNormalPage(
   FinalizationBuilder builder(*page, page_allocator);
   PlatformAwareObjectStartBitmap& bitmap = page->object_start_bitmap();
-  bitmap.Clear();
   size_t largest_new_free_list_entry = 0;
   size_t live_bytes = 0;
   Address start_of_gap = page->PayloadStart();
+  const auto clear_bit_if_coalesced_entry = [&bitmap,
+                                             &start_of_gap](Address address) {
+    if (address != start_of_gap) {
+      // Clear only if not the first freed entry.
+      bitmap.ClearBit<AccessMode::kAtomic>(address);
+    } else {
+      // Otherwise check that the bit is set.
+      DCHECK(bitmap.CheckBit<AccessMode::kAtomic>(address));
+    }
+  };
   for (Address begin = page->PayloadStart(), end = page->PayloadEnd();
        begin != end;) {
+    DCHECK(bitmap.CheckBit<AccessMode::kAtomic>(begin));
     HeapObjectHeader* header = reinterpret_cast<HeapObjectHeader*>(begin);
     const size_t size = header->AllocatedSize();
     // Check if this is a free list entry.
@@ -302,12 +324,14 @@ typename FinalizationBuilder::ResultType SweepNormalPage(
       // This prevents memory from being discarded in configurations where
       // `CheckMemoryIsInaccessibleIsNoop()` is false.
       CheckMemoryIsInaccessible(header, size);
+      clear_bit_if_coalesced_entry(begin);
       begin += size;
       continue;
     }
     // Check if object is not marked (not reachable).
     if (!header->IsMarked<kAtomicAccess>()) {
       builder.AddFinalizer(header, size);
+      clear_bit_if_coalesced_entry(begin);
       begin += size;
       continue;
     }
@@ -317,12 +341,11 @@ typename FinalizationBuilder::ResultType SweepNormalPage(
       size_t new_free_list_entry_size =
           static_cast<size_t>(header_address - start_of_gap);
       builder.AddFreeListEntry(start_of_gap, new_free_list_entry_size);
+      DCHECK(bitmap.CheckBit<AccessMode::kAtomic>(start_of_gap));
       largest_new_free_list_entry =
           std::max(largest_new_free_list_entry, new_free_list_entry_size);
-      bitmap.SetBit(start_of_gap);
     }
     StickyUnmark(header);
-    bitmap.SetBit(begin);
     begin += size;
     start_of_gap = begin;
     live_bytes += size;
@@ -332,10 +355,9 @@ typename FinalizationBuilder::ResultType SweepNormalPage(
       start_of_gap != page->PayloadEnd()) {
     builder.AddFreeListEntry(
         start_of_gap, static_cast<size_t>(page->PayloadEnd() - start_of_gap));
-    bitmap.SetBit(start_of_gap);
+    DCHECK(bitmap.CheckBit<AccessMode::kAtomic>(start_of_gap));
   }
   page->SetAllocatedBytesAtLastGC(live_bytes);
-  bitmap.MarkAsFullyPopulated();
   const bool is_empty = (start_of_gap == page->PayloadStart());
   return builder.GetResult(is_empty, largest_new_free_list_entry);
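As a rough, self-contained illustration of the coalescing logic in the sweep loop above: only the first entry of a run of dead objects keeps its object-start bit (it becomes the start of the coalesced free-list entry), entries merged into that run get their bits cleared atomically, and bits of live objects stay untouched, so the bitmap never needs to be rebuilt from scratch. The ToySlot and SweepToyPage names are hypothetical and heavily simplified sketches, not the real SweepNormalPage.

#include <atomic>
#include <cstddef>
#include <cstdio>

// One fixed-size slot of a toy page: whether it survived marking and its
// object-start bit.
struct ToySlot {
  bool marked;
  std::atomic<bool> start_bit;
};

// Mimics the sweep loop above: the first dead slot of a gap keeps its bit
// (it becomes the start of the free-list entry built for the whole gap),
// slots coalesced into that gap get their bits cleared, and live slots keep
// their bits. No bit is ever set from scratch, so concurrent readers always
// see a consistent bitmap during the sweep.
void SweepToyPage(ToySlot* slots, size_t count) {
  bool in_gap = false;
  for (size_t i = 0; i < count; ++i) {
    if (!slots[i].marked) {
      if (in_gap) {
        // Coalesced entry: clear its bit atomically.
        slots[i].start_bit.store(false, std::memory_order_relaxed);
      }
      in_gap = true;  // the first dead slot of the gap keeps its bit
      continue;
    }
    in_gap = false;  // a live slot terminates the current gap
  }
}

int main() {
  // Slots: live, dead, dead, live, dead, dead (all bits initially set).
  ToySlot slots[6] = {{true, {true}},  {false, {true}}, {false, {true}},
                      {true, {true}},  {false, {true}}, {false, {true}}};
  SweepToyPage(slots, 6);
  for (size_t i = 0; i < 6; ++i) {
    std::printf("slot %zu: start bit = %d\n", i,
                static_cast<int>(slots[i].start_bit.load()));
  }
  // Slots 2 and 5 lose their bits (coalesced into the preceding gaps);
  // slots 0, 1, 3 and 4 keep theirs.
  return 0;
}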
@@ -400,7 +422,7 @@ class SweepFinalizer final {
 #if defined(CPPGC_CAGED_HEAP)
     const uint64_t cage_base =
         reinterpret_cast<uint64_t>(page->heap().caged_heap().base());
-    HeapObjectHeader* next_unfinalized = 0;
+    HeapObjectHeader* next_unfinalized = nullptr;
     for (auto* unfinalized_header = page_state->unfinalized_objects_head;
          unfinalized_header; unfinalized_header = next_unfinalized) {
@@ -438,6 +460,10 @@ class SweepFinalizer final {
     largest_new_free_list_entry_ = std::max(
         page_state->largest_new_free_list_entry, largest_new_free_list_entry_);
+    // After the page was fully finalized and freelists have been merged, verify
+    // that the bitmap is consistent.
+    ObjectStartBitmapVerifier().Verify(static_cast<NormalPage&>(*page));
     // Add the page to the space.
     page->space().AddPage(page);
   }
@@ -533,6 +559,9 @@ class MutatorThreadSweeper final : private HeapVisitor<MutatorThreadSweeper> {
     if (result.is_empty) {
       NormalPage::Destroy(&page);
     } else {
+      // The page was eagerly finalized and all the freelists have been merged.
+      // Verify that the bitmap is consistent with headers.
+      ObjectStartBitmapVerifier().Verify(page);
       page.space().AddPage(&page);
       largest_new_free_list_entry_ = std::max(
           result.largest_new_free_list_entry, largest_new_free_list_entry_);
@@ -715,10 +744,9 @@ class Sweeper::SweeperImpl final {
     is_in_progress_ = true;
     platform_ = platform;
     config_ = config;
-#if DEBUG
     // Verify bitmap for all spaces regardless of |compactable_space_handling|.
     ObjectStartBitmapVerifier().Verify(heap_);
-#endif
     // If inaccessible memory is touched to check whether it is set up
     // correctly it cannot be discarded.
......
@@ -149,7 +149,7 @@ void WriteBarrier::GenerationalBarrierForSourceObjectSlow(
   auto& object_header =
       BasePage::FromInnerAddress(&local_data.heap_base, inner_pointer)
-          ->ObjectHeaderFromInnerAddress(inner_pointer);
+          ->ObjectHeaderFromInnerAddress<AccessMode::kAtomic>(inner_pointer);
   // Record the source object.
   local_data.heap_base.remembered_set().AddSourceObject(
......