Commit 9e1db518 authored by Omer Katz, committed by V8 LUCI CQ

cppgc: Add DCHECK that object start bitmap is safe to use

During sweeping/compaction the bitmap is being reconstructed and
should not be relied on for finding object starts.
Add a DCHECK that the bitmap is fully populated.

Bug: chromium:1307471
Change-Id: I4aa414722262bb6fb169123a49fce1510a60d3ef
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3540680
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#79575}
parent cabf441d
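
The invariant behind the new DCHECK can be sketched as follows. This is a minimal illustration with hypothetical names (PageBitmap, FindObjectStart), not the actual cppgc implementation: lookups assert that the bitmap has been fully rebuilt, so a query against a half-built bitmap fails loudly in debug builds instead of returning a stale object start.

#include <cassert>
#include <cstdint>

// Query-side guard (illustrative only): mirrors the new
// DCHECK(fully_populated_) added to ObjectStartBitmap::FindHeader().
struct PageBitmap {
  bool fully_populated = false;

  uintptr_t FindObjectStart(uintptr_t inner_pointer) const {
    // A bitmap that is being reconstructed must not serve lookups.
    assert(fully_populated);
    // ... scan backwards from inner_pointer to the nearest set bit ...
    return inner_pointer;
  }
};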
@@ -292,6 +292,7 @@ class CompactionState final {
           page->PayloadSize() - used_bytes_in_current_page_);
     }
 #endif
+    page->object_start_bitmap().MarkAsFullyPopulated();
   }

  private:
@@ -66,6 +66,11 @@ class V8_EXPORT_PRIVATE ObjectStartBitmap {
   // Clear the object start bitmap.
   inline void Clear();

+  // Marks the bitmap as fully populated. Unpopulated bitmaps are in an
+  // inconsistent state and must be populated before they can be used to find
+  // object headers.
+  inline void MarkAsFullyPopulated();
+
  private:
   template <AccessMode = AccessMode::kNonAtomic>
   inline void store(size_t cell_index, uint8_t value);
@@ -83,6 +88,17 @@ class V8_EXPORT_PRIVATE ObjectStartBitmap {
   inline void ObjectStartIndexAndBit(ConstAddress, size_t*, size_t*) const;

   const Address offset_;
+  // `fully_populated_` is used to denote that the bitmap is populated with
+  // all currently allocated objects on the page and is in a consistent state.
+  // It is used to guard against using the bitmap for finding headers during
+  // concurrent sweeping.
+  //
+  // Although this flag can be used by both the main thread and concurrent
+  // sweeping threads, it is not atomic. The flag should never be accessed by
+  // multiple threads at the same time. If data races are observed on this
+  // flag, it likely means that the bitmap is queried while concurrent
+  // sweeping is active, which is not supported and should be avoided.
+  bool fully_populated_ = false;
   // The bitmap contains a bit for every kGranularity aligned address on
   // a NormalPage, i.e., for a page of size kBlinkPageSize.
   std::array<uint8_t, kReservedForBitmap> object_start_bit_map_;
@@ -90,11 +106,13 @@ class V8_EXPORT_PRIVATE ObjectStartBitmap {

 ObjectStartBitmap::ObjectStartBitmap(Address offset) : offset_(offset) {
   Clear();
+  MarkAsFullyPopulated();
 }

 template <AccessMode mode>
 HeapObjectHeader* ObjectStartBitmap::FindHeader(
     ConstAddress address_maybe_pointing_to_the_middle_of_object) const {
+  DCHECK(fully_populated_);
   DCHECK_LE(offset_, address_maybe_pointing_to_the_middle_of_object);
   size_t object_offset =
       address_maybe_pointing_to_the_middle_of_object - offset_;
@@ -187,7 +205,13 @@ inline void ObjectStartBitmap::Iterate(Callback callback) const {
   }
 }

+void ObjectStartBitmap::MarkAsFullyPopulated() {
+  DCHECK(!fully_populated_);
+  fully_populated_ = true;
+}
+
 void ObjectStartBitmap::Clear() {
+  fully_populated_ = false;
   std::fill(object_start_bit_map_.begin(), object_start_bit_map_.end(), 0);
 }
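
The header comment above implies a simple single-owner state machine: Clear() drops the flag when a rebuild begins, and MarkAsFullyPopulated() may run exactly once per rebuild, which its DCHECK(!fully_populated_) enforces. A runnable sketch of that lifecycle over one sweep cycle, again with hypothetical names and single-threaded by contract (the flag is deliberately not atomic):

#include <cassert>

struct PageBitmap {
  bool fully_populated = false;
  void Clear() { fully_populated = false; /* ...zero the bits... */ }
  void MarkAsFullyPopulated() {
    assert(!fully_populated);  // Exactly one transition per rebuild.
    fully_populated = true;
  }
};

int main() {
  PageBitmap bitmap;
  bitmap.MarkAsFullyPopulated();  // As in the constructor: empty is consistent.
  bitmap.Clear();                 // Sweep starts; lookups are now forbidden.
  // ...the sweeper re-sets one bit per surviving object...
  bitmap.MarkAsFullyPopulated();  // Sweep done; lookups are legal again.
  assert(bitmap.fully_populated);
}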
@@ -335,6 +335,7 @@ typename FinalizationBuilder::ResultType SweepNormalPage(
     bitmap.SetBit(start_of_gap);
   }
   page->SetAllocatedBytesAtLastGC(live_bytes);
+  bitmap.MarkAsFullyPopulated();

   const bool is_empty = (start_of_gap == page->PayloadStart());
   return builder.GetResult(is_empty, largest_new_free_list_entry);
@@ -184,11 +184,9 @@ TEST_F(CppgcAllocationTest, LargeDoubleWordAlignedAllocation) {
 TEST_F(CppgcAllocationTest, AlignToDoubleWordFromUnaligned) {
   static constexpr size_t kAlignmentMask = kDoubleWord - 1;
   auto* padding_object =
-      MakeGarbageCollected<CustomPadding<16>>(GetAllocationHandle());
-  // First allocation is not aligned.
-  ASSERT_EQ(kWord,
-            reinterpret_cast<uintptr_t>(padding_object) & kAlignmentMask);
-  // The end should also not be properly aligned.
+      MakeGarbageCollected<CustomPadding<kWord>>(GetAllocationHandle());
+  // The address from which the next object can be allocated, i.e. the end of
+  // |padding_object|, should not be properly aligned.
   ASSERT_EQ(kWord, (reinterpret_cast<uintptr_t>(padding_object) +
                     sizeof(*padding_object)) &
                        kAlignmentMask);
@@ -204,11 +202,9 @@ TEST_F(CppgcAllocationTest, AlignToDoubleWordFromUnaligned) {
 TEST_F(CppgcAllocationTest, AlignToDoubleWordFromAligned) {
   static constexpr size_t kAlignmentMask = kDoubleWord - 1;
   auto* padding_object =
-      MakeGarbageCollected<CustomPadding<kWord>>(GetAllocationHandle());
-  // First allocation is not aligned.
-  ASSERT_EQ(kWord,
-            reinterpret_cast<uintptr_t>(padding_object) & kAlignmentMask);
-  // The end should be properly aligned.
+      MakeGarbageCollected<CustomPadding<16>>(GetAllocationHandle());
+  // The address from which the next object can be allocated, i.e. the end of
+  // |padding_object|, should be properly aligned.
   ASSERT_EQ(0u, (reinterpret_cast<uintptr_t>(padding_object) +
                  sizeof(*padding_object)) &
                     kAlignmentMask);
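
The rewritten test expectations follow from the alignment-mask arithmetic. A quick standalone check, assuming the usual 64-bit values kWord == 8 and kDoubleWord == 16 (an assumption here; the real constants come from the test fixture):

#include <cassert>
#include <cstdint>

int main() {
  constexpr uintptr_t kWord = 8;                         // Assumed sizeof(void*).
  constexpr uintptr_t kDoubleWord = 2 * kWord;           // 16
  constexpr uintptr_t kAlignmentMask = kDoubleWord - 1;  // 0xF

  // Unaligned case: an object ending at offset 8 mod 16 leaves the next
  // allocation word-aligned but not double-word-aligned, so masking the end
  // address yields kWord.
  assert(((0x1000 + 8) & kAlignmentMask) == kWord);

  // Aligned case: an object ending on a 16-byte boundary leaves the next
  // allocation double-word-aligned, so masking yields 0.
  assert(((0x1000 + 16) & kAlignmentMask) == 0u);
}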