Commit 9e1db518 authored by Omer Katz, committed by V8 LUCI CQ

cppgc: Add DCHECK that object start bitmap is safe to use

During sweeping/compaction the bitmap is being reconstructed and
should not be relied on for finding object starts.
Add a DCHECK that the bitmap is fully populated.

Bug: chromium:1307471
Change-Id: I4aa414722262bb6fb169123a49fce1510a60d3ef
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3540680
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#79575}
parent cabf441d
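The change boils down to a "populated" flag that guards bitmap lookups: `Clear()` marks the bitmap as unpopulated, sweeping/compaction repopulate it and call `MarkAsFullyPopulated()` as their last step, and `FindHeader()` DCHECKs the flag. A minimal, self-contained sketch of that pattern (illustrative class and sizes, not the actual cppgc implementation):

```cpp
#include <array>
#include <cassert>
#include <cstddef>
#include <cstdint>

// Simplified stand-in for cppgc's ObjectStartBitmap, reduced to the
// fully_populated_ guard that this commit introduces.
class ObjectStartBitmapSketch {
 public:
  ObjectStartBitmapSketch() {
    Clear();
    // A freshly cleared page contains no objects, so the empty bitmap is
    // trivially consistent.
    MarkAsFullyPopulated();
  }

  void Clear() {
    fully_populated_ = false;  // Bitmap is about to be rebuilt; lookups are unsafe.
    bits_.fill(0);
  }

  void MarkAsFullyPopulated() {
    assert(!fully_populated_);
    fully_populated_ = true;
  }

  void SetBit(size_t index) { bits_[index / 8] |= uint8_t{1} << (index % 8); }

  // Stand-in for FindHeader(): only legal once the bitmap is consistent,
  // mirroring the DCHECK(fully_populated_) added below.
  bool IsObjectStart(size_t index) const {
    assert(fully_populated_);
    return bits_[index / 8] & (uint8_t{1} << (index % 8));
  }

 private:
  bool fully_populated_ = false;
  std::array<uint8_t, 32> bits_{};
};

int main() {
  ObjectStartBitmapSketch bitmap;
  bitmap.Clear();                 // e.g. start of sweeping a page
  bitmap.SetBit(3);               // rebuild: record surviving object starts
  bitmap.MarkAsFullyPopulated();  // last step of sweeping the page
  return bitmap.IsObjectStart(3) ? 0 : 1;
}
```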
@@ -292,6 +292,7 @@ class CompactionState final {
page->PayloadSize() - used_bytes_in_current_page_);
}
#endif
page->object_start_bitmap().MarkAsFullyPopulated();
}
private:
......
@@ -66,6 +66,11 @@ class V8_EXPORT_PRIVATE ObjectStartBitmap {
// Clear the object start bitmap.
inline void Clear();
// Marks the bitmap as fully populated. Unpopulated bitmaps are in an
// inconsistent state and must be populated before they can be used to find
// object headers.
inline void MarkAsFullyPopulated();
private:
template <AccessMode = AccessMode::kNonAtomic>
inline void store(size_t cell_index, uint8_t value);
@@ -83,6 +88,17 @@ class V8_EXPORT_PRIVATE ObjectStartBitmap {
inline void ObjectStartIndexAndBit(ConstAddress, size_t*, size_t*) const;
const Address offset_;
// `fully_populated_` is used to denote that the bitmap is populated with all
// currently allocated objects on the page and is in a consistent state. It is
// used to guard against using the bitmap for finding headers during
// concurrent sweeping.
//
// Although this flag can be used by both the main thread and concurrent
// sweeping threads, it is not atomic. The flag should never be accessed by
// multiple threads at the same time. If data races are observed on this flag,
// it likely means that the bitmap is queried while concurrent sweeping is
// active, which is not supported and should be avoided.
bool fully_populated_ = false;
// The bitmap contains a bit for every kGranularity aligned address on a
// NormalPage, i.e., for a page of size kBlinkPageSize.
std::array<uint8_t, kReservedForBitmap> object_start_bit_map_;
@@ -90,11 +106,13 @@ class V8_EXPORT_PRIVATE ObjectStartBitmap {
ObjectStartBitmap::ObjectStartBitmap(Address offset) : offset_(offset) {
Clear();
MarkAsFullyPopulated();
}
template <AccessMode mode>
HeapObjectHeader* ObjectStartBitmap::FindHeader(
ConstAddress address_maybe_pointing_to_the_middle_of_object) const {
DCHECK(fully_populated_);
DCHECK_LE(offset_, address_maybe_pointing_to_the_middle_of_object);
size_t object_offset =
address_maybe_pointing_to_the_middle_of_object - offset_;
@@ -187,7 +205,13 @@ inline void ObjectStartBitmap::Iterate(Callback callback) const {
}
}
void ObjectStartBitmap::MarkAsFullyPopulated() {
DCHECK(!fully_populated_);
fully_populated_ = true;
}
void ObjectStartBitmap::Clear() {
fully_populated_ = false;
std::fill(object_start_bit_map_.begin(), object_start_bit_map_.end(), 0);
}
......
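The comment above deliberately leaves `fully_populated_` non-atomic: correctness relies on the flag never being touched by two threads at the same time. A small illustration of that discipline, with hypothetical names rather than the cppgc API: the flag is cleared on the main thread before sweeping starts, written only by the sweeping thread, and read on the main thread again only after joining, so ordinary thread start/join synchronization is enough and no data race occurs despite the plain bool.

```cpp
#include <cassert>
#include <functional>
#include <thread>
#include <vector>

// Hypothetical per-page bitmap; only the populated flag matters here.
struct PageBitmap {
  bool fully_populated = true;
  std::vector<bool> object_starts = std::vector<bool>(64, false);
};

// Hypothetical sweeping step that rebuilds the bitmap off the main thread.
// The flag is cleared before the thread starts and set only from within it.
void SweepPage(PageBitmap& bitmap) {
  bitmap.object_starts.assign(bitmap.object_starts.size(), false);
  bitmap.object_starts[0] = true;  // Pretend one live object survived.
  bitmap.fully_populated = true;   // Last step of sweeping this page.
}

int main() {
  PageBitmap bitmap;
  bitmap.fully_populated = false;  // Main thread hands the page to the sweeper.
  std::thread sweeper(SweepPage, std::ref(bitmap));

  // Querying the bitmap here would be exactly the unsupported case the new
  // DCHECK(fully_populated_) is meant to catch.

  sweeper.join();  // Synchronizes with the sweeper; safe to query again.
  assert(bitmap.fully_populated);
  return bitmap.object_starts[0] ? 0 : 1;
}
```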
@@ -335,6 +335,7 @@ typename FinalizationBuilder::ResultType SweepNormalPage(
bitmap.SetBit(start_of_gap);
}
page->SetAllocatedBytesAtLastGC(live_bytes);
bitmap.MarkAsFullyPopulated();
const bool is_empty = (start_of_gap == page->PayloadStart());
return builder.GetResult(is_empty, largest_new_free_list_entry);
......
@@ -184,11 +184,9 @@ TEST_F(CppgcAllocationTest, LargeDoubleWordAlignedAllocation) {
TEST_F(CppgcAllocationTest, AlignToDoubleWordFromUnaligned) {
static constexpr size_t kAlignmentMask = kDoubleWord - 1;
auto* padding_object =
MakeGarbageCollected<CustomPadding<16>>(GetAllocationHandle());
// First allocation is not aligned.
ASSERT_EQ(kWord,
reinterpret_cast<uintptr_t>(padding_object) & kAlignmentMask);
// The end should also not be properly aligned.
MakeGarbageCollected<CustomPadding<kWord>>(GetAllocationHandle());
// The address from which the next object can be allocated, i.e. the end of
// |padding_object|, should not be properly aligned.
ASSERT_EQ(kWord, (reinterpret_cast<uintptr_t>(padding_object) +
sizeof(*padding_object)) &
kAlignmentMask);
@@ -204,11 +202,9 @@ TEST_F(CppgcAllocationTest, AlignToDoubleWordFromUnaligned) {
TEST_F(CppgcAllocationTest, AlignToDoubleWordFromAligned) {
static constexpr size_t kAlignmentMask = kDoubleWord - 1;
auto* padding_object =
MakeGarbageCollected<CustomPadding<kWord>>(GetAllocationHandle());
// First allocation is not aligned.
ASSERT_EQ(kWord,
reinterpret_cast<uintptr_t>(padding_object) & kAlignmentMask);
// The end should be properly aligned.
MakeGarbageCollected<CustomPadding<16>>(GetAllocationHandle());
// The address from which the next object can be allocated, i.e. the end of
// |padding_object|, should be properly aligned.
ASSERT_EQ(0u, (reinterpret_cast<uintptr_t>(padding_object) +
sizeof(*padding_object)) &
kAlignmentMask);
......
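The reworded test comments reason about where the next object can be allocated, i.e. the alignment of `padding_object`'s end, which is just modular arithmetic on the start address and object size. A standalone check of that arithmetic (the concrete kWord/kDoubleWord values are assumptions about a typical 64-bit build, not taken from the test):

```cpp
#include <cassert>
#include <cstdint>

int main() {
  // Assumed constants; the real test derives these from cppgc internals.
  constexpr uintptr_t kWord = 8;
  constexpr uintptr_t kDoubleWord = 16;
  constexpr uintptr_t kAlignmentMask = kDoubleWord - 1;

  // A word-aligned but not double-word-aligned start address.
  constexpr uintptr_t unaligned_start = 0x1000 + kWord;
  assert((unaligned_start & kAlignmentMask) == kWord);

  // If the padding object's size is a multiple of kDoubleWord, its end keeps
  // the start's misalignment (the "FromUnaligned" expectation).
  assert(((unaligned_start + 2 * kDoubleWord) & kAlignmentMask) == kWord);

  // If the size is an odd multiple of kWord, the end becomes properly
  // double-word aligned (the "FromAligned" expectation).
  assert(((unaligned_start + 3 * kWord) & kAlignmentMask) == 0u);

  return 0;
}
```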