Commit f74d6931 authored by Nikolaos Papaspyrou, committed by V8 LUCI CQ

heap: Refactor MarkCompactCollector::FindBasePtrForMarking

This CL refactors the implementation of inner pointer resolution based
on the marking bitmap. Most of the code of
MarkCompactCollector::FindBasePtrForMarking that processes the marking
bitmap is moved to a utility function,
FindPreviousObjectForConservativeMarking, which iterates backwards
through the bitmap to find the start of the closest preceding marked
object on the page.
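
For intuition, the backward search performed by this utility is
equivalent to the following minimal sketch over a plain bit vector (a
hypothetical illustration, not V8 code; the actual implementation scans
whole bitmap cells and uses CountLeadingZeros instead of testing bits
one by one):

#include <cstddef>
#include <vector>

// Returns the index of the first bit of the closest run of set markbits
// at or before `query`, or 0 (the start of the page) if no bit is set.
// Assumes markbits[query] is clear; the real function bails out otherwise.
size_t FindRunStart(const std::vector<bool>& markbits, size_t query) {
  size_t i = query;
  while (i > 0 && !markbits[i]) --i;     // skip clear bits backwards
  while (i > 0 && markbits[i - 1]) --i;  // back up to the run's first bit
  return i;
}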

Bug: v8:12851
Change-Id: I980ac5712d8b1df792196d77edb9526ca2e13e2c
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3758227
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Commit-Queue: Nikolaos Papaspyrou <nikolaos@chromium.org>
Cr-Commit-Position: refs/heads/main@{#81750}
parent 5e227beb
@@ -54,15 +54,6 @@ bool ConservativeStackVisitor::CheckPage(Address address, MemoryChunk* page) {
 void ConservativeStackVisitor::VisitConservativelyIfPointer(
     const void* pointer) {
   auto address = reinterpret_cast<Address>(pointer);
-  // TODO(v8:12851): Let's figure out what this meant to do...
-  // This condition is always true, as the LAB invariant requires
-  // start <= top <= limit
-#if 0
-  if (address > isolate_->heap()->old_space()->top() ||
-      address < isolate_->heap()->old_space()->limit()) {
-    return;
-  }
-#endif
   for (Page* page : *isolate_->heap()->old_space()) {
     if (CheckPage(address, page)) {
@@ -2073,82 +2073,115 @@ void MarkCompactCollector::MarkRoots(RootVisitor* root_visitor,
 }
 
 #ifdef V8_ENABLE_INNER_POINTER_RESOLUTION_MB
-Address MarkCompactCollector::FindBasePtrForMarking(Address maybe_inner_ptr) {
-  // TODO(v8:12851): If this implementation is kept:
-  // 1. This function will have to be refactored. Most of the bit hacking
-  // belongs to some reverse-iterator abstraction for bitmaps.
-  // 2. Unit tests will have to be added.
-  const Page* page = Page::FromAddress(maybe_inner_ptr);
-  Bitmap* bitmap = page->marking_bitmap<AccessMode::NON_ATOMIC>();
-  MarkBit::CellType* cells = bitmap->cells();
-  uint32_t index = page->AddressToMarkbitIndex(maybe_inner_ptr);
-  unsigned int cell_index = Bitmap::IndexToCell(index);
-  MarkBit::CellType mask = 1u << Bitmap::IndexInCell(index);
+namespace {
+
+// This utility function returns the highest address in the page that is lower
+// than maybe_inner_ptr, has its markbit set, and whose previous address (if it
+// exists) does not have its markbit set. This address is guaranteed to be the
+// start of a valid object in the page. In case the markbit corresponding to
+// maybe_inner_ptr is set, the function bails out and returns kNullAddress.
+Address FindPreviousObjectForConservativeMarking(const Page* page,
+                                                 Address maybe_inner_ptr) {
+  auto* bitmap = page->marking_bitmap<AccessMode::NON_ATOMIC>();
+  const MarkBit::CellType* cells = bitmap->cells();
+
+  // The first actual bit of the bitmap, corresponding to page->area_start(),
+  // is at start_index which is somewhere in (not necessarily at the start of)
+  // start_cell_index.
+  const uint32_t start_index = page->AddressToMarkbitIndex(page->area_start());
+  const uint32_t start_cell_index = Bitmap::IndexToCell(start_index);
+  // We assume that all markbits before start_index are clear:
+  // SLOW_DCHECK(bitmap->AllBitsClearInRange(0, start_index));
+  // This has already been checked for the entire bitmap before starting
+  // marking, by MarkCompactCollector::VerifyMarkbitsAreClean.
+
+  const uint32_t index = page->AddressToMarkbitIndex(maybe_inner_ptr);
+  uint32_t cell_index = Bitmap::IndexToCell(index);
+  const MarkBit::CellType mask = 1u << Bitmap::IndexInCell(index);
   MarkBit::CellType cell = cells[cell_index];
-  // If the markbit is set, then we have an object that does not need be marked.
+
+  // If the markbit is already set, bail out.
   if ((cell & mask) != 0) return kNullAddress;
+
   // Clear the bits corresponding to higher addresses in the cell.
   cell &= ((~static_cast<MarkBit::CellType>(0)) >>
            (Bitmap::kBitsPerCell - Bitmap::IndexInCell(index) - 1));
-  // Find the start of a valid object by traversing the bitmap backwards, until
-  // we find a markbit that is set and whose previous markbit (if it exists) is
-  // unset.
-  uint32_t object_index;
-  // Iterate backwards to find a cell with any set markbit.
-  while (cell == 0 && cell_index > 0) cell = cells[--cell_index];
+
+  // Traverse the bitmap backwards, until we find a markbit that is set and
+  // whose previous markbit (if it exists) is unset.
+  // First, iterate backwards to find a cell with any set markbit.
+  while (cell == 0 && cell_index > start_cell_index) cell = cells[--cell_index];
   if (cell == 0) {
-    // There is no cell with a set markbit, we reached the start of the page.
-    object_index = 0;
-  } else {
-    uint32_t leading_zeros = base::bits::CountLeadingZeros(cell);
-    uint32_t leftmost_ones =
-        base::bits::CountLeadingZeros(~(cell << leading_zeros));
-    uint32_t index_of_last_leftmost_one =
-        Bitmap::kBitsPerCell - leading_zeros - leftmost_ones;
-    if (index_of_last_leftmost_one > 0) {
-      // The leftmost contiguous sequence of set bits does not reach the start
-      // of the cell.
-      object_index =
-          cell_index * Bitmap::kBitsPerCell + index_of_last_leftmost_one;
-    } else {
-      // The leftmost contiguous sequence of set bits reaches the start of the
-      // cell. We must keep traversing backwards until we find the first unset
-      // markbit.
-      if (cell_index == 0) {
-        object_index = 0;
-      } else {
-        // Iterate backwards to find a cell with any unset markbit.
-        do {
-          cell = cells[--cell_index];
-        } while (~cell == 0 && cell_index > 0);
-        if (~cell == 0) {
-          // There is no cell with a clear markbit, we reached the start of the
-          // page.
-          object_index = 0;
-        } else {
-          uint32_t leading_ones = base::bits::CountLeadingZeros(~cell);
-          uint32_t index_of_last_leading_one =
-              Bitmap::kBitsPerCell - leading_ones;
-          DCHECK_LT(0, index_of_last_leading_one);
-          object_index =
-              cell_index * Bitmap::kBitsPerCell + index_of_last_leading_one;
-        }
-      }
-    }
+    DCHECK_EQ(start_cell_index, cell_index);
+    // We have reached the start of the page.
+    return page->area_start();
   }
+
+  // We have found such a cell.
+  const uint32_t leading_zeros = base::bits::CountLeadingZeros(cell);
+  const uint32_t leftmost_ones =
+      base::bits::CountLeadingZeros(~(cell << leading_zeros));
+  const uint32_t index_of_last_leftmost_one =
+      Bitmap::kBitsPerCell - leading_zeros - leftmost_ones;
+
+  // If the leftmost sequence of set bits does not reach the start of the cell,
+  // we found it.
+  if (index_of_last_leftmost_one > 0) {
+    return page->MarkbitIndexToAddress(cell_index * Bitmap::kBitsPerCell +
+                                       index_of_last_leftmost_one);
+  }
+
+  // The leftmost sequence of set bits reaches the start of the cell. We must
+  // keep traversing backwards until we find the first unset markbit.
+  if (cell_index == start_cell_index) {
+    // We have reached the start of the page.
+    return page->area_start();
+  }
+
+  // Iterate backwards to find a cell with any unset markbit.
+  do {
+    cell = cells[--cell_index];
+  } while (~cell == 0 && cell_index > start_cell_index);
+  if (~cell == 0) {
+    DCHECK_EQ(start_cell_index, cell_index);
+    // We have reached the start of the page.
+    return page->area_start();
+  }
+
+  // We have found such a cell.
+  const uint32_t leading_ones = base::bits::CountLeadingZeros(~cell);
+  const uint32_t index_of_last_leading_one =
+      Bitmap::kBitsPerCell - leading_ones;
+  DCHECK_LT(0, index_of_last_leading_one);
+  return page->MarkbitIndexToAddress(cell_index * Bitmap::kBitsPerCell +
+                                     index_of_last_leading_one);
+}
+
+}  // namespace
+
+Address MarkCompactCollector::FindBasePtrForMarking(Address maybe_inner_ptr) {
+  const Page* page = Page::FromAddress(maybe_inner_ptr);
+  // TODO(v8:12851): We need a mechanism for checking that this is a valid
+  // page, otherwise return kNullAddress.
+  DCHECK_LT(maybe_inner_ptr, page->area_end());
+  if (maybe_inner_ptr < page->area_start()) return kNullAddress;
+  Address base_ptr =
+      FindPreviousObjectForConservativeMarking(page, maybe_inner_ptr);
+  // If the markbit is set, then we have an object that does not need to be
+  // marked.
+  if (base_ptr == kNullAddress) return kNullAddress;
   // Iterate through the objects in the page forwards, until we find the object
-  // containing maybe_inner_pointer.
-  Address base_ptr = page->MarkbitIndexToAddress(object_index);
-  const Address limit = page->area_end();
+  // containing maybe_inner_ptr.
+  DCHECK_LE(base_ptr, maybe_inner_ptr);
   PtrComprCageBase cage_base{page->heap()->isolate()};
-  while (base_ptr < limit) {
-    if (maybe_inner_ptr < base_ptr) break;
-    const int size = HeapObject::FromAddress(base_ptr).Size(cage_base);
-    if (maybe_inner_ptr < base_ptr + size) return base_ptr;
+  while (true) {
+    HeapObject obj(HeapObject::FromAddress(base_ptr));
+    const int size = obj.Size(cage_base);
+    DCHECK_LT(0, size);
+    if (maybe_inner_ptr < base_ptr + size)
+      return obj.IsFreeSpaceOrFiller(cage_base) ? kNullAddress : base_ptr;
     base_ptr += size;
-    DCHECK_LE(base_ptr, limit);
+    DCHECK_LT(base_ptr, page->area_end());
   }
-  return kNullAddress;
 }
 #endif  // V8_ENABLE_INNER_POINTER_RESOLUTION_MB
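
To make the bit manipulation in FindPreviousObjectForConservativeMarking
concrete, here is a worked example of the CountLeadingZeros trick on one
cell whose higher bits have already been cleared. This is a standalone
sketch using the GCC/Clang builtin rather than V8's
base::bits::CountLeadingZeros wrapper:

#include <cstdint>
#include <iostream>

constexpr uint32_t kBitsPerCell = 32;

int main() {
  // Bits 25..27 form the leftmost run of set markbits; bits 4..5 belong
  // to an earlier object in the same cell.
  const uint32_t cell = 0x0E000030;
  const uint32_t leading_zeros = __builtin_clz(cell);  // 4
  const uint32_t leftmost_ones =
      __builtin_clz(~(cell << leading_zeros));  // 3
  const uint32_t index_of_last_leftmost_one =
      kBitsPerCell - leading_zeros - leftmost_ones;  // 32 - 4 - 3 = 25
  // Prints 25, the first bit of the run 25..27, i.e. the object start.
  std::cout << index_of_last_leftmost_one << "\n";
}

When index_of_last_leftmost_one is 0, the run of set bits touches the
cell boundary and the object start may lie in an earlier cell, which is
why the function then keeps scanning backwards for a cell with an unset
markbit.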
@@ -2805,7 +2838,7 @@ void MarkCompactCollector::ClearNonLiveReferences() {
   {
     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_FLUSHABLE_BYTECODE);
-    // `ProcessFlusheBaselineCandidates()` must be called after
+    // `ProcessFlushedBaselineCandidates()` must be called after
     // `ProcessOldCodeCandidates()` so that we correctly set the code object on
     // the JSFunction after flushing.
     ProcessOldCodeCandidates();
@@ -4684,7 +4717,7 @@ class RememberedSetUpdatingItem : public UpdatingItem {
     if ((updating_mode_ == RememberedSetUpdatingMode::ALL) &&
         chunk_->invalidated_slots<OLD_TO_OLD>() != nullptr) {
       // The invalidated slots are not needed after old-to-old slots were
-      // processsed.
+      // processed.
       chunk_->ReleaseInvalidatedSlots<OLD_TO_OLD>();
     }
     if (V8_EXTERNAL_CODE_SPACE_BOOL) {
@@ -4711,7 +4744,7 @@ class RememberedSetUpdatingItem : public UpdatingItem {
       chunk_->ReleaseSlotSet<OLD_TO_CODE>();
     }
     // The invalidated slots are not needed after old-to-code slots were
-    // processsed, but since there are no invalidated OLD_TO_CODE slots,
+    // processed, but since there are no invalidated OLD_TO_CODE slots,
     // there's nothing to clear.
   }
   if (updating_mode_ == RememberedSetUpdatingMode::ALL) {
@@ -129,6 +129,10 @@ class V8_EXPORT_PRIVATE Bitmap {
     return reinterpret_cast<MarkBit::CellType*>(this);
   }
 
+  V8_INLINE const MarkBit::CellType* cells() const {
+    return reinterpret_cast<const MarkBit::CellType*>(this);
+  }
+
   V8_INLINE static Bitmap* FromAddress(Address addr) {
     return reinterpret_cast<Bitmap*>(addr);
   }
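
This is the standard const-overload pattern: the added accessor is
selected when the Bitmap is reached through a const pointer, so the
refactored utility, which takes a const Page*, can read cells without a
const_cast. A minimal sketch with a hypothetical type (not V8 code):

#include <cstdint>

struct Cells {
  uint32_t* data() { return storage; }
  // Const overload: chosen for const receivers, grants read-only access.
  const uint32_t* data() const { return storage; }
  uint32_t storage[8];
};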
@@ -378,6 +378,7 @@ v8_source_set("unittests_sources") {
     "heap/list-unittest.cc",
     "heap/local-factory-unittest.cc",
     "heap/local-heap-unittest.cc",
+    "heap/marking-inner-pointer-resolution-unittest.cc",
     "heap/marking-unittest.cc",
     "heap/marking-worklist-unittest.cc",
     "heap/memory-reducer-unittest.cc",