Commit 5ae59192 authored by Nikolaos Papaspyrou, committed by V8 LUCI CQ

heap: Implement IPR using the marking bitmap

This CL introduces a compile flag v8_enable_inner_pointer_resolution_mb.
Behind it, it introduces a method `FindBasePtrForMarking` in
`MarkCompactCollector`, which implements inner pointer resolution using
the page's marking bitmap. This method is intended to be used by
conservative stack scanning to resolve inner pointers to heap objects
late in the marking phase.

It also delays stack scanning during the marking phase of a full GC,
to ensure that marking has progressed far enough that most heap objects
have already been marked.

Bug: v8:12851
Change-Id: I40e291a86bb8d2587a2c1d9505574dde3c65eb16
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3703837
Commit-Queue: Nikolaos Papaspyrou <nikolaos@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Toon Verwaest <verwaest@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#81404}
parent 547053d1
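For orientation, the intended use during conservative stack scanning is roughly the following (a minimal sketch, not code from this CL; `MarkObject` is an assumed marking entry point and the iteration over stack words is elided):

// Sketch: resolve a possibly-interior stack word to an object start via the
// marking bitmap, then mark the object. Hypothetical glue code; only
// FindBasePtrForMarking is introduced by this CL.
void VisitStackWordConservatively(MarkCompactCollector* collector,
                                  Address candidate) {
  Address base_ptr = MarkCompactCollector::FindBasePtrForMarking(candidate);
  if (base_ptr == kNullAddress) return;  // Not inside an unmarked live object.
  collector->MarkObject(HeapObject::FromAddress(base_ptr));  // Assumed API.
}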
@@ -576,9 +576,11 @@ assert(!cppgc_enable_young_generation || cppgc_enable_caged_heap,
assert(!cppgc_enable_pointer_compression || cppgc_enable_caged_heap,
"Pointer compression in CppGC requires caged heap")
assert(!v8_enable_conservative_stack_scanning ||
v8_enable_inner_pointer_resolution_osb,
"Conservative stack scanning requires inner pointer resolution (OSB)")
assert(
!v8_enable_conservative_stack_scanning ||
v8_enable_inner_pointer_resolution_osb ||
v8_enable_inner_pointer_resolution_mb,
"Conservative stack scanning requires inner pointer resolution (OSB or MB)")
if (v8_enable_single_generation == true) {
assert(
@@ -938,6 +940,9 @@ config("features") {
if (v8_enable_inner_pointer_resolution_osb) {
defines += [ "V8_ENABLE_INNER_POINTER_RESOLUTION_OSB" ]
}
if (v8_enable_inner_pointer_resolution_mb) {
defines += [ "V8_ENABLE_INNER_POINTER_RESOLUTION_MB" ]
}
if (v8_disable_write_barriers) {
defines += [ "V8_DISABLE_WRITE_BARRIERS" ]
}
......
@@ -84,6 +84,9 @@ declare_args() {
# Use the object start bitmap for inner pointer resolution.
v8_enable_inner_pointer_resolution_osb = false
# Use the marking bitmap for inner pointer resolution.
v8_enable_inner_pointer_resolution_mb = false
v8_enable_google_benchmark = false
cppgc_is_standalone = false
......
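To experiment with the new mechanism, both flags would be set in args.gn, e.g. (a sketch, assuming an otherwise standard V8 GN configuration):

# args.gn
v8_enable_conservative_stack_scanning = true
v8_enable_inner_pointer_resolution_mb = true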
@@ -486,6 +486,15 @@ DEFINE_BOOL_READONLY(inner_pointer_resolution_osb,
V8_ENABLE_INNER_POINTER_RESOLUTION_OSB_BOOL,
"use object start bitmap for IPR")
#ifdef V8_ENABLE_INNER_POINTER_RESOLUTION_MB
#define V8_ENABLE_INNER_POINTER_RESOLUTION_MB_BOOL true
#else
#define V8_ENABLE_INNER_POINTER_RESOLUTION_MB_BOOL false
#endif
DEFINE_BOOL_READONLY(inner_pointer_resolution_mb,
V8_ENABLE_INNER_POINTER_RESOLUTION_MB_BOOL,
"use marking bitmap for IPR")
#ifdef V8_ENABLE_FUTURE
#define FUTURE_BOOL true
#else
......
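The read-only runtime flag mirrors the compile-time define, so code can branch on the mechanism at runtime; with V8's flag machinery of this era that presumably reads as follows (sketch):

// Assumed FLAG_* accessor generated by DEFINE_BOOL_READONLY.
if (FLAG_inner_pointer_resolution_mb) {
  // Marking-bitmap-based inner pointer resolution is compiled in.
}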
@@ -22,10 +22,16 @@ void ConservativeStackVisitor::VisitPointer(const void* pointer) {
bool ConservativeStackVisitor::CheckPage(Address address, MemoryChunk* page) {
if (address < page->area_start() || address >= page->area_end()) return false;
auto base_ptr = page->object_start_bitmap()->FindBasePtr(address);
if (base_ptr == kNullAddress) {
return false;
}
Address base_ptr;
#ifdef V8_ENABLE_INNER_POINTER_RESOLUTION_OSB
base_ptr = page->object_start_bitmap()->FindBasePtr(address);
#elif V8_ENABLE_INNER_POINTER_RESOLUTION_MB
base_ptr = MarkCompactCollector::FindBasePtrForMarking(address);
#else
#error "Some inner pointer resolution mechanism is needed"
#endif // V8_ENABLE_INNER_POINTER_RESOLUTION_(OSB|MB)
if (base_ptr == kNullAddress) return false;
// At this point, base_ptr *must* refer to a valid object. We check whether
// |address| resides inside the object or beyond it in unused memory.
......
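The elided tail of CheckPage performs the containment check announced in the comment above; it presumably looks like the following sketch (the `Size()` usage mirrors the loop added later in this CL):

// Sketch: base_ptr names a valid object; accept |address| only if it falls
// within the object's extent rather than in the unused memory behind it.
HeapObject obj = HeapObject::FromAddress(base_ptr);
if (address >= base_ptr + obj.Size()) return false;
// Otherwise |address| points into |obj| and may be visited as a reference.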
@@ -5090,6 +5090,11 @@ void Heap::IterateRootsIncludingClients(RootVisitor* v,
}
}
void Heap::IterateRootsFromStack(RootVisitor* v) {
IterateStackRoots(v);
v->Synchronize(VisitorSynchronization::kStackRoots);
}
void Heap::IterateWeakGlobalHandles(RootVisitor* v) {
isolate_->global_handles()->IterateWeakRoots(v);
}
......
@@ -1060,6 +1060,7 @@ class Heap {
void IterateRoots(RootVisitor* v, base::EnumSet<SkipRoot> options);
void IterateRootsIncludingClients(RootVisitor* v,
base::EnumSet<SkipRoot> options);
void IterateRootsFromStack(RootVisitor* v);
// Iterates over entries in the smi roots list. Only interesting to the
// serializer/deserializer, since GC does not care about smis.
......
@@ -2051,7 +2051,7 @@ void MarkCompactCollector::MarkRoots(RootVisitor* root_visitor,
// Mark the heap roots including global variables, stack variables,
// etc., and all objects reachable from them.
heap()->IterateRootsIncludingClients(
root_visitor, base::EnumSet<SkipRoot>{SkipRoot::kWeak});
root_visitor, base::EnumSet<SkipRoot>{SkipRoot::kWeak, SkipRoot::kStack});
// Custom marking for top optimized frame.
ProcessTopOptimizedFrame(custom_root_body_visitor, isolate());
@@ -2064,6 +2064,90 @@ void MarkCompactCollector::MarkRoots(RootVisitor* root_visitor,
}
}
#ifdef V8_ENABLE_INNER_POINTER_RESOLUTION_MB
Address MarkCompactCollector::FindBasePtrForMarking(Address maybe_inner_ptr) {
// TODO(v8:12851): If this implementation is kept:
// 1. This function will have to be refactored. Most of the bit hacking
// belongs to some reverse-iterator abstraction for bitmaps.
// 2. Unit tests will have to be added.
const Page* page = Page::FromAddress(maybe_inner_ptr);
Bitmap* bitmap = page->marking_bitmap<AccessMode::NON_ATOMIC>();
MarkBit::CellType* cells = bitmap->cells();
uint32_t index = page->AddressToMarkbitIndex(maybe_inner_ptr);
unsigned int cell_index = Bitmap::IndexToCell(index);
MarkBit::CellType mask = 1u << Bitmap::IndexInCell(index);
MarkBit::CellType cell = cells[cell_index];
// If the markbit is set, then we have an object that does not need to be marked.
if ((cell & mask) != 0) return kNullAddress;
// Clear the bits corresponding to higher addresses in the cell.
cell &= ((~static_cast<MarkBit::CellType>(0)) >>
(Bitmap::kBitsPerCell - Bitmap::IndexInCell(index) - 1));
// Find the start of a valid object by traversing the bitmap backwards, until
// we find a markbit that is set and whose previous markbit (if it exists) is
// unset.
uint32_t object_index;
// Iterate backwards to find a cell with any set markbit.
while (cell == 0 && cell_index > 0) cell = cells[--cell_index];
if (cell == 0) {
// There is no cell with a set markbit; we reached the start of the page.
object_index = 0;
} else {
uint32_t leading_zeros = base::bits::CountLeadingZeros(cell);
uint32_t leftmost_ones =
base::bits::CountLeadingZeros(~(cell << leading_zeros));
uint32_t index_of_last_leftmost_one =
Bitmap::kBitsPerCell - leading_zeros - leftmost_ones;
if (index_of_last_leftmost_one > 0) {
// The leftmost contiguous sequence of set bits does not reach the start
// of the cell.
object_index =
cell_index * Bitmap::kBitsPerCell + index_of_last_leftmost_one;
} else {
// The leftmost contiguous sequence of set bits reaches the start of the
// cell. We must keep traversing backwards until we find the first unset
// markbit.
if (cell_index == 0) {
object_index = 0;
} else {
// Iterate backwards to find a cell with any unset markbit.
do {
cell = cells[--cell_index];
} while (~cell == 0 && cell_index > 0);
if (~cell == 0) {
// There is no cell with a clear markbit; we reached the start of the
// page.
object_index = 0;
} else {
uint32_t leading_ones = base::bits::CountLeadingZeros(~cell);
uint32_t index_of_last_leading_one =
Bitmap::kBitsPerCell - leading_ones;
DCHECK_LT(0, index_of_last_leading_one);
object_index =
cell_index * Bitmap::kBitsPerCell + index_of_last_leading_one;
}
}
}
}
// Iterate through the objects in the page forwards, until we find the object
// containing maybe_inner_ptr.
Address base_ptr = page->MarkbitIndexToAddress(object_index);
const Address limit = page->area_end();
PtrComprCageBase cage_base{page->heap()->isolate()};
while (base_ptr < limit) {
if (maybe_inner_ptr < base_ptr) break;
const int size = HeapObject::FromAddress(base_ptr).Size(cage_base);
if (maybe_inner_ptr < base_ptr + size) return base_ptr;
base_ptr += size;
DCHECK_LE(base_ptr, limit);
}
return kNullAddress;
}
#endif // V8_ENABLE_INNER_POINTER_RESOLUTION_MB
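The TODO above asks for unit tests; the bit manipulation lends itself to being exercised in isolation. The following is a self-contained sketch of the same backward scan over a plain uint32_t array (FindBaseSlot and Clz are invented names for illustration; Clz stands in for base::bits::CountLeadingZeros):

// Build with GCC/Clang, e.g.: g++ -std=c++17 find_base_slot.cc
#include <cassert>
#include <cstdint>
#include <cstdio>

constexpr uint32_t kBitsPerCell = 32;

// Stand-in for base::bits::CountLeadingZeros, defined for 0 as well.
uint32_t Clz(uint32_t x) {
  return x == 0 ? 32 : static_cast<uint32_t>(__builtin_clz(x));
}

// Backward scan as in FindBasePtrForMarking: bit i set means a marked object
// starts at slot i (LSB-first within each cell). Returns the first slot of
// the topmost run of set bits at or below `index`, or 0 if the scan reaches
// the start of the bitmap without finding one.
uint32_t FindBaseSlot(const uint32_t* cells, uint32_t index) {
  uint32_t cell_index = index / kBitsPerCell;
  uint32_t cell = cells[cell_index];
  // Clear the bits corresponding to higher slots in the same cell.
  cell &= ~uint32_t{0} >> (kBitsPerCell - index % kBitsPerCell - 1);
  // Scan backwards for a cell with any set bit.
  while (cell == 0 && cell_index > 0) cell = cells[--cell_index];
  if (cell == 0) return 0;  // No set markbit: start of the page.
  uint32_t lz = Clz(cell);
  uint32_t run_length = Clz(~(cell << lz));           // Topmost run of 1s.
  uint32_t low_bit = kBitsPerCell - lz - run_length;  // Its lowest bit.
  if (low_bit > 0) return cell_index * kBitsPerCell + low_bit;
  // The run reaches bit 0 of this cell: skip backwards over all-ones cells.
  if (cell_index == 0) return 0;
  do {
    cell = cells[--cell_index];
  } while (~cell == 0 && cell_index > 0);
  if (~cell == 0) return 0;  // All ones down to the start of the page.
  uint32_t ones = Clz(~cell);  // Run of 1s at the top of this cell.
  return cell_index * kBitsPerCell + (kBitsPerCell - ones);
}

int main() {
  // Marked objects at slots 1 and 30..35 (a run crossing the cell boundary).
  uint32_t cells[2] = {0xC0000002u, 0x0000000Fu};
  assert(FindBaseSlot(cells, 20) == 1);   // Scans back to the object at 1.
  assert(FindBaseSlot(cells, 33) == 30);  // Inside the run starting at 30.
  assert(FindBaseSlot(cells, 40) == 30);  // Run 30..35 precedes slot 40.
  printf("ok\n");
}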
void MarkCompactCollector::MarkRootsFromStack(RootVisitor* root_visitor) {
heap()->IterateRootsFromStack(root_visitor);
}
void MarkCompactCollector::MarkObjectsFromClientHeaps() {
if (!isolate()->is_shared()) return;
@@ -2571,6 +2655,11 @@ void MarkCompactCollector::MarkLiveObjects() {
parallel_marking_ = false;
}
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_ROOTS);
MarkRootsFromStack(&root_visitor);
}
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_FULL_CLOSURE);
// Complete the transitive closure single-threaded to avoid races with
......
@@ -568,6 +568,14 @@ class MarkCompactCollector final {
std::unique_ptr<UpdatingItem> CreateRememberedSetUpdatingItem(
MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode);
#ifdef V8_ENABLE_INNER_POINTER_RESOLUTION_MB
// Finds an object header based on a `maybe_inner_ptr`. It returns
// `kNullAddress` if the parameter does not point to (the interior of) a valid
// heap object, or if it points to (the interior of) some object that is
// already marked as live (black or grey).
static Address FindBasePtrForMarking(Address maybe_inner_ptr);
#endif // V8_ENABLE_INNER_POINTER_RESOLUTION_MB
private:
void ComputeEvacuationHeuristics(size_t area_size,
int* target_fragmentation_percent,
@@ -598,6 +606,9 @@
void MarkRoots(RootVisitor* root_visitor,
ObjectVisitor* custom_root_body_visitor);
// Mark the stack roots and all objects reachable from them.
void MarkRootsFromStack(RootVisitor* root_visitor);
// Mark all objects that are directly referenced from one of the clients
// heaps.
void MarkObjectsFromClientHeaps();
......