Commit 2864a436 authored by Dominik Inführ, committed by V8 LUCI CQ

[heap] Combine fast path of generational and shared heap barrier

The fast path of all write barriers was already mostly unified in
https://crrev.com/c/3644964. However, the shared heap write barrier
still added a new branch to the fast path of the full write barrier.

This CL unifies the branch for the generational and the shared heap
write barrier in the fast path, at the cost of an additional branch in
the slow path. This should hopefully fix the rest of the regressions
caused by introducing the shared heap write barrier. (A sketch of the
combined check follows the commit metadata below.)

Bug: chromium:1326446, v8:11708
Change-Id: Id5a8334c50a7455e53caf65891d4304d9d2e7702
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3663091
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#80749}
parent 5480e036
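
For context before the diff: V8 records per-page metadata in a flags word at a fixed offset in each memory chunk, and membership in the young generation (from/to space) and in the shared heap are individual bits of that word. That is what makes the unification possible: two fast-path branches collapse into a single mask test, and the slow path re-discriminates. The following is a minimal sketch of that shape, not the V8 source; the bit values and the NeedsSlowPath* helper names are made up for illustration, while the constant names mirror those in the diff.

#include <cstdint>

// Illustrative bit values; the real constants live in V8's per-chunk flags.
constexpr uintptr_t kFromPageBit = uintptr_t{1} << 3;
constexpr uintptr_t kToPageBit = uintptr_t{1} << 4;
constexpr uintptr_t kInSharedHeapBit = uintptr_t{1} << 5;

// Before: two separate fast-path branches (old-to-new, then old-to-shared).
bool NeedsSlowPathBefore(uintptr_t value_flags) {
  if (value_flags & (kFromPageBit | kToPageBit)) return true;
  if (value_flags & kInSharedHeapBit) return true;
  return false;
}

// After: one branch; which barrier actually applies is decided in the
// slow path (see Heap::CombinedGenerationalAndSharedBarrierSlow below).
bool NeedsSlowPathAfter(uintptr_t value_flags) {
  constexpr uintptr_t kYoungOrSharedChunkMask =
      kFromPageBit | kToPageBit | kInSharedHeapBit;
  return (value_flags & kYoungOrSharedChunkMask) != 0;
}
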
src/heap/heap-write-barrier-inl.h
@@ -23,14 +23,10 @@ namespace internal {
 // Defined in heap.cc.
 V8_EXPORT_PRIVATE bool Heap_PageFlagsAreConsistent(HeapObject object);
-V8_EXPORT_PRIVATE bool Heap_ValueMightRequireGenerationalWriteBarrier(
-    HeapObject value);
-V8_EXPORT_PRIVATE void Heap_GenerationalBarrierSlow(HeapObject object,
-                                                    Address slot,
-                                                    HeapObject value);
-V8_EXPORT_PRIVATE void Heap_SharedHeapBarrierSlow(HeapObject object,
-                                                  Address slot,
-                                                  HeapObject value);
+V8_EXPORT_PRIVATE void Heap_CombinedGenerationalAndSharedBarrierSlow(
+    HeapObject object, Address slot, HeapObject value);
+V8_EXPORT_PRIVATE void Heap_CombinedGenerationalAndSharedEphemeronBarrierSlow(
+    EphemeronHashTable table, Address slot, HeapObject value);
 V8_EXPORT_PRIVATE void Heap_WriteBarrierForCodeSlow(Code host);
 V8_EXPORT_PRIVATE void Heap_GenerationalBarrierForCodeSlow(Code host,
@@ -73,6 +69,14 @@ struct MemoryChunk {
     return GetFlags() & kYoungGenerationMask;
   }

+  // Checks whether chunk is either in young gen or shared heap.
+  V8_INLINE bool IsYoungOrSharedChunk() const {
+    if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return false;
+    constexpr uintptr_t kYoungOrSharedChunkMask =
+        kFromPageBit | kToPageBit | kInSharedHeapBit;
+    return GetFlags() & kYoungOrSharedChunkMask;
+  }
+
   V8_INLINE uintptr_t GetFlags() const {
     return *reinterpret_cast<const uintptr_t*>(reinterpret_cast<Address>(this) +
                                                kFlagsOffset);
@@ -104,20 +108,11 @@ inline void CombinedWriteBarrierInternal(HeapObject host, HeapObjectSlot slot,
       heap_internals::MemoryChunk::FromHeapObject(value);

   const bool host_in_young_gen = host_chunk->InYoungGeneration();
-  const bool host_in_shared = host_chunk->InSharedHeap();
   const bool is_marking = host_chunk->IsMarking();

-  if (!host_in_young_gen) {
-    if (value_chunk->InYoungGeneration()) {
-      // Generational write barrier (old-to-new).
-      Heap_GenerationalBarrierSlow(host, slot.address(), value);
-    } else if (value_chunk->InSharedHeap()) {
-      // Shared heap write barrier (old-to-shared).
-      if (!host_in_shared) {
-        Heap_SharedHeapBarrierSlow(host, slot.address(), value);
-      }
-    }
+  if (!host_in_young_gen && value_chunk->IsYoungOrSharedChunk()) {
+    // Generational or shared heap write barrier (old-to-new or old-to-shared).
+    Heap_CombinedGenerationalAndSharedBarrierSlow(host, slot.address(), value);
   }

   // Marking barrier: mark value & record slots when marking is on.
@@ -189,21 +184,11 @@ inline void CombinedEphemeronWriteBarrier(EphemeronHashTable host,
       heap_internals::MemoryChunk::FromHeapObject(heap_object_value);

   const bool host_in_young_gen = host_chunk->InYoungGeneration();
-  const bool host_in_shared = host_chunk->InSharedHeap();
   const bool is_marking = host_chunk->IsMarking();

-  if (!host_in_young_gen) {
-    if (value_chunk->InYoungGeneration()) {
-      // Generational write barrier (old-to-new).
-      Heap_GenerationalEphemeronKeyBarrierSlow(host_chunk->GetHeap(), host,
-                                               slot.address());
-    } else if (value_chunk->InSharedHeap()) {
-      // Shared heap write barrier (old-to-shared).
-      if (!host_in_shared) {
-        Heap_SharedHeapBarrierSlow(host, slot.address(), heap_object_value);
-      }
-    }
+  if (!host_in_young_gen && value_chunk->IsYoungOrSharedChunk()) {
+    Heap_CombinedGenerationalAndSharedEphemeronBarrierSlow(host, slot.address(),
+                                                           heap_object_value);
   }

   // Marking barrier: mark value & record slots when marking is on.
src/heap/heap.cc
@@ -132,22 +132,15 @@ bool Heap_PageFlagsAreConsistent(HeapObject object) {
   return Heap::PageFlagsAreConsistent(object);
 }

-bool Heap_ValueMightRequireGenerationalWriteBarrier(HeapObject value) {
-  if (!value.IsCode()) return true;
-  // Code objects are never in new space and thus don't require generational
-  // write barrier.
-  DCHECK(!ObjectInYoungGeneration(value));
-  return false;
-}
-
-void Heap_GenerationalBarrierSlow(HeapObject object, Address slot,
-                                  HeapObject value) {
-  Heap::GenerationalBarrierSlow(object, slot, value);
+void Heap_CombinedGenerationalAndSharedBarrierSlow(HeapObject object,
+                                                   Address slot,
+                                                   HeapObject value) {
+  Heap::CombinedGenerationalAndSharedBarrierSlow(object, slot, value);
 }

-void Heap_SharedHeapBarrierSlow(HeapObject object, Address slot,
-                                HeapObject value) {
-  Heap::SharedHeapBarrierSlow(object, slot, value);
+void Heap_CombinedGenerationalAndSharedEphemeronBarrierSlow(
+    EphemeronHashTable table, Address slot, HeapObject value) {
+  Heap::CombinedGenerationalAndSharedEphemeronBarrierSlow(table, slot, value);
 }

 void Heap_WriteBarrierForCodeSlow(Code host) {
@@ -164,12 +157,6 @@ void Heap_SharedHeapBarrierForCodeSlow(Code host, RelocInfo* rinfo,
   Heap::SharedHeapBarrierForCodeSlow(host, rinfo, object);
 }

-void Heap_GenerationalEphemeronKeyBarrierSlow(Heap* heap, HeapObject host,
-                                              Address slot) {
-  EphemeronHashTable table = EphemeronHashTable::cast(host);
-  heap->RecordEphemeronKeyWrite(table, slot);
-}
-
 void Heap::SetConstructStubCreateDeoptPCOffset(int pc_offset) {
   DCHECK_EQ(Smi::zero(), construct_stub_create_deopt_pc_offset());
   set_construct_stub_create_deopt_pc_offset(Smi::FromInt(pc_offset));
@@ -7301,6 +7288,43 @@ void Heap::WriteBarrierForCodeSlow(Code code) {
   }
 }

+void Heap::CombinedGenerationalAndSharedBarrierSlow(HeapObject object,
+                                                    Address slot,
+                                                    HeapObject value) {
+  MemoryChunk* value_chunk = MemoryChunk::FromHeapObject(value);
+
+  if (value_chunk->InYoungGeneration()) {
+    Heap::GenerationalBarrierSlow(object, slot, value);
+
+  } else {
+    DCHECK(value_chunk->InSharedHeap());
+
+    heap_internals::MemoryChunk* object_chunk =
+        heap_internals::MemoryChunk::FromHeapObject(object);
+    if (!object_chunk->InSharedHeap())
+      Heap::SharedHeapBarrierSlow(object, slot, value);
+  }
+}
+
+void Heap::CombinedGenerationalAndSharedEphemeronBarrierSlow(
+    EphemeronHashTable table, Address slot, HeapObject value) {
+  MemoryChunk* value_chunk = MemoryChunk::FromHeapObject(value);
+
+  if (value_chunk->InYoungGeneration()) {
+    MemoryChunk* table_chunk = MemoryChunk::FromHeapObject(table);
+    table_chunk->heap()->RecordEphemeronKeyWrite(table, slot);
+
+  } else {
+    DCHECK(value_chunk->InSharedHeap());
+
+    heap_internals::MemoryChunk* table_chunk =
+        heap_internals::MemoryChunk::FromHeapObject(table);
+    if (!table_chunk->InSharedHeap()) {
+      Heap::SharedHeapBarrierSlow(table, slot, value);
+    }
+  }
+}
+
 void Heap::GenerationalBarrierSlow(HeapObject object, Address slot,
                                    HeapObject value) {
   MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
src/heap/heap.h
@@ -492,9 +492,18 @@ class Heap {
   V8_EXPORT_PRIVATE static void WriteBarrierForCodeSlow(Code host);
+
+  // Implements slow path of both generational & shared heap barrier.
+  V8_EXPORT_PRIVATE static void CombinedGenerationalAndSharedBarrierSlow(
+      HeapObject object, Address slot, HeapObject value);
+  V8_EXPORT_PRIVATE static void
+  CombinedGenerationalAndSharedEphemeronBarrierSlow(EphemeronHashTable table,
+                                                    Address slot,
+                                                    HeapObject value);
+
   V8_EXPORT_PRIVATE static void GenerationalBarrierSlow(HeapObject object,
                                                         Address slot,
                                                         HeapObject value);
   V8_EXPORT_PRIVATE static void SharedHeapBarrierSlow(HeapObject object,
                                                       Address slot,
                                                       HeapObject value);
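
A closing note on why the trade-off is safe: funneling both barriers through one slow-path entry loses no cases, because the combined mask test fires exactly when either of the two original tests would have. Below is a tiny standalone check of that equivalence, reusing the illustrative constants from the sketch above; again an illustration, not V8 code.

#include <cassert>
#include <cstdint>

int main() {
  constexpr uintptr_t kFromPageBit = uintptr_t{1} << 3;
  constexpr uintptr_t kToPageBit = uintptr_t{1} << 4;
  constexpr uintptr_t kInSharedHeapBit = uintptr_t{1} << 5;
  constexpr uintptr_t kYoungOrSharedChunkMask =
      kFromPageBit | kToPageBit | kInSharedHeapBit;

  // Enumerate every combination of the three relevant flag bits.
  for (uintptr_t flags = 0; flags < (uintptr_t{1} << 6); ++flags) {
    const bool young = (flags & (kFromPageBit | kToPageBit)) != 0;
    const bool shared = (flags & kInSharedHeapBit) != 0;
    const bool combined = (flags & kYoungOrSharedChunkMask) != 0;
    assert(combined == (young || shared));
  }
  return 0;
}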