Commit 94ca8fa8 authored by Dominik Inführ, committed by V8 LUCI CQ

[heap] Drop UPDATE_WEAK_WRITE_BARRIER write barrier mode

Replace all usages of UPDATE_WEAK_WRITE_BARRIER with
UPDATE_WRITE_BARRIER. None of these barrier call sites are hot, so the
additional branch for the marking barrier shouldn't be a problem.
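
To make the branch-cost argument concrete, here is a minimal standalone
sketch (not V8's actual barrier; GenerationalBarrier, MarkingBarrier and
g_is_marking are hypothetical stand-ins). With UPDATE_WEAK_WRITE_BARRIER
gone, the mode check inside the barrier collapses to a single is_marking
test:

#include <cstdio>

enum WriteBarrierMode { SKIP_WRITE_BARRIER, UPDATE_WRITE_BARRIER };

static bool g_is_marking = false;  // toggled by the GC in a real heap

void GenerationalBarrier() { std::puts("remember old-to-new slot"); }
void MarkingBarrier() { std::puts("mark value, record slot"); }

// Before this CL the marking part needed `mode == UPDATE_WRITE_BARRIER &&
// is_marking`; now every caller passes UPDATE_WRITE_BARRIER, so only the
// is_marking branch remains.
void CombinedWriteBarrier(WriteBarrierMode mode) {
  if (mode == SKIP_WRITE_BARRIER) return;
  GenerationalBarrier();
  if (g_is_marking) MarkingBarrier();  // the "additional branch"
}

int main() {
  CombinedWriteBarrier(UPDATE_WRITE_BARRIER);  // generational part only
  g_is_marking = true;
  CombinedWriteBarrier(UPDATE_WRITE_BARRIER);  // generational + marking
}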

Performing the marking barrier could in theory cause more floating
garbage. However, in this case the write barrier runs only once,
directly after e.g. allocating a Code or NativeContext object. Since
UPDATE_WEAK_WRITE_BARRIER only skips the marking barrier, behavior can
only differ while marking is on. And since we already have black
allocation for objects in old space, the marking barrier will not
create additional floating garbage.
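
A standalone sketch of the black-allocation argument (illustrative
only; none of these names are V8's): an object allocated black while
marking is on is already marked, so running the marking barrier on it
changes nothing:

#include <cassert>

struct HeapObject {
  bool marked = false;
};

bool g_black_allocation = false;  // true while incremental marking is on

// During incremental marking, new old-space objects start out "black".
HeapObject AllocateOldSpace() {
  HeapObject obj;
  obj.marked = g_black_allocation;
  return obj;
}

void MarkingBarrier(HeapObject& value) { value.marked = true; }

int main() {
  g_black_allocation = true;             // marking is on
  HeapObject code = AllocateOldSpace();  // e.g. a fresh Code object
  bool was_marked = code.marked;         // already black
  MarkingBarrier(code);                  // the now-unconditional barrier
  assert(code.marked == was_marked);     // no-op: no extra floating garbage
}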

In case of a performance regression, we should also be able to replace
those usages with SKIP_WRITE_BARRIER: NativeContext and Code objects
are never allocated in the young generation, so running the
generational barrier shouldn't be necessary either. It's just hard to
DCHECK that SKIP_WRITE_BARRIER is valid here.
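
For illustration, the assertion one would want before switching to
SKIP_WRITE_BARRIER might look like the sketch below (hypothetical
names; InYoungGeneration stands in for the heap's real predicate). The
difficulty is proving at the store site that every value ever written
through the slot stays out of the young generation:

#include <cassert>

struct HeapObject {
  bool in_young_generation;
};

// Stand-in for the heap's real young-generation predicate.
bool InYoungGeneration(const HeapObject& obj) {
  return obj.in_young_generation;
}

// A barrier-free store is only valid if the value can never be young.
void SetNextCodeLink(HeapObject& host, const HeapObject& value) {
  assert(!InYoungGeneration(value));  // what a DCHECK would need to prove
  // ... store value into host without any write barrier ...
  (void)host;
}

int main() {
  HeapObject code{/*in_young_generation=*/false};  // Code is old-space only
  HeapObject list_head{/*in_young_generation=*/false};
  SetNextCodeLink(code, list_head);  // assertion holds for old-space values
}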

Bug: v8:11708
Change-Id: I25d760a46d1d7ec973cc589f51ddf80ca3b5419d
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3663080
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#80839}
parent 171ee1a7
@@ -99,7 +99,7 @@ struct MemoryChunk {
 inline void CombinedWriteBarrierInternal(HeapObject host, HeapObjectSlot slot,
                                          HeapObject value,
                                          WriteBarrierMode mode) {
-  DCHECK(mode == UPDATE_WRITE_BARRIER || mode == UPDATE_WEAK_WRITE_BARRIER);
+  DCHECK_EQ(mode, UPDATE_WRITE_BARRIER);
   heap_internals::MemoryChunk* host_chunk =
       heap_internals::MemoryChunk::FromHeapObject(host);
@@ -116,7 +116,7 @@ inline void CombinedWriteBarrierInternal(HeapObject host, HeapObjectSlot slot,
   }
   // Marking barrier: mark value & record slots when marking is on.
-  if (mode == UPDATE_WRITE_BARRIER && is_marking) {
+  if (is_marking) {
     WriteBarrier::MarkingSlow(host_chunk->GetHeap(), host, HeapObjectSlot(slot),
                               value);
   }
@@ -87,8 +87,8 @@ static void ClearWeakList(Heap* heap, Object list) {
 template <>
 struct WeakListVisitor<CodeT> {
   static void SetWeakNext(CodeT code, Object next) {
-    CodeDataContainerFromCodeT(code).set_next_code_link(
-        next, UPDATE_WEAK_WRITE_BARRIER);
+    CodeDataContainerFromCodeT(code).set_next_code_link(next,
+                                                        UPDATE_WRITE_BARRIER);
   }
   static Object WeakNext(CodeT code) {
@@ -113,7 +113,7 @@ struct WeakListVisitor<CodeT> {
 template <>
 struct WeakListVisitor<Context> {
   static void SetWeakNext(Context context, Object next) {
-    context.set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WEAK_WRITE_BARRIER);
+    context.set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WRITE_BARRIER);
   }
   static Object WeakNext(Context context) {
@@ -171,7 +171,7 @@ struct WeakListVisitor<Context> {
 template <>
 struct WeakListVisitor<AllocationSite> {
   static void SetWeakNext(AllocationSite obj, Object next) {
-    obj.set_weak_next(next, UPDATE_WEAK_WRITE_BARRIER);
+    obj.set_weak_next(next, UPDATE_WRITE_BARRIER);
   }
   static Object WeakNext(AllocationSite obj) { return obj.weak_next(); }
@@ -188,7 +188,7 @@ struct WeakListVisitor<AllocationSite> {
 template <>
 struct WeakListVisitor<JSFinalizationRegistry> {
   static void SetWeakNext(JSFinalizationRegistry obj, HeapObject next) {
-    obj.set_next_dirty(next, UPDATE_WEAK_WRITE_BARRIER);
+    obj.set_next_dirty(next, UPDATE_WRITE_BARRIER);
   }
   static Object WeakNext(JSFinalizationRegistry obj) {
@@ -1238,7 +1238,7 @@ static void AddToWeakNativeContextList(Isolate* isolate, Context context) {
   }
 #endif
   context.set(Context::NEXT_CONTEXT_LINK, heap->native_contexts_list(),
-              UPDATE_WEAK_WRITE_BARRIER);
+              UPDATE_WRITE_BARRIER);
   heap->set_native_contexts_list(context);
 }
@@ -296,7 +296,7 @@ ScriptContextTable NativeContext::synchronized_script_context_table() const {
 }
 void NativeContext::SetOptimizedCodeListHead(Object head) {
-  set(OPTIMIZED_CODE_LIST, head, UPDATE_WEAK_WRITE_BARRIER, kReleaseStore);
+  set(OPTIMIZED_CODE_LIST, head, UPDATE_WRITE_BARRIER, kReleaseStore);
 }
 Object NativeContext::OptimizedCodeListHead() {
@@ -304,7 +304,7 @@ Object NativeContext::OptimizedCodeListHead() {
 }
 void NativeContext::SetDeoptimizedCodeListHead(Object head) {
-  set(DEOPTIMIZED_CODE_LIST, head, UPDATE_WEAK_WRITE_BARRIER, kReleaseStore);
+  set(DEOPTIMIZED_CODE_LIST, head, UPDATE_WRITE_BARRIER, kReleaseStore);
 }
 Object NativeContext::DeoptimizedCodeListHead() {
@@ -430,7 +430,7 @@ void NativeContext::AddOptimizedCode(CodeT code) {
   DCHECK(CodeKindCanDeoptimize(code.kind()));
   DCHECK(code.next_code_link().IsUndefined());
   code.set_next_code_link(OptimizedCodeListHead());
-  set(OPTIMIZED_CODE_LIST, code, UPDATE_WEAK_WRITE_BARRIER, kReleaseStore);
+  set(OPTIMIZED_CODE_LIST, code, UPDATE_WRITE_BARRIER, kReleaseStore);
 }
 Handle<Object> Context::ErrorMessageForCodeGenerationFromStrings() {
@@ -218,13 +218,10 @@ class PropertyDescriptorObject;
 // UNSAFE_SKIP_WRITE_BARRIER skips the write barrier.
 // SKIP_WRITE_BARRIER skips the write barrier and asserts that this is safe in
 // the MemoryOptimizer
-// UPDATE_WEAK_WRITE_BARRIER skips the marking part of the write barrier and
-// only performs the generational part.
 // UPDATE_WRITE_BARRIER is doing the full barrier, marking and generational.
 enum WriteBarrierMode {
   SKIP_WRITE_BARRIER,
   UNSAFE_SKIP_WRITE_BARRIER,
-  UPDATE_WEAK_WRITE_BARRIER,
   UPDATE_EPHEMERON_KEY_WRITE_BARRIER,
   UPDATE_WRITE_BARRIER
 };
@@ -687,8 +687,7 @@ class V8_NODISCARD UnlinkWeakNextScope {
   ~UnlinkWeakNextScope() {
     if (next_ == Smi::zero()) return;
-    AllocationSite::cast(object_).set_weak_next(next_,
-                                                UPDATE_WEAK_WRITE_BARRIER);
+    AllocationSite::cast(object_).set_weak_next(next_, UPDATE_WRITE_BARRIER);
   }
  private:
@@ -644,7 +644,7 @@ UNINITIALIZED_TEST(ContextSerializerCustomContext) {
     // Add context to the weak native context list
     context->set(Context::NEXT_CONTEXT_LINK,
                  isolate->heap()->native_contexts_list(),
-                 UPDATE_WEAK_WRITE_BARRIER);
+                 UPDATE_WRITE_BARRIER);
     isolate->heap()->set_native_contexts_list(*context);
     CHECK(context->global_proxy() == *global_proxy);