Commit a3610907 authored by Leon Bettscheider, committed by V8 LUCI CQ

[heap] Skip generated code write barrier for minor incremental marking

This CL bails out on the generated code write barrier when minor
incremental marking is active.

Currently, is_minor_marking_flag_ is always false. It will be connected to incremental marking in subsequent CLs.

Bug: v8:13012
Change-Id: I0f5bc4aa14e9d56adbdad305499f2ca8f951765b
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3838784
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Leon Bettscheider <bettscheider@google.com>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82590}
parent 8167182a
...@@ -118,6 +118,13 @@ class WriteBarrierCodeStubAssembler : public CodeStubAssembler { ...@@ -118,6 +118,13 @@ class WriteBarrierCodeStubAssembler : public CodeStubAssembler {
return Word32NotEqual(Load<Uint8T>(is_marking_addr), Int32Constant(0)); return Word32NotEqual(Load<Uint8T>(is_marking_addr), Int32Constant(0));
} }
TNode<BoolT> IsMinorMarking() {
  // Reads the isolate-wide minor-marking flag (a byte in IsolateData,
  // reached via an external reference) and tests it for non-zero.
  TNode<ExternalReference> flag_address = ExternalConstant(
      ExternalReference::heap_is_minor_marking_flag_address(this->isolate()));
  TNode<Uint8T> flag_value = Load<Uint8T>(flag_address);
  return Word32NotEqual(flag_value, Int32Constant(0));
}
TNode<BoolT> IsPageFlagSet(TNode<IntPtrT> object, int mask) { TNode<BoolT> IsPageFlagSet(TNode<IntPtrT> object, int mask) {
TNode<IntPtrT> page = PageFromAddress(object); TNode<IntPtrT> page = PageFromAddress(object);
TNode<IntPtrT> flags = UncheckedCast<IntPtrT>( TNode<IntPtrT> flags = UncheckedCast<IntPtrT>(
...@@ -361,6 +368,9 @@ class WriteBarrierCodeStubAssembler : public CodeStubAssembler { ...@@ -361,6 +368,9 @@ class WriteBarrierCodeStubAssembler : public CodeStubAssembler {
SaveFPRegsMode fp_mode) { SaveFPRegsMode fp_mode) {
Label call_incremental_wb(this), next(this); Label call_incremental_wb(this), next(this);
// No write barrier for minor incremental marking.
GotoIf(IsMinorMarking(), &next);
// There are two cases we need to call incremental write barrier. // There are two cases we need to call incremental write barrier.
// 1) value_is_white // 1) value_is_white
GotoIf(IsWhite(value), &call_incremental_wb); GotoIf(IsWhite(value), &call_incremental_wb);
......
...@@ -500,6 +500,11 @@ ExternalReference ExternalReference::heap_is_marking_flag_address( ...@@ -500,6 +500,11 @@ ExternalReference ExternalReference::heap_is_marking_flag_address(
return ExternalReference(isolate->heap()->IsMarkingFlagAddress()); return ExternalReference(isolate->heap()->IsMarkingFlagAddress());
} }
ExternalReference ExternalReference::heap_is_minor_marking_flag_address(
    Isolate* isolate) {
  // Exposes the address of the heap's minor (incremental) marking flag so
  // generated code can poll it.
  bool* flag_address = isolate->heap()->IsMinorMarkingFlagAddress();
  return ExternalReference(flag_address);
}
ExternalReference ExternalReference::new_space_allocation_top_address( ExternalReference ExternalReference::new_space_allocation_top_address(
Isolate* isolate) { Isolate* isolate) {
return ExternalReference(isolate->heap()->NewSpaceAllocationTopAddress()); return ExternalReference(isolate->heap()->NewSpaceAllocationTopAddress());
......
...@@ -40,6 +40,7 @@ class StatsCounter; ...@@ -40,6 +40,7 @@ class StatsCounter;
V(address_of_jslimit, "StackGuard::address_of_jslimit()") \ V(address_of_jslimit, "StackGuard::address_of_jslimit()") \
V(address_of_real_jslimit, "StackGuard::address_of_real_jslimit()") \ V(address_of_real_jslimit, "StackGuard::address_of_real_jslimit()") \
V(heap_is_marking_flag_address, "heap_is_marking_flag_address") \ V(heap_is_marking_flag_address, "heap_is_marking_flag_address") \
V(heap_is_minor_marking_flag_address, "heap_is_minor_marking_flag_address") \
V(new_space_allocation_top_address, "Heap::NewSpaceAllocationTopAddress()") \ V(new_space_allocation_top_address, "Heap::NewSpaceAllocationTopAddress()") \
V(new_space_allocation_limit_address, \ V(new_space_allocation_limit_address, \
"Heap::NewSpaceAllocationLimitAddress()") \ "Heap::NewSpaceAllocationLimitAddress()") \
......
...@@ -54,7 +54,8 @@ class Isolate; ...@@ -54,7 +54,8 @@ class Isolate;
V(kNewAllocationInfo, LinearAllocationArea::kSize, new_allocation_info) \ V(kNewAllocationInfo, LinearAllocationArea::kSize, new_allocation_info) \
V(kOldAllocationInfo, LinearAllocationArea::kSize, old_allocation_info) \ V(kOldAllocationInfo, LinearAllocationArea::kSize, old_allocation_info) \
V(kStackIsIterableOffset, kUInt8Size, stack_is_iterable) \ V(kStackIsIterableOffset, kUInt8Size, stack_is_iterable) \
V(kIsMarkingFlag, kUInt8Size, is_marking_flag) V(kIsMarkingFlag, kUInt8Size, is_marking_flag) \
V(kIsMinorMarkingFlag, kUInt8Size, is_minor_marking_flag)
#ifdef V8_COMPRESS_POINTERS #ifdef V8_COMPRESS_POINTERS
#define ISOLATE_DATA_FIELDS_POINTER_COMPRESSION(V) \ #define ISOLATE_DATA_FIELDS_POINTER_COMPRESSION(V) \
...@@ -223,6 +224,7 @@ class IsolateData final { ...@@ -223,6 +224,7 @@ class IsolateData final {
uint8_t stack_is_iterable_ = 1; uint8_t stack_is_iterable_ = 1;
bool is_marking_flag_ = false; bool is_marking_flag_ = false;
bool is_minor_marking_flag_ = false;
// Ensure the size is 8-byte aligned in order to make alignment of the field // Ensure the size is 8-byte aligned in order to make alignment of the field
// following the IsolateData field predictable. This solves the issue with // following the IsolateData field predictable. This solves the issue with
......
...@@ -7508,6 +7508,14 @@ bool* Heap::IsMarkingFlagAddress() { ...@@ -7508,6 +7508,14 @@ bool* Heap::IsMarkingFlagAddress() {
return &isolate()->isolate_data()->is_marking_flag_; return &isolate()->isolate_data()->is_marking_flag_;
} }
void Heap::SetIsMinorMarkingFlag(bool value) {
  // The flag is stored in IsolateData, where generated code can read it.
  auto* data = isolate()->isolate_data();
  data->is_minor_marking_flag_ = value;
}
bool* Heap::IsMinorMarkingFlagAddress() {
  // Used to build the external reference through which generated code
  // queries minor incremental marking status.
  auto* data = isolate()->isolate_data();
  return &data->is_minor_marking_flag_;
}
// StrongRootBlocks are allocated as a block of addresses, prefixed with a // StrongRootBlocks are allocated as a block of addresses, prefixed with a
// StrongRootsEntry pointer: // StrongRootsEntry pointer:
// //
......
...@@ -1076,6 +1076,7 @@ class Heap { ...@@ -1076,6 +1076,7 @@ class Heap {
// Used for query incremental marking status in generated code. // Used for query incremental marking status in generated code.
bool* IsMarkingFlagAddress(); bool* IsMarkingFlagAddress();
bool* IsMinorMarkingFlagAddress();
void ClearRecordedSlot(HeapObject object, ObjectSlot slot); void ClearRecordedSlot(HeapObject object, ObjectSlot slot);
void ClearRecordedSlotRange(Address start, Address end); void ClearRecordedSlotRange(Address start, Address end);
...@@ -2141,6 +2142,7 @@ class Heap { ...@@ -2141,6 +2142,7 @@ class Heap {
bool IsStressingScavenge(); bool IsStressingScavenge();
void SetIsMarkingFlag(bool value); void SetIsMarkingFlag(bool value);
void SetIsMinorMarkingFlag(bool value);
ExternalMemoryAccounting external_memory_; ExternalMemoryAccounting external_memory_;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment