Commit 7c426286 authored by Ulan Degenbaev, committed by Commit Bot

[heap] Use normal marking write barrier for fixed array elements

This simplifies the marking write barrier for elements to mark the
values instead of revisiting the array.

Bug: chromium:918485

Change-Id: Id5da0d5b9ff8385a256fe14f4bf7171f9f6343e1
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1588459
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#61125}
parent 5a0f0ebf
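For context, here is a minimal standalone sketch of the behavioral change described in the commit message. This is not V8 code; all names (ToyObject, ToyArray, ToyMarker) are hypothetical. The old elements barrier kept the array black and pushed the whole array back onto the marking worklist so every slot would be rescanned, while the new barrier behaves like the normal per-slot marking write barrier and marks the stored values directly.

// Toy model of the write-barrier change; assumed types, not the V8 API.
#include <queue>
#include <vector>

enum class Color { kWhite, kGrey, kBlack };

struct ToyObject {
  Color color = Color::kWhite;
};

struct ToyArray : ToyObject {
  std::vector<ToyObject*> elements;
};

struct ToyMarker {
  std::queue<ToyObject*> worklist;  // objects waiting to be (re)scanned

  // Old behavior: re-push the whole (black) array so the incremental marker
  // rescans every element slot ("revisit the array").
  void OldElementsBarrier(ToyArray* array) {
    array->color = Color::kBlack;
    worklist.push(array);
  }

  // New behavior: act like the normal marking barrier and mark the written
  // values themselves; the array itself is never rescanned.
  void NewElementsBarrier(ToyArray* array) {
    for (ToyObject* value : array->elements) {
      if (value != nullptr && value->color == Color::kWhite) {
        value->color = Color::kGrey;  // WhiteToGrey
        worklist.push(value);         // push the value, not the array
      }
    }
  }
};

int main() {
  ToyArray array;
  ToyObject a, b;
  array.elements = {&a, &b};
  ToyMarker marker;
  marker.NewElementsBarrier(&array);  // greys a and b; the array stays put
  return 0;
}

The actual implementation added below (IncrementalMarking::RecordWrites) additionally records old-to-old slots for evacuation candidates, which this toy version omits.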
@@ -330,24 +330,14 @@ class ConcurrentMarkingVisitor final
marking_state_.GreyToBlack(object);
int size = FixedArray::BodyDescriptor::SizeOf(map, object);
size_t current_progress_bar = chunk->ProgressBar();
if (current_progress_bar == 0) {
// Try to move the progress bar forward to start offset. This solves the
// problem of not being able to observe a progress bar reset when
// processing the first kProgressBarScanningChunk.
if (!chunk->TrySetProgressBar(0,
FixedArray::BodyDescriptor::kStartOffset))
return 0;
current_progress_bar = FixedArray::BodyDescriptor::kStartOffset;
}
int start = static_cast<int>(current_progress_bar);
int start = std::max(static_cast<int>(current_progress_bar),
FixedArray::BodyDescriptor::kStartOffset);
int end = Min(size, start + kProgressBarScanningChunk);
if (start < end) {
VisitPointers(object, object.RawField(start), object.RawField(end));
// Setting the progress bar can fail if the object that is currently
// scanned is also revisited. In this case, there may be two tasks racing
// on the progress counter. The loser can bail out because the progress
// bar is reset before the tasks race on the object.
if (chunk->TrySetProgressBar(current_progress_bar, end) && (end < size)) {
bool success = chunk->TrySetProgressBar(current_progress_bar, end);
CHECK(success);
if (end < size) {
// The object can be pushed back onto the marking worklist only after
// the progress bar was updated.
shared_.Push(object);
......
@@ -43,7 +43,7 @@ V8_EXPORT_PRIVATE void Heap_GenerationalBarrierForElementsSlow(Heap* heap,
int offset,
int length);
V8_EXPORT_PRIVATE void Heap_MarkingBarrierForElementsSlow(Heap* heap,
HeapObject object);
FixedArray object);
V8_EXPORT_PRIVATE void Heap_MarkingBarrierForDescriptorArraySlow(
Heap* heap, HeapObject host, HeapObject descriptor_array,
int number_of_own_descriptors);
@@ -216,12 +216,12 @@ inline void MarkingBarrier(HeapObject object, MaybeObjectSlot slot,
value_heap_object);
}
inline void MarkingBarrierForElements(Heap* heap, HeapObject object) {
inline void MarkingBarrierForElements(Heap* heap, FixedArray array) {
heap_internals::MemoryChunk* object_chunk =
heap_internals::MemoryChunk::FromHeapObject(object);
heap_internals::MemoryChunk::FromHeapObject(array);
if (!object_chunk->IsMarking()) return;
Heap_MarkingBarrierForElementsSlow(heap, object);
Heap_MarkingBarrierForElementsSlow(heap, array);
}
inline void MarkingBarrierForCode(Code host, RelocInfo* rinfo,
......
@@ -47,7 +47,7 @@ void GenerationalBarrierForCode(Code host, RelocInfo* rinfo, HeapObject object);
// Marking write barrier.
void MarkingBarrier(HeapObject object, ObjectSlot slot, Object value);
void MarkingBarrier(HeapObject object, MaybeObjectSlot slot, MaybeObject value);
void MarkingBarrierForElements(Heap* heap, HeapObject object);
void MarkingBarrierForElements(Heap* heap, FixedArray array);
void MarkingBarrierForCode(Code host, RelocInfo* rinfo, HeapObject object);
void MarkingBarrierForDescriptorArray(Heap* heap, HeapObject host,
......
@@ -30,6 +30,7 @@
#include "src/heap/gc-tracer.h"
#include "src/heap/heap-controller.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/heap/incremental-marking-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/mark-compact.h"
@@ -111,8 +112,8 @@ void Heap_GenerationalBarrierForElementsSlow(Heap* heap, FixedArray array,
Heap::GenerationalBarrierForElementsSlow(heap, array, offset, length);
}
void Heap_MarkingBarrierForElementsSlow(Heap* heap, HeapObject object) {
Heap::MarkingBarrierForElementsSlow(heap, object);
void Heap_MarkingBarrierForElementsSlow(Heap* heap, FixedArray array) {
Heap::MarkingBarrierForElementsSlow(heap, array);
}
void Heap_MarkingBarrierForDescriptorArraySlow(Heap* heap, HeapObject host,
@@ -5895,14 +5896,8 @@ void Heap::MarkingBarrierSlow(HeapObject object, Address slot,
value);
}
void Heap::MarkingBarrierForElementsSlow(Heap* heap, HeapObject object) {
IncrementalMarking::MarkingState* marking_state =
heap->incremental_marking()->marking_state();
if (!marking_state->IsBlack(object)) {
marking_state->WhiteToGrey(object);
marking_state->GreyToBlack(object);
}
heap->incremental_marking()->RevisitObject(object);
void Heap::MarkingBarrierForElementsSlow(Heap* heap, FixedArray array) {
heap->incremental_marking()->RecordWrites(array);
}
void Heap::MarkingBarrierForCodeSlow(Code host, RelocInfo* rinfo,
......
@@ -366,8 +366,8 @@ class Heap {
V8_EXPORT_PRIVATE static void MarkingBarrierSlow(HeapObject object,
Address slot,
HeapObject value);
V8_EXPORT_PRIVATE static void MarkingBarrierForElementsSlow(
Heap* heap, HeapObject object);
V8_EXPORT_PRIVATE static void MarkingBarrierForElementsSlow(Heap* heap,
FixedArray array);
V8_EXPORT_PRIVATE static void MarkingBarrierForCodeSlow(Code host,
RelocInfo* rinfo,
HeapObject value);
......
@@ -117,6 +117,30 @@ void IncrementalMarking::RecordWriteIntoCode(Code host, RelocInfo* rinfo,
}
}
void IncrementalMarking::RecordWrites(FixedArray array) {
int length = array->length();
MarkCompactCollector* collector = heap_->mark_compact_collector();
MemoryChunk* source_page = MemoryChunk::FromHeapObject(array);
if (source_page->ShouldSkipEvacuationSlotRecording<AccessMode::ATOMIC>()) {
for (int i = 0; i < length; i++) {
Object value = array->get(i);
if (value->IsHeapObject()) {
BaseRecordWrite(array, HeapObject::cast(value));
}
}
} else {
for (int i = 0; i < length; i++) {
Object value = array->get(i);
if (value->IsHeapObject() &&
BaseRecordWrite(array, HeapObject::cast(value))) {
collector->RecordSlot(source_page,
HeapObjectSlot(array->RawFieldOfElementAt(i)),
HeapObject::cast(value));
}
}
}
}
bool IncrementalMarking::WhiteToGreyAndPush(HeapObject obj) {
if (marking_state()->WhiteToGrey(obj)) {
marking_worklist()->Push(obj);
@@ -697,12 +721,6 @@ void IncrementalMarking::UpdateMarkedBytesAfterScavenge(
bytes_marked_ -= Min(bytes_marked_, dead_bytes_in_new_space);
}
bool IncrementalMarking::IsFixedArrayWithProgressBar(HeapObject obj) {
if (!obj->IsFixedArray()) return false;
MemoryChunk* chunk = MemoryChunk::FromHeapObject(obj);
return chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR);
}
int IncrementalMarking::VisitObject(Map map, HeapObject obj) {
DCHECK(marking_state()->IsGrey(obj) || marking_state()->IsBlack(obj));
if (!marking_state()->GreyToBlack(obj)) {
@@ -735,11 +753,9 @@ void IncrementalMarking::ProcessBlackAllocatedObject(HeapObject obj) {
void IncrementalMarking::RevisitObject(HeapObject obj) {
DCHECK(IsMarking());
DCHECK(marking_state()->IsBlack(obj));
Page* page = Page::FromHeapObject(obj);
if (page->owner()->identity() == LO_SPACE ||
page->owner()->identity() == NEW_LO_SPACE) {
page->ResetProgressBar();
}
DCHECK_IMPLIES(MemoryChunk::FromHeapObject(obj)->IsFlagSet(
MemoryChunk::HAS_PROGRESS_BAR),
0u == MemoryChunk::FromHeapObject(obj)->ProgressBar());
Map map = obj->map();
WhiteToGreyAndPush(map);
IncrementalMarkingMarkingVisitor visitor(heap()->mark_compact_collector(),
......
@@ -202,6 +202,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
V8_INLINE void RecordWrite(HeapObject obj, ObjectSlot slot, Object value);
V8_INLINE void RecordMaybeWeakWrite(HeapObject obj, MaybeObjectSlot slot,
MaybeObject value);
void RecordWrites(FixedArray array);
void RevisitObject(HeapObject obj);
// Ensures that all descriptors in the range [0, number_of_own_descriptors)
// are visited.
@@ -285,8 +286,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
intptr_t bytes_to_process,
ForceCompletionAction completion = DO_NOT_FORCE_COMPLETION);
V8_INLINE bool IsFixedArrayWithProgressBar(HeapObject object);
// Visits the object and returns its size.
V8_INLINE int VisitObject(Map map, HeapObject obj);
......
@@ -398,24 +398,14 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
DCHECK(FLAG_use_marking_progress_bar);
DCHECK(heap_->IsLargeObject(object));
size_t current_progress_bar = chunk->ProgressBar();
if (current_progress_bar == 0) {
// Try to move the progress bar forward to start offset. This solves the
// problem of not being able to observe a progress bar reset when
// processing the first kProgressBarScanningChunk.
if (!chunk->TrySetProgressBar(0,
FixedArray::BodyDescriptor::kStartOffset))
return 0;
current_progress_bar = FixedArray::BodyDescriptor::kStartOffset;
}
int start = static_cast<int>(current_progress_bar);
int start = std::max(static_cast<int>(current_progress_bar),
FixedArray::BodyDescriptor::kStartOffset);
int end = Min(size, start + kProgressBarScanningChunk);
if (start < end) {
VisitPointers(object, object.RawField(start), object.RawField(end));
// Setting the progress bar can fail if the object that is currently
// scanned is also revisited. In this case, there may be two tasks racing
// on the progress counter. The loser can bail out because the progress
// bar is reset before the tasks race on the object.
if (chunk->TrySetProgressBar(current_progress_bar, end) && (end < size)) {
bool success = chunk->TrySetProgressBar(current_progress_bar, end);
CHECK(success);
if (end < size) {
DCHECK(marking_state()->IsBlack(object));
// The object can be pushed back onto the marking worklist only after
// the progress bar was updated.
@@ -493,14 +483,22 @@ void MarkCompactCollector::RecordSlot(HeapObject object, ObjectSlot slot,
void MarkCompactCollector::RecordSlot(HeapObject object, HeapObjectSlot slot,
HeapObject target) {
Page* target_page = Page::FromHeapObject(target);
Page* source_page = Page::FromHeapObject(object);
MemoryChunk* target_page = MemoryChunk::FromHeapObject(target);
MemoryChunk* source_page = MemoryChunk::FromHeapObject(object);
if (target_page->IsEvacuationCandidate<AccessMode::ATOMIC>() &&
!source_page->ShouldSkipEvacuationSlotRecording<AccessMode::ATOMIC>()) {
RememberedSet<OLD_TO_OLD>::Insert(source_page, slot.address());
}
}
void MarkCompactCollector::RecordSlot(MemoryChunk* source_page,
HeapObjectSlot slot, HeapObject target) {
MemoryChunk* target_page = MemoryChunk::FromHeapObject(target);
if (target_page->IsEvacuationCandidate<AccessMode::ATOMIC>()) {
RememberedSet<OLD_TO_OLD>::Insert(source_page, slot.address());
}
}
void MarkCompactCollector::AddTransitionArray(TransitionArray array) {
weak_objects_.transition_arrays.Push(kMainThread, array);
}
......
@@ -619,6 +619,8 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
HeapObject target);
V8_INLINE static void RecordSlot(HeapObject object, HeapObjectSlot slot,
HeapObject target);
V8_INLINE static void RecordSlot(MemoryChunk* source_page,
HeapObjectSlot slot, HeapObject target);
void RecordLiveSlotsOnPage(Page* page);
void UpdateSlots(SlotsBuffer* buffer);
......