Commit c1874ac3 authored by Dominik Inführ, committed by V8 LUCI CQ

[heap] Remove unnecessary IncrementalMarking::EnsureBlackAllocated

We now have different mechanisms for black allocation, for regular
sized objects we will set all mark bits for the LAB. For large
objects we will set the mark bit when initializing that large page.

So when we reach this method, the object is already marked black.

Bug: v8:11708
Change-Id: Ie0f82f78eefe06a25103264098cc59a3ee46d20c
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3817742
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82275}
parent 18105c72
...@@ -47,8 +47,6 @@ void IncrementalMarking::Observer::Step(int bytes_allocated, Address addr, ...@@ -47,8 +47,6 @@ void IncrementalMarking::Observer::Step(int bytes_allocated, Address addr,
RCS_SCOPE(heap->isolate(), RCS_SCOPE(heap->isolate(),
RuntimeCallCounterId::kGC_Custom_IncrementalMarkingObserver); RuntimeCallCounterId::kGC_Custom_IncrementalMarkingObserver);
incremental_marking_->AdvanceOnAllocation(); incremental_marking_->AdvanceOnAllocation();
// AdvanceIncrementalMarkingOnAllocation can start incremental marking.
incremental_marking_->EnsureBlackAllocated(addr, size);
} }
IncrementalMarking::IncrementalMarking(Heap* heap, WeakObjects* weak_objects) IncrementalMarking::IncrementalMarking(Heap* heap, WeakObjects* weak_objects)
...@@ -356,20 +354,6 @@ void IncrementalMarking::FinishBlackAllocation() { ...@@ -356,20 +354,6 @@ void IncrementalMarking::FinishBlackAllocation() {
} }
} }
// Ensures the freshly allocated region [allocated, allocated + size) is
// black (fully marked) while black allocation is active, so the incremental
// marker will not revisit objects created during marking.
// NOTE(review): per this commit's message, black allocation is now handled
// at LAB setup / large-page initialization, making this method redundant —
// which is why the commit deletes it.
void IncrementalMarking::EnsureBlackAllocated(Address allocated, size_t size) {
  // Only act during black allocation and for a real allocation address.
  if (black_allocation() && allocated != kNullAddress) {
    HeapObject object = HeapObject::FromAddress(allocated);
    // Skip objects already marked, and young-generation objects — black
    // allocation only applies to the old generation (see the guard below).
    if (marking_state()->IsWhite(object) && !Heap::InYoungGeneration(object)) {
      if (heap_->IsLargeObject(object)) {
        // A large object is the sole object on its page: flipping its own
        // mark bit to black is sufficient.
        marking_state()->WhiteToBlack(object);
      } else {
        // Regular-sized object: set all mark bits for the allocated area
        // on its page in one go.
        Page::FromAddress(allocated)->CreateBlackArea(allocated,
                                                      allocated + size);
      }
    }
  }
}
void IncrementalMarking::UpdateMarkingWorklistAfterYoungGenGC() { void IncrementalMarking::UpdateMarkingWorklistAfterYoungGenGC() {
if (!IsMarking()) return; if (!IsMarking()) return;
......
...@@ -148,10 +148,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking final { ...@@ -148,10 +148,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
return collector_->local_marking_worklists(); return collector_->local_marking_worklists();
} }
// Ensures that the given region is black allocated if it is in the old
// generation.
void EnsureBlackAllocated(Address allocated, size_t size);
bool IsBelowActivationThresholds() const; bool IsBelowActivationThresholds() const;
void IncrementLiveBytesBackground(MemoryChunk* chunk, intptr_t by) { void IncrementLiveBytesBackground(MemoryChunk* chunk, intptr_t by) {
......
...@@ -16,7 +16,6 @@ void StressMarkingObserver::Step(int bytes_allocated, Address soon_object, ...@@ -16,7 +16,6 @@ void StressMarkingObserver::Step(int bytes_allocated, Address soon_object,
size_t size) { size_t size) {
heap_->StartIncrementalMarkingIfAllocationLimitIsReached(Heap::kNoGCFlags, heap_->StartIncrementalMarkingIfAllocationLimitIsReached(Heap::kNoGCFlags,
kNoGCCallbackFlags); kNoGCCallbackFlags);
heap_->incremental_marking()->EnsureBlackAllocated(soon_object, size);
} }
} // namespace internal } // namespace internal
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.