Commit 491f9794 authored by Dominik Inführ, committed by Commit Bot

[heap] Start StressConcurrentAllocatorTask through AllocationObserver

Starting the task directly in NotifyDeserializationComplete() turned out
to be too early for some tests. Instead, let an AllocationObserver start
the stress allocation task once deserialization has finished.

Bug: v8:10315
Change-Id: I00a4294add5ec87074f9e775c602b3031cc16d58
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2358735
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#69441}
parent 7b7fbff4
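
The pattern this change adopts is a one-shot AllocationObserver: it fires on the first allocation step after deserialization, schedules StressConcurrentAllocatorTask, and then unregisters itself, which is why the observer bookkeeping below must tolerate removal while a step is in progress. The following self-contained C++ sketch illustrates only that pattern; ObserverRegistry, StartTaskObserver, and NotifyAllocation are names invented for the sketch and are not V8 API.

// Minimal sketch (not V8 code): a one-shot allocation observer that runs an
// action on its first Step() and then unregisters itself. Removal requests
// made while observers are being stepped are deferred, mirroring the
// pending-removal handling touched by this change.
#include <algorithm>
#include <cstddef>
#include <functional>
#include <iostream>
#include <vector>

class AllocationObserver {
 public:
  explicit AllocationObserver(size_t step_size) : step_size_(step_size) {}
  virtual ~AllocationObserver() = default;
  // Invoked once at least step_size_ bytes have been allocated.
  virtual void Step(size_t bytes_allocated) = 0;
  size_t step_size() const { return step_size_; }

 private:
  size_t step_size_;
};

class ObserverRegistry {
 public:
  void Add(AllocationObserver* observer) { observers_.push_back(observer); }

  // Removal may be requested from inside Step(); defer it until the current
  // notification round has finished.
  void Remove(AllocationObserver* observer) {
    pending_removed_.push_back(observer);
  }

  void NotifyAllocation(size_t bytes) {
    allocated_ += bytes;
    for (AllocationObserver* observer : observers_) {
      if (allocated_ >= observer->step_size()) observer->Step(allocated_);
    }
    // Apply removals requested during the loop above.
    for (AllocationObserver* removed : pending_removed_) {
      observers_.erase(
          std::remove(observers_.begin(), observers_.end(), removed),
          observers_.end());
    }
    pending_removed_.clear();
  }

 private:
  std::vector<AllocationObserver*> observers_;
  std::vector<AllocationObserver*> pending_removed_;
  size_t allocated_ = 0;
};

// One-shot observer: schedules some work exactly once, then removes itself.
class StartTaskObserver : public AllocationObserver {
 public:
  StartTaskObserver(ObserverRegistry* registry, std::function<void()> task)
      : AllocationObserver(1024), registry_(registry), task_(std::move(task)) {}

  void Step(size_t) override {
    task_();                  // e.g. schedule a background allocation task
    registry_->Remove(this);  // ensure Step() never fires again
  }

 private:
  ObserverRegistry* registry_;
  std::function<void()> task_;
};

int main() {
  ObserverRegistry registry;
  StartTaskObserver observer(&registry,
                             [] { std::cout << "task scheduled\n"; });
  registry.Add(&observer);
  registry.NotifyAllocation(2048);  // fires Step() once; observer removes itself
  registry.NotifyAllocation(2048);  // no observers left; prints nothing
}
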
@@ -48,6 +48,7 @@ void AllocationCounter::RemoveAllocationObserver(AllocationObserver* observer) {
   DCHECK_NE(observers_.end(), it);

   if (step_in_progress_) {
+    DCHECK_EQ(pending_removed_.count(observer), 0);
     pending_removed_.insert(observer);
     return;
   }
......
@@ -5277,6 +5277,22 @@ void Heap::ReplaceReadOnlySpace(SharedReadOnlySpace* space) {
   read_only_space_ = space;
 }

+class StressConcurrentAllocationObserver : public AllocationObserver {
+ public:
+  explicit StressConcurrentAllocationObserver(Heap* heap)
+      : AllocationObserver(1024), heap_(heap) {}
+
+  void Step(int bytes_allocated, Address, size_t) override {
+    DCHECK(heap_->deserialization_complete());
+    StressConcurrentAllocatorTask::Schedule(heap_->isolate());
+    heap_->RemoveAllocationObserversFromAllSpaces(this, this);
+    heap_->need_to_remove_stress_concurrent_allocation_observer_ = false;
+  }
+
+ private:
+  Heap* heap_;
+};
+
 void Heap::SetUpSpaces() {
   // Ensure SetUpFromReadOnlySpace has been ran.
   DCHECK_NOT_NULL(read_only_space_);
@@ -5402,7 +5418,12 @@ void Heap::NotifyDeserializationComplete() {
   }

   if (FLAG_stress_concurrent_allocation) {
-    StressConcurrentAllocatorTask::Schedule(isolate());
+    stress_concurrent_allocation_observer_.reset(
+        new StressConcurrentAllocationObserver(this));
+    AddAllocationObserversToAllSpaces(
+        stress_concurrent_allocation_observer_.get(),
+        stress_concurrent_allocation_observer_.get());
+    need_to_remove_stress_concurrent_allocation_observer_ = true;
   }

   deserialization_complete_ = true;
@@ -5519,6 +5540,13 @@ void Heap::TearDown() {
   scavenge_task_observer_.reset();
   scavenge_job_.reset();

+  if (need_to_remove_stress_concurrent_allocation_observer_) {
+    RemoveAllocationObserversFromAllSpaces(
+        stress_concurrent_allocation_observer_.get(),
+        stress_concurrent_allocation_observer_.get());
+  }
+  stress_concurrent_allocation_observer_.reset();
+
   if (FLAG_stress_marking > 0) {
     RemoveAllocationObserversFromAllSpaces(stress_marking_observer_,
                                            stress_marking_observer_);
......
@@ -2196,10 +2196,13 @@
   std::unique_ptr<ObjectStats> dead_object_stats_;
   std::unique_ptr<ScavengeJob> scavenge_job_;
   std::unique_ptr<AllocationObserver> scavenge_task_observer_;
+  std::unique_ptr<AllocationObserver> stress_concurrent_allocation_observer_;
   std::unique_ptr<LocalEmbedderHeapTracer> local_embedder_heap_tracer_;
   std::unique_ptr<MarkingBarrier> marking_barrier_;

   StrongRootsList* strong_roots_list_ = nullptr;

+  bool need_to_remove_stress_concurrent_allocation_observer_ = false;
+
   // This counter is increased before each GC and never reset.
   // To account for the bytes allocated since the last GC, use the
   // NewSpaceAllocationCounter() function.
@@ -2319,6 +2322,7 @@
   friend class ReadOnlyRoots;
   friend class Scavenger;
   friend class ScavengerCollector;
+  friend class StressConcurrentAllocationObserver;
   friend class Space;
   friend class Sweeper;
   friend class heap::TestMemoryAllocatorScope;
......
@@ -349,10 +349,13 @@ void SpaceWithLinearArea::AddAllocationObserver(AllocationObserver* observer) {

 void SpaceWithLinearArea::RemoveAllocationObserver(
     AllocationObserver* observer) {
-  DCHECK(!allocation_counter_.IsStepInProgress());
-  AdvanceAllocationObservers();
-  Space::RemoveAllocationObserver(observer);
-  UpdateInlineAllocationLimit(0);
+  if (!allocation_counter_.IsStepInProgress()) {
+    AdvanceAllocationObservers();
+    Space::RemoveAllocationObserver(observer);
+    UpdateInlineAllocationLimit(0);
+  } else {
+    Space::RemoveAllocationObserver(observer);
+  }
 }

 void SpaceWithLinearArea::PauseAllocationObservers() {
@@ -426,8 +429,9 @@ void SpaceWithLinearArea::InvokeAllocationObservers(
     DCHECK_EQ(saved_allocation_info.limit(), allocation_info_.limit());
   }

-  DCHECK_LT(allocation_info_.limit() - allocation_info_.start(),
-            allocation_counter_.NextBytes());
+  DCHECK_IMPLIES(allocation_counter_.IsActive(),
+                 (allocation_info_.limit() - allocation_info_.start()) <
+                     allocation_counter_.NextBytes());
 }

 int MemoryChunk::FreeListsLength() {
......
@@ -5496,8 +5496,10 @@ AllocationResult HeapTester::AllocateByteArrayForTest(
 }

 bool HeapTester::CodeEnsureLinearAllocationArea(Heap* heap, int size_in_bytes) {
-  return heap->code_space()->EnsureLabMain(size_in_bytes,
-                                           AllocationOrigin::kRuntime);
+  bool result = heap->code_space()->EnsureLabMain(size_in_bytes,
+                                                  AllocationOrigin::kRuntime);
+  heap->code_space()->UpdateInlineAllocationLimit(0);
+  return result;
 }

 HEAP_TEST(Regress587004) {
......