Commit ca41b837 authored by Dominik Inführ, committed by Commit Bot

[heap] Make Heap::max_old_generation_size_ atomic

Fixes a race between concurrent allocation and incrementing
max_old_generation_size_ in InvokeNearHeapLimitCallback().

Bug: v8:10315
Change-Id: If3586fd6164e784e66b0815d0200a27798127649
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2352771
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#69371}
parent 3a16caa5
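
The change below converts max_old_generation_size_ to std::atomic<size_t> and routes every read and write through relaxed accessors, so background allocation threads can read the limit while the main thread raises it from a near-heap-limit callback. A minimal standalone sketch of that accessor pattern, assuming a simplified stand-in class (HeapLike and its comments are illustrative only, not the actual V8 declarations):

#include <atomic>
#include <cstddef>

// Illustrative stand-in for the Heap class touched by this patch.
class HeapLike {
 public:
  // Readers (e.g. concurrent allocation paths) take an atomic snapshot of
  // the limit; relaxed ordering is enough because no other memory is
  // published through this variable.
  size_t max_old_generation_size() const {
    return max_old_generation_size_.load(std::memory_order_relaxed);
  }

  // The writer publishes a new limit atomically; the store itself is not a
  // synchronization point.
  void set_max_old_generation_size(size_t value) {
    max_old_generation_size_.store(value, std::memory_order_relaxed);
  }

 private:
  std::atomic<size_t> max_old_generation_size_{0};
};
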
@@ -196,7 +196,7 @@ Heap::Heap()
       external_string_table_(this),
       collection_barrier_(this) {
   // Ensure old_generation_size_ is a multiple of kPageSize.
-  DCHECK_EQ(0, max_old_generation_size_ & (Page::kPageSize - 1));
+  DCHECK_EQ(0, max_old_generation_size() & (Page::kPageSize - 1));
 
   set_native_contexts_list(Smi::zero());
   set_allocation_sites_list(Smi::zero());
@@ -213,7 +213,7 @@ size_t Heap::MaxReserved() {
   const size_t kMaxNewLargeObjectSpaceSize = max_semi_space_size_;
   return static_cast<size_t>(2 * max_semi_space_size_ +
                              kMaxNewLargeObjectSpaceSize +
-                             max_old_generation_size_);
+                             max_old_generation_size());
 }
 
 size_t Heap::YoungGenerationSizeFromOldGenerationSize(size_t old_generation) {
@@ -395,7 +395,7 @@ size_t Heap::Available() {
 
 bool Heap::CanExpandOldGeneration(size_t size) {
   if (force_oom_) return false;
-  if (OldGenerationCapacity() + size > max_old_generation_size_) return false;
+  if (OldGenerationCapacity() + size > max_old_generation_size()) return false;
   // The OldGenerationCapacity does not account compaction spaces used
   // during evacuation. Ensure that expanding the old generation does push
   // the total allocated memory size over the maximum heap size.
@@ -1644,9 +1644,9 @@ bool Heap::CollectGarbage(AllocationSpace space,
     if (deserialization_complete_) {
       memory_reducer_->NotifyMarkCompact(event);
     }
-    if (initial_max_old_generation_size_ < max_old_generation_size_ &&
+    if (initial_max_old_generation_size_ < max_old_generation_size() &&
         used_memory_after < initial_max_old_generation_size_threshold_) {
-      max_old_generation_size_ = initial_max_old_generation_size_;
+      set_max_old_generation_size(initial_max_old_generation_size_);
     }
   }
@@ -2135,7 +2135,7 @@ void Heap::RecomputeLimits(GarbageCollector collector) {
   double v8_mutator_speed =
       tracer()->CurrentOldGenerationAllocationThroughputInBytesPerMillisecond();
   double v8_growing_factor = MemoryController<V8HeapTrait>::GrowingFactor(
-      this, max_old_generation_size_, v8_gc_speed, v8_mutator_speed);
+      this, max_old_generation_size(), v8_gc_speed, v8_mutator_speed);
   double global_growing_factor = 0;
   if (UseGlobalMemoryScheduling()) {
     DCHECK_NOT_NULL(local_embedder_heap_tracer());
@@ -2161,7 +2161,7 @@ void Heap::RecomputeLimits(GarbageCollector collector) {
     old_generation_allocation_limit_ =
         MemoryController<V8HeapTrait>::CalculateAllocationLimit(
             this, old_gen_size, min_old_generation_size_,
-            max_old_generation_size_, new_space_capacity, v8_growing_factor,
+            max_old_generation_size(), new_space_capacity, v8_growing_factor,
            mode);
     if (UseGlobalMemoryScheduling()) {
       DCHECK_GT(global_growing_factor, 0);
@@ -2178,7 +2178,7 @@ void Heap::RecomputeLimits(GarbageCollector collector) {
     size_t new_old_generation_limit =
         MemoryController<V8HeapTrait>::CalculateAllocationLimit(
             this, old_gen_size, min_old_generation_size_,
-            max_old_generation_size_, new_space_capacity, v8_growing_factor,
+            max_old_generation_size(), new_space_capacity, v8_growing_factor,
            mode);
     if (new_old_generation_limit < old_generation_allocation_limit_) {
       old_generation_allocation_limit_ = new_old_generation_limit;
@@ -3386,7 +3386,7 @@ bool Heap::IsIneffectiveMarkCompact(size_t old_generation_size,
   const double kHighHeapPercentage = 0.8;
   const double kLowMutatorUtilization = 0.4;
   return old_generation_size >=
-             kHighHeapPercentage * max_old_generation_size_ &&
+             kHighHeapPercentage * max_old_generation_size() &&
          mutator_utilization < kLowMutatorUtilization;
 }
@@ -3425,7 +3425,7 @@ bool Heap::HasHighFragmentation(size_t used, size_t committed) {
 }
 
 bool Heap::ShouldOptimizeForMemoryUsage() {
-  const size_t kOldGenerationSlack = max_old_generation_size_ / 8;
+  const size_t kOldGenerationSlack = max_old_generation_size() / 8;
   return FLAG_optimize_for_size || isolate()->IsIsolateInBackground() ||
          isolate()->IsMemorySavingsModeActive() || HighMemoryPressure() ||
          !CanExpandOldGeneration(kOldGenerationSlack);
@@ -3924,11 +3924,11 @@ bool Heap::InvokeNearHeapLimitCallback() {
     v8::NearHeapLimitCallback callback =
         near_heap_limit_callbacks_.back().first;
     void* data = near_heap_limit_callbacks_.back().second;
-    size_t heap_limit = callback(data, max_old_generation_size_,
+    size_t heap_limit = callback(data, max_old_generation_size(),
                                  initial_max_old_generation_size_);
-    if (heap_limit > max_old_generation_size_) {
-      max_old_generation_size_ =
-          Min(heap_limit, AllocatorLimitOnMaxOldGenerationSize());
+    if (heap_limit > max_old_generation_size()) {
+      set_max_old_generation_size(
+          Min(heap_limit, AllocatorLimitOnMaxOldGenerationSize()));
       return true;
     }
   }
@@ -4652,30 +4652,31 @@ void Heap::ConfigureHeap(const v8::ResourceConstraints& constraints) {
   // Initialize max_old_generation_size_ and max_global_memory_.
   {
-    max_old_generation_size_ = 700ul * (kSystemPointerSize / 4) * MB;
+    size_t max_old_generation_size = 700ul * (kSystemPointerSize / 4) * MB;
     if (constraints.max_old_generation_size_in_bytes() > 0) {
-      max_old_generation_size_ = constraints.max_old_generation_size_in_bytes();
+      max_old_generation_size = constraints.max_old_generation_size_in_bytes();
     }
     if (FLAG_max_old_space_size > 0) {
-      max_old_generation_size_ =
+      max_old_generation_size =
          static_cast<size_t>(FLAG_max_old_space_size) * MB;
     } else if (FLAG_max_heap_size > 0) {
       size_t max_heap_size = static_cast<size_t>(FLAG_max_heap_size) * MB;
       size_t young_generation_size =
           YoungGenerationSizeFromSemiSpaceSize(max_semi_space_size_);
-      max_old_generation_size_ = max_heap_size > young_generation_size
+      max_old_generation_size = max_heap_size > young_generation_size
                                     ? max_heap_size - young_generation_size
                                     : 0;
     }
-    max_old_generation_size_ =
-        Max(max_old_generation_size_, MinOldGenerationSize());
-    max_old_generation_size_ =
-        Min(max_old_generation_size_, AllocatorLimitOnMaxOldGenerationSize());
-    max_old_generation_size_ =
-        RoundDown<Page::kPageSize>(max_old_generation_size_);
+    max_old_generation_size =
+        Max(max_old_generation_size, MinOldGenerationSize());
+    max_old_generation_size =
+        Min(max_old_generation_size, AllocatorLimitOnMaxOldGenerationSize());
+    max_old_generation_size =
+        RoundDown<Page::kPageSize>(max_old_generation_size);
 
     max_global_memory_size_ =
-        GlobalMemorySizeFromV8Size(max_old_generation_size_);
+        GlobalMemorySizeFromV8Size(max_old_generation_size);
+    set_max_old_generation_size(max_old_generation_size);
   }
 
   CHECK_IMPLIES(FLAG_max_heap_size > 0,
@@ -4740,7 +4741,7 @@ void Heap::ConfigureHeap(const v8::ResourceConstraints& constraints) {
     old_generation_size_configured_ = true;
   }
   initial_old_generation_size_ =
-      Min(initial_old_generation_size_, max_old_generation_size_ / 2);
+      Min(initial_old_generation_size_, max_old_generation_size() / 2);
   initial_old_generation_size_ =
       RoundDown<Page::kPageSize>(initial_old_generation_size_);
 }
@@ -4760,7 +4761,7 @@ void Heap::ConfigureHeap(const v8::ResourceConstraints& constraints) {
   old_generation_allocation_limit_ = initial_old_generation_size_;
   global_allocation_limit_ =
       GlobalMemorySizeFromV8Size(old_generation_allocation_limit_);
-  initial_max_old_generation_size_ = max_old_generation_size_;
+  initial_max_old_generation_size_ = max_old_generation_size();
 
   // We rely on being able to allocate new arrays in paged spaces.
   DCHECK(kMaxRegularHeapObjectSize >=
@@ -4886,7 +4887,7 @@ bool Heap::AllocationLimitOvershotByLargeMargin() {
   // with special handling of small heaps.
   const size_t v8_margin =
       Min(Max(old_generation_allocation_limit_ / 2, kMarginForSmallHeaps),
-          (max_old_generation_size_ - old_generation_allocation_limit_) / 2);
+          (max_old_generation_size() - old_generation_allocation_limit_) / 2);
   const size_t global_margin =
       Min(Max(global_allocation_limit_ / 2, kMarginForSmallHeaps),
           (max_global_memory_size_ - global_allocation_limit_) / 2);
...
@@ -702,7 +702,7 @@ class Heap {
   // For post mortem debugging.
   void RememberUnmappedPage(Address page, bool compacted);
 
-  int64_t external_memory_hard_limit() { return max_old_generation_size_ / 2; }
+  int64_t external_memory_hard_limit() { return max_old_generation_size() / 2; }
 
   V8_INLINE int64_t external_memory();
   V8_EXPORT_PRIVATE int64_t external_memory_limit();
@@ -745,8 +745,8 @@ class Heap {
   void RestoreHeapLimit(size_t heap_limit) {
     // Do not set the limit lower than the live size + some slack.
     size_t min_limit = SizeOfObjects() + SizeOfObjects() / 4;
-    max_old_generation_size_ =
-        Min(max_old_generation_size_, Max(heap_limit, min_limit));
+    set_max_old_generation_size(
+        Min(max_old_generation_size(), Max(heap_limit, min_limit)));
   }
 
   // ===========================================================================
@@ -1191,7 +1191,7 @@ class Heap {
   V8_EXPORT_PRIVATE size_t MaxReserved();
   size_t MaxSemiSpaceSize() { return max_semi_space_size_; }
   size_t InitialSemiSpaceSize() { return initial_semispace_size_; }
-  size_t MaxOldGenerationSize() { return max_old_generation_size_; }
+  size_t MaxOldGenerationSize() { return max_old_generation_size(); }
 
   // Limit on the max old generation size imposed by the underlying allocator.
   V8_EXPORT_PRIVATE static size_t AllocatorLimitOnMaxOldGenerationSize();
@@ -1858,6 +1858,14 @@ class Heap {
   size_t global_allocation_limit() const { return global_allocation_limit_; }
 
+  size_t max_old_generation_size() {
+    return max_old_generation_size_.load(std::memory_order_relaxed);
+  }
+
+  void set_max_old_generation_size(size_t value) {
+    max_old_generation_size_.store(value, std::memory_order_relaxed);
+  }
+
   bool always_allocate() { return always_allocate_scope_count_ != 0; }
 
   V8_EXPORT_PRIVATE bool CanExpandOldGeneration(size_t size);
@@ -2002,7 +2010,7 @@ class Heap {
   size_t min_old_generation_size_ = 0;
   // If the old generation size exceeds this limit, then V8 will
   // crash with out-of-memory error.
-  size_t max_old_generation_size_ = 0;
+  std::atomic<size_t> max_old_generation_size_{0};
   // TODO(mlippautz): Clarify whether this should take some embedder
   // configurable limit into account.
   size_t min_global_memory_size_ = 0;
...
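
As a self-contained illustration of the race shape described in the commit message (hypothetical code, not part of V8): one thread repeatedly evaluates an expansion check against the limit while another thread raises the limit, the way InvokeNearHeapLimitCallback() does. With a plain size_t this unsynchronized read/write pair is a data race; with std::atomic<size_t> and relaxed operations both accesses are well defined, which suffices here since only the value itself must be read and written consistently.

#include <atomic>
#include <cstddef>
#include <thread>

// Hypothetical stand-in for Heap::max_old_generation_size_.
std::atomic<size_t> g_max_old_generation_size{256u * 1024 * 1024};

// Shape of the check on a concurrent allocation path
// (compare Heap::CanExpandOldGeneration in the diff above).
bool CanExpand(size_t capacity, size_t size) {
  return capacity + size <=
         g_max_old_generation_size.load(std::memory_order_relaxed);
}

// Shape of the limit bump on the main thread
// (compare Heap::InvokeNearHeapLimitCallback in the diff above).
void RaiseLimit(size_t requested) {
  if (requested > g_max_old_generation_size.load(std::memory_order_relaxed)) {
    g_max_old_generation_size.store(requested, std::memory_order_relaxed);
  }
}

int main() {
  std::thread background([] {
    for (int i = 0; i < 100000; ++i) CanExpand(200u * 1024 * 1024, 4096);
  });
  std::thread limit_raiser([] { RaiseLimit(512u * 1024 * 1024); });
  background.join();
  limit_raiser.join();
  return 0;
}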