Commit 1f0b0ed0 authored by Michael Lippautz, committed by V8 LUCI CQ

Reland "heap: Fix initial GC configuration for C++-only heaps"

This is a reland of 7ef67b2e

Manually checked that the CL was not the culprit breaking
  media_blink_unittests --gtest_filter=WebMediaPlayerImplTest.MemDumpReporting

Original change's description:
> heap: Fix initial GC configuration for C++-only heaps
>
> Heaps in V8 start with a large limit that is shrunk upon young
> generation GCs, based on some liveness estimate. This provides best
> throughput during startup while at the same time finding a reasonable
> first limit.
>
> For C++ (embedder memory) there is no estimate which is why it was
> piggy-backing on V8. This breaks in scenarios where no JS memory is
> allocated.
>
> In this fix we start a memory reducer after embedder memory has hit
> the activation threshold if no GC happened so far. As soon as a single
> Scavenger has happened, we leave it up to the JS estimate to figure
> out a limit. Memory reducing GCs will then find a regular limit based
> on the initial live size.
>
> Drive-by: Give embedders the same activation threshold of 8MB as JS.
>
> Bug: chromium:1217076
> Change-Id: I8469696002ac2af8d75d6b47def062d2608387a1
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2944935
> Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
> Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#75012}

Bug: chromium:1217076
Change-Id: I482d8525379e33095834d5b41be8bb49bdd8a5d4
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2949094
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Auto-Submit: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#75048}
parent 8e602fbf
...@@ -1927,16 +1927,28 @@ void Heap::CompleteSweepingFull() { ...@@ -1927,16 +1927,28 @@ void Heap::CompleteSweepingFull() {
void Heap::StartIncrementalMarkingIfAllocationLimitIsReached( void Heap::StartIncrementalMarkingIfAllocationLimitIsReached(
int gc_flags, const GCCallbackFlags gc_callback_flags) { int gc_flags, const GCCallbackFlags gc_callback_flags) {
if (incremental_marking()->IsStopped()) { if (incremental_marking()->IsStopped()) {
IncrementalMarkingLimit reached_limit = IncrementalMarkingLimitReached(); switch (IncrementalMarkingLimitReached()) {
if (reached_limit == IncrementalMarkingLimit::kSoftLimit) { case IncrementalMarkingLimit::kHardLimit:
incremental_marking()->incremental_marking_job()->ScheduleTask(this); StartIncrementalMarking(
} else if (reached_limit == IncrementalMarkingLimit::kHardLimit) { gc_flags,
StartIncrementalMarking( OldGenerationSpaceAvailable() <= NewSpaceCapacity()
gc_flags, ? GarbageCollectionReason::kAllocationLimit
OldGenerationSpaceAvailable() <= NewSpaceCapacity() : GarbageCollectionReason::kGlobalAllocationLimit,
? GarbageCollectionReason::kAllocationLimit gc_callback_flags);
: GarbageCollectionReason::kGlobalAllocationLimit, break;
gc_callback_flags); case IncrementalMarkingLimit::kSoftLimit:
incremental_marking()->incremental_marking_job()->ScheduleTask(this);
break;
case IncrementalMarkingLimit::kFallbackForEmbedderLimit:
// This is a fallback case where no appropriate limits have been
// configured yet.
MemoryReducer::Event event;
event.type = MemoryReducer::kPossibleGarbage;
event.time_ms = MonotonicallyIncreasingTimeInMs();
memory_reducer()->NotifyPossibleGarbage(event);
break;
case IncrementalMarkingLimit::kNoLimit:
break;
} }
} }
} }
...@@ -4996,12 +5008,14 @@ size_t Heap::OldGenerationSizeOfObjects() { ...@@ -4996,12 +5008,14 @@ size_t Heap::OldGenerationSizeOfObjects() {
return total + lo_space_->SizeOfObjects() + code_lo_space_->SizeOfObjects(); return total + lo_space_->SizeOfObjects() + code_lo_space_->SizeOfObjects();
} }
size_t Heap::EmbedderSizeOfObjects() const {
return local_embedder_heap_tracer()
? local_embedder_heap_tracer()->used_size()
: 0;
}
size_t Heap::GlobalSizeOfObjects() { size_t Heap::GlobalSizeOfObjects() {
const size_t on_heap_size = OldGenerationSizeOfObjects(); return OldGenerationSizeOfObjects() + EmbedderSizeOfObjects();
const size_t embedder_size = local_embedder_heap_tracer()
? local_embedder_heap_tracer()->used_size()
: 0;
return on_heap_size + embedder_size;
} }
uint64_t Heap::AllocatedExternalMemorySinceMarkCompact() { uint64_t Heap::AllocatedExternalMemorySinceMarkCompact() {
...@@ -5152,11 +5166,13 @@ double Heap::PercentToGlobalMemoryLimit() { ...@@ -5152,11 +5166,13 @@ double Heap::PercentToGlobalMemoryLimit() {
return total_bytes > 0 ? (current_bytes / total_bytes) * 100.0 : 0; return total_bytes > 0 ? (current_bytes / total_bytes) * 100.0 : 0;
} }
// This function returns either kNoLimit, kSoftLimit, or kHardLimit. // - kNoLimit means that either incremental marking is disabled or it is too
// The kNoLimit means that either incremental marking is disabled or it is too
// early to start incremental marking. // early to start incremental marking.
// The kSoftLimit means that incremental marking should be started soon. // - kSoftLimit means that incremental marking should be started soon.
// The kHardLimit means that incremental marking should be started immediately. // - kHardLimit means that incremental marking should be started immediately.
// - kFallbackForEmbedderLimit means that incremental marking should be
// started as soon as the embedder does not allocate with high throughput
// anymore.
Heap::IncrementalMarkingLimit Heap::IncrementalMarkingLimitReached() { Heap::IncrementalMarkingLimit Heap::IncrementalMarkingLimitReached() {
// Code using an AlwaysAllocateScope assumes that the GC state does not // Code using an AlwaysAllocateScope assumes that the GC state does not
// change; that implies that no marking steps must be performed. // change; that implies that no marking steps must be performed.
...@@ -5221,6 +5237,15 @@ Heap::IncrementalMarkingLimit Heap::IncrementalMarkingLimitReached() { ...@@ -5221,6 +5237,15 @@ Heap::IncrementalMarkingLimit Heap::IncrementalMarkingLimitReached() {
if (old_generation_space_available > NewSpaceCapacity() && if (old_generation_space_available > NewSpaceCapacity() &&
(!global_memory_available || (!global_memory_available ||
global_memory_available > NewSpaceCapacity())) { global_memory_available > NewSpaceCapacity())) {
if (local_embedder_heap_tracer()->InUse() &&
!old_generation_size_configured_ && gc_count_ == 0) {
// At this point the embedder memory is above the activation
// threshold. No GC happened so far and it's thus unlikely to get a
// configured heap any time soon. Start a memory reducer in this case
// which will wait until the allocation rate is low to trigger garbage
// collection.
return IncrementalMarkingLimit::kFallbackForEmbedderLimit;
}
return IncrementalMarkingLimit::kNoLimit; return IncrementalMarkingLimit::kNoLimit;
} }
if (ShouldOptimizeForMemoryUsage()) { if (ShouldOptimizeForMemoryUsage()) {
......
...@@ -1440,6 +1440,10 @@ class Heap { ...@@ -1440,6 +1440,10 @@ class Heap {
// Excludes external memory held by those objects. // Excludes external memory held by those objects.
V8_EXPORT_PRIVATE size_t OldGenerationSizeOfObjects(); V8_EXPORT_PRIVATE size_t OldGenerationSizeOfObjects();
// Returns the size of objects held by the EmbedderHeapTracer.
V8_EXPORT_PRIVATE size_t EmbedderSizeOfObjects() const;
// Returns the global size of objects (embedder + V8 non-new spaces).
V8_EXPORT_PRIVATE size_t GlobalSizeOfObjects(); V8_EXPORT_PRIVATE size_t GlobalSizeOfObjects();
// We allow incremental marking to overshoot the V8 and global allocation // We allow incremental marking to overshoot the V8 and global allocation
...@@ -2016,7 +2020,12 @@ class Heap { ...@@ -2016,7 +2020,12 @@ class Heap {
double PercentToOldGenerationLimit(); double PercentToOldGenerationLimit();
double PercentToGlobalMemoryLimit(); double PercentToGlobalMemoryLimit();
enum class IncrementalMarkingLimit { kNoLimit, kSoftLimit, kHardLimit }; enum class IncrementalMarkingLimit {
kNoLimit,
kSoftLimit,
kHardLimit,
kFallbackForEmbedderLimit
};
IncrementalMarkingLimit IncrementalMarkingLimitReached(); IncrementalMarkingLimit IncrementalMarkingLimitReached();
bool ShouldStressCompaction() const; bool ShouldStressCompaction() const;
......
...@@ -151,7 +151,7 @@ bool IncrementalMarking::CanBeActivated() { ...@@ -151,7 +151,7 @@ bool IncrementalMarking::CanBeActivated() {
bool IncrementalMarking::IsBelowActivationThresholds() const { bool IncrementalMarking::IsBelowActivationThresholds() const {
return heap_->OldGenerationSizeOfObjects() <= kV8ActivationThreshold && return heap_->OldGenerationSizeOfObjects() <= kV8ActivationThreshold &&
heap_->GlobalSizeOfObjects() <= kGlobalActivationThreshold; heap_->EmbedderSizeOfObjects() <= kEmbedderActivationThreshold;
} }
void IncrementalMarking::Start(GarbageCollectionReason gc_reason) { void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
......
...@@ -75,10 +75,10 @@ class V8_EXPORT_PRIVATE IncrementalMarking final { ...@@ -75,10 +75,10 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
#ifndef DEBUG #ifndef DEBUG
static constexpr size_t kV8ActivationThreshold = 8 * MB; static constexpr size_t kV8ActivationThreshold = 8 * MB;
static constexpr size_t kGlobalActivationThreshold = 16 * MB; static constexpr size_t kEmbedderActivationThreshold = 8 * MB;
#else #else
static constexpr size_t kV8ActivationThreshold = 0; static constexpr size_t kV8ActivationThreshold = 0;
static constexpr size_t kGlobalActivationThreshold = 0; static constexpr size_t kEmbedderActivationThreshold = 0;
#endif #endif
#ifdef V8_ATOMIC_MARKING_STATE #ifdef V8_ATOMIC_MARKING_STATE
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment