Commit dac86be2 authored by Michael Lippautz, committed by Commit Bot

Reland "[heap] Add global memory controller"

Provide a global memory controller used to compute limits for combined
on-heap and embedder memory. The global controller uses the same
mechanism (gc speed, mutator speed) and growing factors as the regular
on-heap controller.

Rely on V8's mechanisms for configured state that stops shrinking the
limit.

This reverts commit 5e043f27.

Tbr: ulan@chromium.org
Bug: chromium:948807
Change-Id: Id4f94e7dcb458d1d0d2f872194f8f3ea0959a73f
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1622968
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#61715}
parent 7d36961f
...@@ -7138,6 +7138,24 @@ class V8_EXPORT EmbedderHeapTracer { ...@@ -7138,6 +7138,24 @@ class V8_EXPORT EmbedderHeapTracer {
virtual void VisitTracedGlobalHandle(const TracedGlobal<Value>& value) = 0; virtual void VisitTracedGlobalHandle(const TracedGlobal<Value>& value) = 0;
}; };
/**
 * Summary of a garbage collection cycle filled in by the embedder. See
 * |TraceEpilogue| on how the summary is reported back to V8.
 */
struct TraceSummary {
  /**
   * Time spent managing the retained memory in milliseconds. This can e.g.
   * include the time tracing through objects in the embedder.
   */
  double time;

  /**
   * Memory retained by the embedder through the |EmbedderHeapTracer|
   * mechanism in bytes.
   */
  size_t allocated_size;
};
virtual ~EmbedderHeapTracer() = default; virtual ~EmbedderHeapTracer() = default;
/** /**
...@@ -7184,9 +7202,12 @@ class V8_EXPORT EmbedderHeapTracer { ...@@ -7184,9 +7202,12 @@ class V8_EXPORT EmbedderHeapTracer {
/** /**
* Called at the end of a GC cycle. * Called at the end of a GC cycle.
* *
* Note that allocation is *not* allowed within |TraceEpilogue|. * Note that allocation is *not* allowed within |TraceEpilogue|. Can be
* overriden to fill a |TraceSummary| that is used by V8 to schedule future
* garbage collections.
*/ */
virtual void TraceEpilogue() = 0; virtual void TraceEpilogue() {}
// Default implementation forwards to the parameterless |TraceEpilogue| so
// that embedders which only override the legacy hook keep working; new
// embedders should override this overload and fill in |trace_summary|.
virtual void TraceEpilogue(TraceSummary* trace_summary) { TraceEpilogue(); }
/** /**
* Called upon entering the final marking pause. No more incremental marking * Called upon entering the final marking pause. No more incremental marking
...@@ -7223,6 +7244,14 @@ class V8_EXPORT EmbedderHeapTracer { ...@@ -7223,6 +7244,14 @@ class V8_EXPORT EmbedderHeapTracer {
*/ */
void GarbageCollectionForTesting(EmbedderStackState stack_state); void GarbageCollectionForTesting(EmbedderStackState stack_state);
/**
 * Called by the embedder to signal newly allocated memory. Not bound to
 * tracing phases. Embedders should trade off when increments are reported as
 * V8 may consult global heuristics on whether to trigger garbage collection
 * on this change.
 */
void IncreaseAllocatedSize(size_t bytes);
/* /*
* Returns the v8::Isolate this tracer is attached too and |nullptr| if it * Returns the v8::Isolate this tracer is attached too and |nullptr| if it
* is not attached to any v8::Isolate. * is not attached to any v8::Isolate.
......
...@@ -41,6 +41,7 @@ ...@@ -41,6 +41,7 @@
#include "src/frames-inl.h" #include "src/frames-inl.h"
#include "src/global-handles.h" #include "src/global-handles.h"
#include "src/globals.h" #include "src/globals.h"
#include "src/heap/embedder-tracing.h"
#include "src/heap/heap-inl.h" #include "src/heap/heap-inl.h"
#include "src/init/bootstrapper.h" #include "src/init/bootstrapper.h"
#include "src/init/icu_util.h" #include "src/init/icu_util.h"
...@@ -10146,6 +10147,17 @@ void EmbedderHeapTracer::GarbageCollectionForTesting( ...@@ -10146,6 +10147,17 @@ void EmbedderHeapTracer::GarbageCollectionForTesting(
kGCCallbackFlagForced); kGCCallbackFlagForced);
} }
void EmbedderHeapTracer::IncreaseAllocatedSize(size_t bytes) {
if (isolate_) {
i::LocalEmbedderHeapTracer* const tracer =
reinterpret_cast<i::Isolate*>(isolate_)
->heap()
->local_embedder_heap_tracer();
DCHECK_NOT_NULL(tracer);
tracer->IncreaseAllocatedSize(bytes);
}
}
void EmbedderHeapTracer::RegisterEmbedderReference( void EmbedderHeapTracer::RegisterEmbedderReference(
const TracedGlobal<v8::Value>& ref) { const TracedGlobal<v8::Value>& ref) {
if (ref.IsEmpty()) return; if (ref.IsEmpty()) return;
......
...@@ -747,6 +747,8 @@ DEFINE_BOOL(huge_max_old_generation_size, false, ...@@ -747,6 +747,8 @@ DEFINE_BOOL(huge_max_old_generation_size, false,
"Increase max size of the old space to 4 GB for x64 systems with" "Increase max size of the old space to 4 GB for x64 systems with"
"the physical memory bigger than 16 GB") "the physical memory bigger than 16 GB")
DEFINE_SIZE_T(initial_old_space_size, 0, "initial old space size (in Mbytes)") DEFINE_SIZE_T(initial_old_space_size, 0, "initial old space size (in Mbytes)")
DEFINE_BOOL(global_gc_scheduling, false,
"enable GC scheduling based on global memory")
DEFINE_BOOL(gc_global, false, "always perform global GCs") DEFINE_BOOL(gc_global, false, "always perform global GCs")
DEFINE_INT(random_gc_interval, 0, DEFINE_INT(random_gc_interval, 0,
"Collect garbage after random(0, X) allocations. It overrides " "Collect garbage after random(0, X) allocations. It overrides "
......
...@@ -5,6 +5,7 @@ ...@@ -5,6 +5,7 @@
#include "src/heap/embedder-tracing.h" #include "src/heap/embedder-tracing.h"
#include "src/base/logging.h" #include "src/base/logging.h"
#include "src/heap/gc-tracer.h"
#include "src/objects/embedder-data-slot.h" #include "src/objects/embedder-data-slot.h"
#include "src/objects/js-objects-inl.h" #include "src/objects/js-objects-inl.h"
...@@ -31,7 +32,17 @@ void LocalEmbedderHeapTracer::TracePrologue( ...@@ -31,7 +32,17 @@ void LocalEmbedderHeapTracer::TracePrologue(
void LocalEmbedderHeapTracer::TraceEpilogue() { void LocalEmbedderHeapTracer::TraceEpilogue() {
if (!InUse()) return; if (!InUse()) return;
remote_tracer_->TraceEpilogue(); EmbedderHeapTracer::TraceSummary summary;
remote_tracer_->TraceEpilogue(&summary);
remote_stats_.allocated_size = summary.allocated_size;
// Force a check next time increased memory is reported. This allows for
// setting limits close to actual heap sizes.
remote_stats_.allocated_size_limit_for_check = 0;
constexpr double kMinReportingTimeMs = 0.5;
if (summary.time > kMinReportingTimeMs) {
isolate_->heap()->tracer()->RecordEmbedderSpeed(summary.allocated_size,
summary.time);
}
} }
void LocalEmbedderHeapTracer::EnterFinalPause() { void LocalEmbedderHeapTracer::EnterFinalPause() {
...@@ -100,5 +111,12 @@ void LocalEmbedderHeapTracer::ProcessingScope::AddWrapperInfoForTesting( ...@@ -100,5 +111,12 @@ void LocalEmbedderHeapTracer::ProcessingScope::AddWrapperInfoForTesting(
FlushWrapperCacheIfFull(); FlushWrapperCacheIfFull();
} }
void LocalEmbedderHeapTracer::StartIncrementalMarkingIfNeeded() {
Heap* heap = isolate_->heap();
heap->StartIncrementalMarkingIfAllocationLimitIsReached(
heap->GCFlagsForIncrementalMarking(),
kGCCallbackScheduleIdleGarbageCollection);
}
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8
...@@ -76,7 +76,27 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final { ...@@ -76,7 +76,27 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
embedder_worklist_empty_ = is_empty; embedder_worklist_empty_ = is_empty;
} }
// Accounts |bytes| of newly reported embedder memory and, once the running
// total crosses the current check limit, considers starting incremental
// marking. The limit is then pushed out by kEmbedderAllocatedThreshold so
// the (comparatively expensive) check is not repeated on every increment.
void IncreaseAllocatedSize(size_t bytes) {
  remote_stats_.allocated_size += bytes;
  remote_stats_.accumulated_allocated_size += bytes;
  if (remote_stats_.allocated_size <=
      remote_stats_.allocated_size_limit_for_check) {
    return;
  }
  StartIncrementalMarkingIfNeeded();
  remote_stats_.allocated_size_limit_for_check =
      remote_stats_.allocated_size + kEmbedderAllocatedThreshold;
}
// Starts incremental marking if the allocation limit (taking embedder
// memory into account) has been reached.
void StartIncrementalMarkingIfNeeded();

// Bytes of embedder memory currently reported as live.
size_t allocated_size() const { return remote_stats_.allocated_size; }

// Total bytes ever reported by the embedder; monotonically increasing.
size_t accumulated_allocated_size() const {
  return remote_stats_.accumulated_allocated_size;
}
private: private:
static constexpr size_t kEmbedderAllocatedThreshold = 128 * KB;
Isolate* const isolate_; Isolate* const isolate_;
EmbedderHeapTracer* remote_tracer_ = nullptr; EmbedderHeapTracer* remote_tracer_ = nullptr;
...@@ -88,6 +108,19 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final { ...@@ -88,6 +108,19 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
// segments of potential embedder fields to move to the main thread. // segments of potential embedder fields to move to the main thread.
bool embedder_worklist_empty_ = false; bool embedder_worklist_empty_ = false;
// Statistics about memory the remote (embedder) tracer manages.
struct RemoteStatistics {
  // Allocated size of objects in bytes reported by the embedder. Updated via
  // TraceSummary at the end of tracing and incrementally when the GC is not
  // in progress.
  size_t allocated_size = 0;
  // Limit for |allocated_size| in bytes to avoid checking for starting a GC
  // on each increment.
  size_t allocated_size_limit_for_check = 0;
  // Totally accumulated bytes allocated by the embedder. Monotonically
  // increasing value. Used to approximate allocation rate.
  size_t accumulated_allocated_size = 0;
} remote_stats_;
friend class EmbedderStackStateScope; friend class EmbedderStackStateScope;
}; };
......
...@@ -191,6 +191,7 @@ void GCTracer::ResetForTesting() { ...@@ -191,6 +191,7 @@ void GCTracer::ResetForTesting() {
recorded_incremental_mark_compacts_.Reset(); recorded_incremental_mark_compacts_.Reset();
recorded_new_generation_allocations_.Reset(); recorded_new_generation_allocations_.Reset();
recorded_old_generation_allocations_.Reset(); recorded_old_generation_allocations_.Reset();
recorded_embedder_generation_allocations_.Reset();
recorded_context_disposal_times_.Reset(); recorded_context_disposal_times_.Reset();
recorded_survival_ratios_.Reset(); recorded_survival_ratios_.Reset();
start_counter_ = 0; start_counter_ = 0;
...@@ -221,7 +222,8 @@ void GCTracer::Start(GarbageCollector collector, ...@@ -221,7 +222,8 @@ void GCTracer::Start(GarbageCollector collector,
previous_ = current_; previous_ = current_;
double start_time = heap_->MonotonicallyIncreasingTimeInMs(); double start_time = heap_->MonotonicallyIncreasingTimeInMs();
SampleAllocation(start_time, heap_->NewSpaceAllocationCounter(), SampleAllocation(start_time, heap_->NewSpaceAllocationCounter(),
heap_->OldGenerationAllocationCounter()); heap_->OldGenerationAllocationCounter(),
heap_->EmbedderAllocationCounter());
switch (collector) { switch (collector) {
case SCAVENGER: case SCAVENGER:
...@@ -375,15 +377,16 @@ void GCTracer::Stop(GarbageCollector collector) { ...@@ -375,15 +377,16 @@ void GCTracer::Stop(GarbageCollector collector) {
} }
} }
void GCTracer::SampleAllocation(double current_ms, void GCTracer::SampleAllocation(double current_ms,
size_t new_space_counter_bytes, size_t new_space_counter_bytes,
size_t old_generation_counter_bytes) { size_t old_generation_counter_bytes,
size_t embedder_allocation_bytes) {
if (allocation_time_ms_ == 0) { if (allocation_time_ms_ == 0) {
// It is the first sample. // It is the first sample.
allocation_time_ms_ = current_ms; allocation_time_ms_ = current_ms;
new_space_allocation_counter_bytes_ = new_space_counter_bytes; new_space_allocation_counter_bytes_ = new_space_counter_bytes;
old_generation_allocation_counter_bytes_ = old_generation_counter_bytes; old_generation_allocation_counter_bytes_ = old_generation_counter_bytes;
embedder_allocation_counter_bytes_ = embedder_allocation_bytes;
return; return;
} }
// This assumes that counters are unsigned integers so that the subtraction // This assumes that counters are unsigned integers so that the subtraction
...@@ -392,6 +395,8 @@ void GCTracer::SampleAllocation(double current_ms, ...@@ -392,6 +395,8 @@ void GCTracer::SampleAllocation(double current_ms,
new_space_counter_bytes - new_space_allocation_counter_bytes_; new_space_counter_bytes - new_space_allocation_counter_bytes_;
size_t old_generation_allocated_bytes = size_t old_generation_allocated_bytes =
old_generation_counter_bytes - old_generation_allocation_counter_bytes_; old_generation_counter_bytes - old_generation_allocation_counter_bytes_;
size_t embedder_allocated_bytes =
embedder_allocation_bytes - embedder_allocation_counter_bytes_;
double duration = current_ms - allocation_time_ms_; double duration = current_ms - allocation_time_ms_;
allocation_time_ms_ = current_ms; allocation_time_ms_ = current_ms;
new_space_allocation_counter_bytes_ = new_space_counter_bytes; new_space_allocation_counter_bytes_ = new_space_counter_bytes;
...@@ -400,9 +405,9 @@ void GCTracer::SampleAllocation(double current_ms, ...@@ -400,9 +405,9 @@ void GCTracer::SampleAllocation(double current_ms,
new_space_allocation_in_bytes_since_gc_ += new_space_allocated_bytes; new_space_allocation_in_bytes_since_gc_ += new_space_allocated_bytes;
old_generation_allocation_in_bytes_since_gc_ += old_generation_allocation_in_bytes_since_gc_ +=
old_generation_allocated_bytes; old_generation_allocated_bytes;
embedder_allocation_in_bytes_since_gc_ += embedder_allocated_bytes;
} }
void GCTracer::AddAllocation(double current_ms) { void GCTracer::AddAllocation(double current_ms) {
allocation_time_ms_ = current_ms; allocation_time_ms_ = current_ms;
if (allocation_duration_since_gc_ > 0) { if (allocation_duration_since_gc_ > 0) {
...@@ -412,10 +417,13 @@ void GCTracer::AddAllocation(double current_ms) { ...@@ -412,10 +417,13 @@ void GCTracer::AddAllocation(double current_ms) {
recorded_old_generation_allocations_.Push( recorded_old_generation_allocations_.Push(
MakeBytesAndDuration(old_generation_allocation_in_bytes_since_gc_, MakeBytesAndDuration(old_generation_allocation_in_bytes_since_gc_,
allocation_duration_since_gc_)); allocation_duration_since_gc_));
recorded_embedder_generation_allocations_.Push(MakeBytesAndDuration(
embedder_allocation_in_bytes_since_gc_, allocation_duration_since_gc_));
} }
allocation_duration_since_gc_ = 0; allocation_duration_since_gc_ = 0;
new_space_allocation_in_bytes_since_gc_ = 0; new_space_allocation_in_bytes_since_gc_ = 0;
old_generation_allocation_in_bytes_since_gc_ = 0; old_generation_allocation_in_bytes_since_gc_ = 0;
embedder_allocation_in_bytes_since_gc_ = 0;
} }
...@@ -881,6 +889,16 @@ void GCTracer::RecordIncrementalMarkingSpeed(size_t bytes, double duration) { ...@@ -881,6 +889,16 @@ void GCTracer::RecordIncrementalMarkingSpeed(size_t bytes, double duration) {
} }
} }
// Records an embedder tracing speed sample (bytes processed per |duration|
// milliseconds). The recorded speed is a running average: the first sample
// is taken as-is, subsequent samples are averaged with the previous value.
// Samples with zero bytes or zero duration carry no information and are
// dropped.
void GCTracer::RecordEmbedderSpeed(size_t bytes, double duration) {
  if (bytes == 0 || duration == 0) return;
  const double current_speed = bytes / duration;
  recorded_embedder_speed_ =
      (recorded_embedder_speed_ == 0.0)
          ? current_speed
          : (recorded_embedder_speed_ + current_speed) / 2;
}
void GCTracer::RecordMutatorUtilization(double mark_compact_end_time, void GCTracer::RecordMutatorUtilization(double mark_compact_end_time,
double mark_compact_duration) { double mark_compact_duration) {
if (previous_mark_compact_end_time_ == 0) { if (previous_mark_compact_end_time_ == 0) {
...@@ -919,7 +937,6 @@ double GCTracer::CurrentMarkCompactMutatorUtilization() const { ...@@ -919,7 +937,6 @@ double GCTracer::CurrentMarkCompactMutatorUtilization() const {
} }
double GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const { double GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const {
const int kConservativeSpeedInBytesPerMillisecond = 128 * KB;
if (recorded_incremental_marking_speed_ != 0) { if (recorded_incremental_marking_speed_ != 0) {
return recorded_incremental_marking_speed_; return recorded_incremental_marking_speed_;
} }
...@@ -929,6 +946,13 @@ double GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const { ...@@ -929,6 +946,13 @@ double GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const {
return kConservativeSpeedInBytesPerMillisecond; return kConservativeSpeedInBytesPerMillisecond;
} }
// Returns the averaged embedder tracing speed, falling back to a
// conservative estimate until at least one sample has been recorded.
double GCTracer::EmbedderSpeedInBytesPerMillisecond() const {
  return recorded_embedder_speed_ != 0.0
             ? recorded_embedder_speed_
             : kConservativeSpeedInBytesPerMillisecond;
}
double GCTracer::ScavengeSpeedInBytesPerMillisecond( double GCTracer::ScavengeSpeedInBytesPerMillisecond(
ScavengeSpeedMode mode) const { ScavengeSpeedMode mode) const {
if (mode == kForAllObjects) { if (mode == kForAllObjects) {
...@@ -975,6 +999,15 @@ double GCTracer::CombinedMarkCompactSpeedInBytesPerMillisecond() { ...@@ -975,6 +999,15 @@ double GCTracer::CombinedMarkCompactSpeedInBytesPerMillisecond() {
return combined_mark_compact_speed_cache_; return combined_mark_compact_speed_cache_;
} }
// Combines two speeds into an effective combined speed (harmonic-mean
// style: the combined rate of two work streams progressing in parallel).
// If |optional_speed| is below the minimum it is treated as noise and
// |default_speed| is returned unchanged.
double GCTracer::CombineSpeedsInBytesPerMillisecond(double default_speed,
                                                    double optional_speed) {
  constexpr double kMinimumSpeed = 0.5;
  if (optional_speed < kMinimumSpeed) return default_speed;
  return default_speed * optional_speed / (default_speed + optional_speed);
}
double GCTracer::NewSpaceAllocationThroughputInBytesPerMillisecond( double GCTracer::NewSpaceAllocationThroughputInBytesPerMillisecond(
double time_ms) const { double time_ms) const {
size_t bytes = new_space_allocation_in_bytes_since_gc_; size_t bytes = new_space_allocation_in_bytes_since_gc_;
...@@ -991,6 +1024,14 @@ double GCTracer::OldGenerationAllocationThroughputInBytesPerMillisecond( ...@@ -991,6 +1024,14 @@ double GCTracer::OldGenerationAllocationThroughputInBytesPerMillisecond(
MakeBytesAndDuration(bytes, durations), time_ms); MakeBytesAndDuration(bytes, durations), time_ms);
} }
double GCTracer::EmbedderAllocationThroughputInBytesPerMillisecond(
double time_ms) const {
size_t bytes = embedder_allocation_in_bytes_since_gc_;
double durations = allocation_duration_since_gc_;
return AverageSpeed(recorded_embedder_generation_allocations_,
MakeBytesAndDuration(bytes, durations), time_ms);
}
double GCTracer::AllocationThroughputInBytesPerMillisecond( double GCTracer::AllocationThroughputInBytesPerMillisecond(
double time_ms) const { double time_ms) const {
return NewSpaceAllocationThroughputInBytesPerMillisecond(time_ms) + return NewSpaceAllocationThroughputInBytesPerMillisecond(time_ms) +
...@@ -1007,6 +1048,12 @@ double GCTracer::CurrentOldGenerationAllocationThroughputInBytesPerMillisecond() ...@@ -1007,6 +1048,12 @@ double GCTracer::CurrentOldGenerationAllocationThroughputInBytesPerMillisecond()
kThroughputTimeFrameMs); kThroughputTimeFrameMs);
} }
// Convenience wrapper: embedder allocation throughput over the last
// kThroughputTimeFrameMs milliseconds.
double GCTracer::CurrentEmbedderAllocationThroughputInBytesPerMillisecond()
    const {
  return EmbedderAllocationThroughputInBytesPerMillisecond(
      kThroughputTimeFrameMs);
}
double GCTracer::ContextDisposalRateInMilliseconds() const { double GCTracer::ContextDisposalRateInMilliseconds() const {
if (recorded_context_disposal_times_.Count() < if (recorded_context_disposal_times_.Count() <
recorded_context_disposal_times_.kSize) recorded_context_disposal_times_.kSize)
......
...@@ -200,6 +200,10 @@ class V8_EXPORT_PRIVATE GCTracer { ...@@ -200,6 +200,10 @@ class V8_EXPORT_PRIVATE GCTracer {
}; };
static const int kThroughputTimeFrameMs = 5000; static const int kThroughputTimeFrameMs = 5000;
static constexpr double kConservativeSpeedInBytesPerMillisecond = 128 * KB;
static double CombineSpeedsInBytesPerMillisecond(double default_speed,
double optional_speed);
static RuntimeCallCounterId RCSCounterFromScope(Scope::ScopeId id); static RuntimeCallCounterId RCSCounterFromScope(Scope::ScopeId id);
...@@ -217,7 +221,8 @@ class V8_EXPORT_PRIVATE GCTracer { ...@@ -217,7 +221,8 @@ class V8_EXPORT_PRIVATE GCTracer {
// Sample and accumulate bytes allocated since the last GC. // Sample and accumulate bytes allocated since the last GC.
void SampleAllocation(double current_ms, size_t new_space_counter_bytes, void SampleAllocation(double current_ms, size_t new_space_counter_bytes,
size_t old_generation_counter_bytes); size_t old_generation_counter_bytes,
size_t embedder_allocation_bytes);
// Log the accumulated new space allocation bytes. // Log the accumulated new space allocation bytes.
void AddAllocation(double current_ms); void AddAllocation(double current_ms);
...@@ -232,9 +237,13 @@ class V8_EXPORT_PRIVATE GCTracer { ...@@ -232,9 +237,13 @@ class V8_EXPORT_PRIVATE GCTracer {
void AddIncrementalMarkingStep(double duration, size_t bytes); void AddIncrementalMarkingStep(double duration, size_t bytes);
// Compute the average incremental marking speed in bytes/millisecond. // Compute the average incremental marking speed in bytes/millisecond.
// Returns 0 if no events have been recorded. // Returns a conservative value if no events have been recorded.
double IncrementalMarkingSpeedInBytesPerMillisecond() const; double IncrementalMarkingSpeedInBytesPerMillisecond() const;
// Compute the average embedder speed in bytes/millisecond.
// Returns a conservative value if no events have been recorded.
double EmbedderSpeedInBytesPerMillisecond() const;
// Compute the average scavenge speed in bytes/millisecond. // Compute the average scavenge speed in bytes/millisecond.
// Returns 0 if no events have been recorded. // Returns 0 if no events have been recorded.
double ScavengeSpeedInBytesPerMillisecond( double ScavengeSpeedInBytesPerMillisecond(
...@@ -268,6 +277,12 @@ class V8_EXPORT_PRIVATE GCTracer { ...@@ -268,6 +277,12 @@ class V8_EXPORT_PRIVATE GCTracer {
double OldGenerationAllocationThroughputInBytesPerMillisecond( double OldGenerationAllocationThroughputInBytesPerMillisecond(
double time_ms = 0) const; double time_ms = 0) const;
// Allocation throughput in the embedder in bytes/millisecond in the
// last time_ms milliseconds. Reported through v8::EmbedderHeapTracer.
// Returns 0 if no allocation events have been recorded.
double EmbedderAllocationThroughputInBytesPerMillisecond(
double time_ms = 0) const;
// Allocation throughput in heap in bytes/millisecond in the last time_ms // Allocation throughput in heap in bytes/millisecond in the last time_ms
// milliseconds. // milliseconds.
// Returns 0 if no allocation events have been recorded. // Returns 0 if no allocation events have been recorded.
...@@ -283,6 +298,11 @@ class V8_EXPORT_PRIVATE GCTracer { ...@@ -283,6 +298,11 @@ class V8_EXPORT_PRIVATE GCTracer {
// Returns 0 if no allocation events have been recorded. // Returns 0 if no allocation events have been recorded.
double CurrentOldGenerationAllocationThroughputInBytesPerMillisecond() const; double CurrentOldGenerationAllocationThroughputInBytesPerMillisecond() const;
// Allocation throughput in the embedder in bytes/milliseconds in the last
// kThroughputTimeFrameMs seconds. Reported through v8::EmbedderHeapTracer.
// Returns 0 if no allocation events have been recorded.
double CurrentEmbedderAllocationThroughputInBytesPerMillisecond() const;
// Computes the context disposal rate in milliseconds. It takes the time // Computes the context disposal rate in milliseconds. It takes the time
// frame of the first recorded context disposal to the current time and // frame of the first recorded context disposal to the current time and
// divides it by the number of recorded events. // divides it by the number of recorded events.
...@@ -323,6 +343,8 @@ class V8_EXPORT_PRIVATE GCTracer { ...@@ -323,6 +343,8 @@ class V8_EXPORT_PRIVATE GCTracer {
void RecordGCPhasesHistograms(TimedHistogram* gc_timer); void RecordGCPhasesHistograms(TimedHistogram* gc_timer);
void RecordEmbedderSpeed(size_t bytes, double duration);
private: private:
FRIEND_TEST(GCTracer, AverageSpeed); FRIEND_TEST(GCTracer, AverageSpeed);
FRIEND_TEST(GCTracerTest, AllocationThroughput); FRIEND_TEST(GCTracerTest, AllocationThroughput);
...@@ -414,6 +436,8 @@ class V8_EXPORT_PRIVATE GCTracer { ...@@ -414,6 +436,8 @@ class V8_EXPORT_PRIVATE GCTracer {
double recorded_incremental_marking_speed_; double recorded_incremental_marking_speed_;
double recorded_embedder_speed_ = 0.0;
// Incremental scopes carry more information than just the duration. The infos // Incremental scopes carry more information than just the duration. The infos
// here are merged back upon starting/stopping the GC tracer. // here are merged back upon starting/stopping the GC tracer.
IncrementalMarkingInfos IncrementalMarkingInfos
...@@ -424,11 +448,13 @@ class V8_EXPORT_PRIVATE GCTracer { ...@@ -424,11 +448,13 @@ class V8_EXPORT_PRIVATE GCTracer {
double allocation_time_ms_; double allocation_time_ms_;
size_t new_space_allocation_counter_bytes_; size_t new_space_allocation_counter_bytes_;
size_t old_generation_allocation_counter_bytes_; size_t old_generation_allocation_counter_bytes_;
size_t embedder_allocation_counter_bytes_;
// Accumulated duration and allocated bytes since the last GC. // Accumulated duration and allocated bytes since the last GC.
double allocation_duration_since_gc_; double allocation_duration_since_gc_;
size_t new_space_allocation_in_bytes_since_gc_; size_t new_space_allocation_in_bytes_since_gc_;
size_t old_generation_allocation_in_bytes_since_gc_; size_t old_generation_allocation_in_bytes_since_gc_;
size_t embedder_allocation_in_bytes_since_gc_;
double combined_mark_compact_speed_cache_; double combined_mark_compact_speed_cache_;
...@@ -448,6 +474,7 @@ class V8_EXPORT_PRIVATE GCTracer { ...@@ -448,6 +474,7 @@ class V8_EXPORT_PRIVATE GCTracer {
base::RingBuffer<BytesAndDuration> recorded_mark_compacts_; base::RingBuffer<BytesAndDuration> recorded_mark_compacts_;
base::RingBuffer<BytesAndDuration> recorded_new_generation_allocations_; base::RingBuffer<BytesAndDuration> recorded_new_generation_allocations_;
base::RingBuffer<BytesAndDuration> recorded_old_generation_allocations_; base::RingBuffer<BytesAndDuration> recorded_old_generation_allocations_;
base::RingBuffer<BytesAndDuration> recorded_embedder_generation_allocations_;
base::RingBuffer<double> recorded_context_disposal_times_; base::RingBuffer<double> recorded_context_disposal_times_;
base::RingBuffer<double> recorded_survival_ratios_; base::RingBuffer<double> recorded_survival_ratios_;
......
...@@ -49,17 +49,17 @@ namespace internal { ...@@ -49,17 +49,17 @@ namespace internal {
// F * (1 - MU / (R * (1 - MU))) = 1 // F * (1 - MU / (R * (1 - MU))) = 1
// F * (R * (1 - MU) - MU) / (R * (1 - MU)) = 1 // F * (R * (1 - MU) - MU) / (R * (1 - MU)) = 1
// F = R * (1 - MU) / (R * (1 - MU) - MU) // F = R * (1 - MU) / (R * (1 - MU) - MU)
double HeapController::GrowingFactor(double gc_speed, double mutator_speed, double MemoryController::GrowingFactor(double gc_speed, double mutator_speed,
double max_factor) { double max_factor) {
DCHECK_LE(min_growing_factor_, max_factor); DCHECK_LE(min_growing_factor_, max_factor);
DCHECK_GE(max_growing_factor_, max_factor); DCHECK_GE(max_growing_factor_, max_factor);
if (gc_speed == 0 || mutator_speed == 0) return max_factor; if (gc_speed == 0 || mutator_speed == 0) return max_factor;
const double speed_ratio = gc_speed / mutator_speed; const double speed_ratio = gc_speed / mutator_speed;
const double a = speed_ratio * (1 - kTargetMutatorUtilization); const double a = speed_ratio * (1 - target_mutator_utlization_);
const double b = const double b = speed_ratio * (1 - target_mutator_utlization_) -
speed_ratio * (1 - kTargetMutatorUtilization) - kTargetMutatorUtilization; target_mutator_utlization_;
// The factor is a / b, but we need to check for small b first. // The factor is a / b, but we need to check for small b first.
double factor = (a < b * max_factor) ? a / b : max_factor; double factor = (a < b * max_factor) ? a / b : max_factor;
...@@ -140,6 +140,31 @@ double HeapController::MaxGrowingFactor(size_t curr_max_size) { ...@@ -140,6 +140,31 @@ double HeapController::MaxGrowingFactor(size_t curr_max_size) {
return factor; return factor;
} }
// Computes the maximum growing factor for the global (V8 + embedder) heap
// based on the configured maximum size in MB. Large configurations get the
// aggressive factor; small ones get a factor linearly interpolated between
// the small-device bounds.
double GlobalMemoryController::MaxGrowingFactor(size_t curr_max_size) {
  constexpr double kMinSmallFactor = 1.3;
  constexpr double kMaxSmallFactor = 2.0;
  constexpr double kHighFactor = 4.0;

  const size_t max_size_in_mb = Max(curr_max_size / MB, kMinSize);

  // Devices with lots of memory allow a high heap growing factor.
  if (max_size_in_mb >= kMaxSize) return kHighFactor;

  DCHECK_GE(max_size_in_mb, kMinSize);
  DCHECK_LT(max_size_in_mb, kMaxSize);

  // Smaller devices linearly scale the factor: (X-A)/(B-A)*(D-C)+C.
  return (max_size_in_mb - kMinSize) * (kMaxSmallFactor - kMinSmallFactor) /
             (kMaxSize - kMinSize) +
         kMinSmallFactor;
}
size_t HeapController::CalculateAllocationLimit( size_t HeapController::CalculateAllocationLimit(
size_t curr_size, size_t max_size, double gc_speed, double mutator_speed, size_t curr_size, size_t max_size, double gc_speed, double mutator_speed,
size_t new_space_capacity, Heap::HeapGrowingMode growing_mode) { size_t new_space_capacity, Heap::HeapGrowingMode growing_mode) {
...@@ -150,7 +175,25 @@ size_t HeapController::CalculateAllocationLimit( ...@@ -150,7 +175,25 @@ size_t HeapController::CalculateAllocationLimit(
Isolate::FromHeap(heap_)->PrintWithTimestamp( Isolate::FromHeap(heap_)->PrintWithTimestamp(
"[%s] factor %.1f based on mu=%.3f, speed_ratio=%.f " "[%s] factor %.1f based on mu=%.3f, speed_ratio=%.f "
"(gc=%.f, mutator=%.f)\n", "(gc=%.f, mutator=%.f)\n",
ControllerName(), factor, kTargetMutatorUtilization, ControllerName(), factor, target_mutator_utlization_,
gc_speed / mutator_speed, gc_speed, mutator_speed);
}
return CalculateAllocationLimitBase(curr_size, max_size, factor,
new_space_capacity, growing_mode);
}
size_t GlobalMemoryController::CalculateAllocationLimit(
size_t curr_size, size_t max_size, double gc_speed, double mutator_speed,
size_t new_space_capacity, Heap::HeapGrowingMode growing_mode) {
const double max_factor = MaxGrowingFactor(max_size);
const double factor = GrowingFactor(gc_speed, mutator_speed, max_factor);
if (FLAG_trace_gc_verbose) {
Isolate::FromHeap(heap_)->PrintWithTimestamp(
"[%s] factor %.1f based on mu=%.3f, speed_ratio=%.f "
"(gc=%.f, mutator=%.f)\n",
ControllerName(), factor, target_mutator_utlization_,
gc_speed / mutator_speed, gc_speed, mutator_speed); gc_speed / mutator_speed, gc_speed, mutator_speed);
} }
......
...@@ -15,31 +15,38 @@ namespace internal { ...@@ -15,31 +15,38 @@ namespace internal {
class V8_EXPORT_PRIVATE MemoryController { class V8_EXPORT_PRIVATE MemoryController {
public: public:
virtual ~MemoryController() = default;
// Computes the growing step when the limit increases. // Computes the growing step when the limit increases.
size_t MinimumAllocationLimitGrowingStep(Heap::HeapGrowingMode growing_mode); static size_t MinimumAllocationLimitGrowingStep(
Heap::HeapGrowingMode growing_mode);
virtual ~MemoryController() = default;
protected: protected:
MemoryController(Heap* heap, double min_growing_factor, MemoryController(Heap* heap, double min_growing_factor,
double max_growing_factor, double max_growing_factor,
double conservative_growing_factor) double conservative_growing_factor,
double target_mutator_utlization)
: heap_(heap), : heap_(heap),
min_growing_factor_(min_growing_factor), min_growing_factor_(min_growing_factor),
max_growing_factor_(max_growing_factor), max_growing_factor_(max_growing_factor),
conservative_growing_factor_(conservative_growing_factor) {} conservative_growing_factor_(conservative_growing_factor),
target_mutator_utlization_(target_mutator_utlization) {}
// Computes the allocation limit to trigger the next garbage collection. // Computes the allocation limit to trigger the next garbage collection.
size_t CalculateAllocationLimitBase(size_t curr_size, size_t max_size, size_t CalculateAllocationLimitBase(size_t curr_size, size_t max_size,
double factor, size_t additional_bytes, double factor, size_t additional_bytes,
Heap::HeapGrowingMode growing_mode); Heap::HeapGrowingMode growing_mode);
double GrowingFactor(double gc_speed, double mutator_speed,
double max_factor);
virtual const char* ControllerName() = 0; virtual const char* ControllerName() = 0;
Heap* const heap_; Heap* const heap_;
const double min_growing_factor_; const double min_growing_factor_;
const double max_growing_factor_; const double max_growing_factor_;
const double conservative_growing_factor_; const double conservative_growing_factor_;
const double target_mutator_utlization_;
}; };
class V8_EXPORT_PRIVATE HeapController : public MemoryController { class V8_EXPORT_PRIVATE HeapController : public MemoryController {
...@@ -47,9 +54,9 @@ class V8_EXPORT_PRIVATE HeapController : public MemoryController { ...@@ -47,9 +54,9 @@ class V8_EXPORT_PRIVATE HeapController : public MemoryController {
// Sizes are in MB. // Sizes are in MB.
static constexpr size_t kMinSize = 128 * Heap::kPointerMultiplier; static constexpr size_t kMinSize = 128 * Heap::kPointerMultiplier;
static constexpr size_t kMaxSize = 1024 * Heap::kPointerMultiplier; static constexpr size_t kMaxSize = 1024 * Heap::kPointerMultiplier;
static constexpr double kTargetMutatorUtilization = 0.97;
explicit HeapController(Heap* heap) : MemoryController(heap, 1.1, 4.0, 1.3) {} explicit HeapController(Heap* heap)
: MemoryController(heap, 1.1, 4.0, 1.3, 0.97) {}
size_t CalculateAllocationLimit(size_t curr_size, size_t max_size, size_t CalculateAllocationLimit(size_t curr_size, size_t max_size,
double gc_speed, double mutator_speed, double gc_speed, double mutator_speed,
...@@ -57,9 +64,6 @@ class V8_EXPORT_PRIVATE HeapController : public MemoryController { ...@@ -57,9 +64,6 @@ class V8_EXPORT_PRIVATE HeapController : public MemoryController {
Heap::HeapGrowingMode growing_mode); Heap::HeapGrowingMode growing_mode);
protected: protected:
double GrowingFactor(double gc_speed, double mutator_speed,
double max_factor);
double MaxGrowingFactor(size_t curr_max_size); double MaxGrowingFactor(size_t curr_max_size);
const char* ControllerName() override { return "HeapController"; } const char* ControllerName() override { return "HeapController"; }
...@@ -70,6 +74,26 @@ class V8_EXPORT_PRIVATE HeapController : public MemoryController { ...@@ -70,6 +74,26 @@ class V8_EXPORT_PRIVATE HeapController : public MemoryController {
FRIEND_TEST(HeapControllerTest, OldGenerationAllocationLimit); FRIEND_TEST(HeapControllerTest, OldGenerationAllocationLimit);
}; };
class V8_EXPORT_PRIVATE GlobalMemoryController : public MemoryController {
public:
// Sizes are in MB.
static constexpr size_t kMinSize = 128 * Heap::kPointerMultiplier;
static constexpr size_t kMaxSize = 1024 * Heap::kPointerMultiplier;
explicit GlobalMemoryController(Heap* heap)
: MemoryController(heap, 1.1, 4.0, 1.3, 0.97) {}
size_t CalculateAllocationLimit(size_t curr_size, size_t max_size,
double gc_speed, double mutator_speed,
size_t new_space_capacity,
Heap::HeapGrowingMode growing_mode);
protected:
double MaxGrowingFactor(size_t curr_max_size);
const char* ControllerName() override { return "GlobalMemoryController"; }
};
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8
......
...@@ -183,6 +183,7 @@ Heap::Heap() ...@@ -183,6 +183,7 @@ Heap::Heap()
Min(max_old_generation_size_, kMaxInitialOldGenerationSize)), Min(max_old_generation_size_, kMaxInitialOldGenerationSize)),
memory_pressure_level_(MemoryPressureLevel::kNone), memory_pressure_level_(MemoryPressureLevel::kNone),
old_generation_allocation_limit_(initial_old_generation_size_), old_generation_allocation_limit_(initial_old_generation_size_),
global_allocation_limit_(initial_old_generation_size_),
global_pretenuring_feedback_(kInitialFeedbackCapacity), global_pretenuring_feedback_(kInitialFeedbackCapacity),
current_gc_callback_flags_(GCCallbackFlags::kNoGCCallbackFlags), current_gc_callback_flags_(GCCallbackFlags::kNoGCCallbackFlags),
is_current_gc_forced_(false), is_current_gc_forced_(false),
...@@ -1526,9 +1527,12 @@ void Heap::StartIncrementalMarkingIfAllocationLimitIsReached( ...@@ -1526,9 +1527,12 @@ void Heap::StartIncrementalMarkingIfAllocationLimitIsReached(
if (reached_limit == IncrementalMarkingLimit::kSoftLimit) { if (reached_limit == IncrementalMarkingLimit::kSoftLimit) {
incremental_marking()->incremental_marking_job()->ScheduleTask(this); incremental_marking()->incremental_marking_job()->ScheduleTask(this);
} else if (reached_limit == IncrementalMarkingLimit::kHardLimit) { } else if (reached_limit == IncrementalMarkingLimit::kHardLimit) {
StartIncrementalMarking(gc_flags, StartIncrementalMarking(
GarbageCollectionReason::kAllocationLimit, gc_flags,
gc_callback_flags); OldGenerationSpaceAvailable() <= new_space_->Capacity()
? GarbageCollectionReason::kAllocationLimit
: GarbageCollectionReason::kGlobalAllocationLimit,
gc_callback_flags);
} }
} }
} }
...@@ -1929,6 +1933,24 @@ bool Heap::PerformGarbageCollection( ...@@ -1929,6 +1933,24 @@ bool Heap::PerformGarbageCollection(
double mutator_speed = double mutator_speed =
tracer()->CurrentOldGenerationAllocationThroughputInBytesPerMillisecond(); tracer()->CurrentOldGenerationAllocationThroughputInBytesPerMillisecond();
size_t old_gen_size = OldGenerationSizeOfObjects(); size_t old_gen_size = OldGenerationSizeOfObjects();
double global_mutator_speed;
double global_gc_speed;
size_t global_memory_size;
if (UseGlobalMemoryScheduling()) {
global_mutator_speed = GCTracer::CombineSpeedsInBytesPerMillisecond(
mutator_speed,
local_embedder_heap_tracer()
? tracer()
->CurrentEmbedderAllocationThroughputInBytesPerMillisecond()
: 0.0);
global_gc_speed = GCTracer::CombineSpeedsInBytesPerMillisecond(
gc_speed, local_embedder_heap_tracer()
? tracer()->EmbedderSpeedInBytesPerMillisecond()
: 0.0);
global_memory_size = GlobalSizeOfObjects();
}
if (collector == MARK_COMPACTOR) { if (collector == MARK_COMPACTOR) {
// Register the amount of external allocated memory. // Register the amount of external allocated memory.
isolate()->isolate_data()->external_memory_at_last_mark_compact_ = isolate()->isolate_data()->external_memory_at_last_mark_compact_ =
...@@ -1941,7 +1963,13 @@ bool Heap::PerformGarbageCollection( ...@@ -1941,7 +1963,13 @@ bool Heap::PerformGarbageCollection(
heap_controller()->CalculateAllocationLimit( heap_controller()->CalculateAllocationLimit(
old_gen_size, max_old_generation_size_, gc_speed, mutator_speed, old_gen_size, max_old_generation_size_, gc_speed, mutator_speed,
new_space()->Capacity(), CurrentHeapGrowingMode()); new_space()->Capacity(), CurrentHeapGrowingMode());
if (UseGlobalMemoryScheduling()) {
global_allocation_limit_ =
global_memory_controller()->CalculateAllocationLimit(
global_memory_size, max_global_memory_size_, global_gc_speed,
global_mutator_speed, new_space()->Capacity(),
CurrentHeapGrowingMode());
}
CheckIneffectiveMarkCompact( CheckIneffectiveMarkCompact(
old_gen_size, tracer()->AverageMarkCompactMutatorUtilization()); old_gen_size, tracer()->AverageMarkCompactMutatorUtilization());
} else if (HasLowYoungGenerationAllocationRate() && } else if (HasLowYoungGenerationAllocationRate() &&
...@@ -1952,6 +1980,16 @@ bool Heap::PerformGarbageCollection( ...@@ -1952,6 +1980,16 @@ bool Heap::PerformGarbageCollection(
if (new_limit < old_generation_allocation_limit_) { if (new_limit < old_generation_allocation_limit_) {
old_generation_allocation_limit_ = new_limit; old_generation_allocation_limit_ = new_limit;
} }
if (UseGlobalMemoryScheduling()) {
const size_t new_global_limit =
global_memory_controller()->CalculateAllocationLimit(
global_memory_size, max_global_memory_size_, global_gc_speed,
global_mutator_speed, new_space()->Capacity(),
CurrentHeapGrowingMode());
if (new_global_limit < global_allocation_limit_) {
global_allocation_limit_ = new_global_limit;
}
}
} }
{ {
...@@ -2608,18 +2646,29 @@ void Heap::UnregisterArrayBuffer(JSArrayBuffer buffer) { ...@@ -2608,18 +2646,29 @@ void Heap::UnregisterArrayBuffer(JSArrayBuffer buffer) {
void Heap::ConfigureInitialOldGenerationSize() { void Heap::ConfigureInitialOldGenerationSize() {
if (!old_generation_size_configured_ && tracer()->SurvivalEventsRecorded()) { if (!old_generation_size_configured_ && tracer()->SurvivalEventsRecorded()) {
const size_t new_limit = const size_t minimum_growing_step =
Max(OldGenerationSizeOfObjects() + MemoryController::MinimumAllocationLimitGrowingStep(
heap_controller()->MinimumAllocationLimitGrowingStep( CurrentHeapGrowingMode());
CurrentHeapGrowingMode()), const size_t new_old_generation_allocation_limit =
Max(OldGenerationSizeOfObjects() + minimum_growing_step,
static_cast<size_t>( static_cast<size_t>(
static_cast<double>(old_generation_allocation_limit_) * static_cast<double>(old_generation_allocation_limit_) *
(tracer()->AverageSurvivalRatio() / 100))); (tracer()->AverageSurvivalRatio() / 100)));
if (new_limit < old_generation_allocation_limit_) { if (new_old_generation_allocation_limit <
old_generation_allocation_limit_ = new_limit; old_generation_allocation_limit_) {
old_generation_allocation_limit_ = new_old_generation_allocation_limit;
} else { } else {
old_generation_size_configured_ = true; old_generation_size_configured_ = true;
} }
if (UseGlobalMemoryScheduling()) {
const size_t new_global_memory_limit = Max(
GlobalSizeOfObjects() + minimum_growing_step,
static_cast<size_t>(static_cast<double>(global_allocation_limit_) *
(tracer()->AverageSurvivalRatio() / 100)));
if (new_global_memory_limit < global_allocation_limit_) {
global_allocation_limit_ = new_global_memory_limit;
}
}
} }
} }
...@@ -3381,7 +3430,8 @@ bool Heap::IdleNotification(double deadline_in_seconds) { ...@@ -3381,7 +3430,8 @@ bool Heap::IdleNotification(double deadline_in_seconds) {
double idle_time_in_ms = deadline_in_ms - start_ms; double idle_time_in_ms = deadline_in_ms - start_ms;
tracer()->SampleAllocation(start_ms, NewSpaceAllocationCounter(), tracer()->SampleAllocation(start_ms, NewSpaceAllocationCounter(),
OldGenerationAllocationCounter()); OldGenerationAllocationCounter(),
EmbedderAllocationCounter());
GCIdleTimeHeapState heap_state = ComputeHeapState(); GCIdleTimeHeapState heap_state = ComputeHeapState();
...@@ -3634,6 +3684,8 @@ const char* Heap::GarbageCollectionReasonToString( ...@@ -3634,6 +3684,8 @@ const char* Heap::GarbageCollectionReasonToString(
return "testing"; return "testing";
case GarbageCollectionReason::kExternalFinalize: case GarbageCollectionReason::kExternalFinalize:
return "external finalize"; return "external finalize";
case GarbageCollectionReason::kGlobalAllocationLimit:
return "global allocation limit";
case GarbageCollectionReason::kUnknown: case GarbageCollectionReason::kUnknown:
return "unknown"; return "unknown";
} }
...@@ -4372,6 +4424,15 @@ size_t Heap::OldGenerationSizeOfObjects() { ...@@ -4372,6 +4424,15 @@ size_t Heap::OldGenerationSizeOfObjects() {
return total + lo_space_->SizeOfObjects(); return total + lo_space_->SizeOfObjects();
} }
size_t Heap::GlobalSizeOfObjects() {
const size_t on_heap_size = OldGenerationSizeOfObjects();
const size_t embedder_size =
local_embedder_heap_tracer()
? local_embedder_heap_tracer()->allocated_size()
: 0;
return on_heap_size + embedder_size;
}
uint64_t Heap::PromotedExternalMemorySize() { uint64_t Heap::PromotedExternalMemorySize() {
IsolateData* isolate_data = isolate()->isolate_data(); IsolateData* isolate_data = isolate()->isolate_data();
if (isolate_data->external_memory_ <= if (isolate_data->external_memory_ <=
...@@ -4431,6 +4492,14 @@ Heap::HeapGrowingMode Heap::CurrentHeapGrowingMode() { ...@@ -4431,6 +4492,14 @@ Heap::HeapGrowingMode Heap::CurrentHeapGrowingMode() {
return Heap::HeapGrowingMode::kDefault; return Heap::HeapGrowingMode::kDefault;
} }
size_t Heap::GlobalMemoryAvailable() {
return UseGlobalMemoryScheduling()
? GlobalSizeOfObjects() < global_allocation_limit_
? global_allocation_limit_ - GlobalSizeOfObjects()
: 0
: 1;
}
// This function returns either kNoLimit, kSoftLimit, or kHardLimit. // This function returns either kNoLimit, kSoftLimit, or kHardLimit.
// The kNoLimit means that either incremental marking is disabled or it is too // The kNoLimit means that either incremental marking is disabled or it is too
// early to start incremental marking. // early to start incremental marking.
...@@ -4491,8 +4560,10 @@ Heap::IncrementalMarkingLimit Heap::IncrementalMarkingLimitReached() { ...@@ -4491,8 +4560,10 @@ Heap::IncrementalMarkingLimit Heap::IncrementalMarkingLimitReached() {
} }
size_t old_generation_space_available = OldGenerationSpaceAvailable(); size_t old_generation_space_available = OldGenerationSpaceAvailable();
const size_t global_memory_available = GlobalMemoryAvailable();
if (old_generation_space_available > new_space_->Capacity()) { if (old_generation_space_available > new_space_->Capacity() &&
(global_memory_available > 0)) {
return IncrementalMarkingLimit::kNoLimit; return IncrementalMarkingLimit::kNoLimit;
} }
if (ShouldOptimizeForMemoryUsage()) { if (ShouldOptimizeForMemoryUsage()) {
...@@ -4504,6 +4575,9 @@ Heap::IncrementalMarkingLimit Heap::IncrementalMarkingLimitReached() { ...@@ -4504,6 +4575,9 @@ Heap::IncrementalMarkingLimit Heap::IncrementalMarkingLimitReached() {
if (old_generation_space_available == 0) { if (old_generation_space_available == 0) {
return IncrementalMarkingLimit::kHardLimit; return IncrementalMarkingLimit::kHardLimit;
} }
if (global_memory_available == 0) {
return IncrementalMarkingLimit::kHardLimit;
}
return IncrementalMarkingLimit::kSoftLimit; return IncrementalMarkingLimit::kSoftLimit;
} }
...@@ -4657,6 +4731,7 @@ void Heap::SetUp() { ...@@ -4657,6 +4731,7 @@ void Heap::SetUp() {
store_buffer_.reset(new StoreBuffer(this)); store_buffer_.reset(new StoreBuffer(this));
heap_controller_.reset(new HeapController(this)); heap_controller_.reset(new HeapController(this));
global_memory_controller_.reset(new GlobalMemoryController(this));
mark_compact_collector_.reset(new MarkCompactCollector(this)); mark_compact_collector_.reset(new MarkCompactCollector(this));
...@@ -4934,6 +5009,7 @@ void Heap::TearDown() { ...@@ -4934,6 +5009,7 @@ void Heap::TearDown() {
} }
heap_controller_.reset(); heap_controller_.reset();
global_memory_controller_.reset();
if (mark_compact_collector_) { if (mark_compact_collector_) {
mark_compact_collector_->TearDown(); mark_compact_collector_->TearDown();
...@@ -5781,6 +5857,12 @@ bool Heap::AllowedToBeMigrated(Map map, HeapObject obj, AllocationSpace dst) { ...@@ -5781,6 +5857,12 @@ bool Heap::AllowedToBeMigrated(Map map, HeapObject obj, AllocationSpace dst) {
UNREACHABLE(); UNREACHABLE();
} }
size_t Heap::EmbedderAllocationCounter() const {
return local_embedder_heap_tracer()
? local_embedder_heap_tracer()->accumulated_allocated_size()
: 0;
}
void Heap::CreateObjectStats() { void Heap::CreateObjectStats() {
if (V8_LIKELY(!TracingFlags::is_gc_stats_enabled())) return; if (V8_LIKELY(!TracingFlags::is_gc_stats_enabled())) return;
if (!live_object_stats_) { if (!live_object_stats_) {
......
...@@ -62,6 +62,7 @@ class ConcurrentMarking; ...@@ -62,6 +62,7 @@ class ConcurrentMarking;
class GCIdleTimeHandler; class GCIdleTimeHandler;
class GCIdleTimeHeapState; class GCIdleTimeHeapState;
class GCTracer; class GCTracer;
class GlobalMemoryController;
class HeapController; class HeapController;
class HeapObjectAllocationTracker; class HeapObjectAllocationTracker;
class HeapObjectsFilter; class HeapObjectsFilter;
...@@ -129,7 +130,8 @@ enum class GarbageCollectionReason { ...@@ -129,7 +130,8 @@ enum class GarbageCollectionReason {
kSamplingProfiler = 19, kSamplingProfiler = 19,
kSnapshotCreator = 20, kSnapshotCreator = 20,
kTesting = 21, kTesting = 21,
kExternalFinalize = 22 kExternalFinalize = 22,
kGlobalAllocationLimit = 23,
// If you add new items here, then update the incremental_marking_reason, // If you add new items here, then update the incremental_marking_reason,
// mark_compact_reason, and scavenge_reason counters in counters.h. // mark_compact_reason, and scavenge_reason counters in counters.h.
// Also update src/tools/metrics/histograms/histograms.xml in chromium. // Also update src/tools/metrics/histograms/histograms.xml in chromium.
...@@ -1147,6 +1149,8 @@ class Heap { ...@@ -1147,6 +1149,8 @@ class Heap {
PromotedSinceLastGC(); PromotedSinceLastGC();
} }
size_t EmbedderAllocationCounter() const;
// This should be used only for testing. // This should be used only for testing.
void set_old_generation_allocation_counter_at_last_gc(size_t new_value) { void set_old_generation_allocation_counter_at_last_gc(size_t new_value) {
old_generation_allocation_counter_at_last_gc_ = new_value; old_generation_allocation_counter_at_last_gc_ = new_value;
...@@ -1178,6 +1182,8 @@ class Heap { ...@@ -1178,6 +1182,8 @@ class Heap {
// Excludes external memory held by those objects. // Excludes external memory held by those objects.
V8_EXPORT_PRIVATE size_t OldGenerationSizeOfObjects(); V8_EXPORT_PRIVATE size_t OldGenerationSizeOfObjects();
V8_EXPORT_PRIVATE size_t GlobalSizeOfObjects();
// =========================================================================== // ===========================================================================
// Prologue/epilogue callback methods.======================================== // Prologue/epilogue callback methods.========================================
// =========================================================================== // ===========================================================================
...@@ -1687,6 +1693,9 @@ class Heap { ...@@ -1687,6 +1693,9 @@ class Heap {
// =========================================================================== // ===========================================================================
HeapController* heap_controller() { return heap_controller_.get(); } HeapController* heap_controller() { return heap_controller_.get(); }
GlobalMemoryController* global_memory_controller() const {
return global_memory_controller_.get();
}
MemoryReducer* memory_reducer() { return memory_reducer_.get(); } MemoryReducer* memory_reducer() { return memory_reducer_.get(); }
// For some webpages RAIL mode does not switch from PERFORMANCE_LOAD. // For some webpages RAIL mode does not switch from PERFORMANCE_LOAD.
...@@ -1714,6 +1723,12 @@ class Heap { ...@@ -1714,6 +1723,12 @@ class Heap {
enum class IncrementalMarkingLimit { kNoLimit, kSoftLimit, kHardLimit }; enum class IncrementalMarkingLimit { kNoLimit, kSoftLimit, kHardLimit };
IncrementalMarkingLimit IncrementalMarkingLimitReached(); IncrementalMarkingLimit IncrementalMarkingLimitReached();
bool UseGlobalMemoryScheduling() const {
return FLAG_global_gc_scheduling && local_embedder_heap_tracer();
}
size_t GlobalMemoryAvailable();
// =========================================================================== // ===========================================================================
// Idle notification. ======================================================== // Idle notification. ========================================================
// =========================================================================== // ===========================================================================
...@@ -1807,6 +1822,11 @@ class Heap { ...@@ -1807,6 +1822,11 @@ class Heap {
size_t max_semi_space_size_ = 8 * (kSystemPointerSize / 4) * MB; size_t max_semi_space_size_ = 8 * (kSystemPointerSize / 4) * MB;
size_t initial_semispace_size_ = kMinSemiSpaceSizeInKB * KB; size_t initial_semispace_size_ = kMinSemiSpaceSizeInKB * KB;
size_t max_old_generation_size_ = 700ul * (kSystemPointerSize / 4) * MB; size_t max_old_generation_size_ = 700ul * (kSystemPointerSize / 4) * MB;
// TODO(mlippautz): Clarify whether this should be take some embedder
// configurable limit into account.
size_t max_global_memory_size_ =
Min(static_cast<uint64_t>(std::numeric_limits<size_t>::max()),
static_cast<uint64_t>(max_old_generation_size_) * 2);
size_t initial_max_old_generation_size_; size_t initial_max_old_generation_size_;
size_t initial_max_old_generation_size_threshold_; size_t initial_max_old_generation_size_threshold_;
size_t initial_old_generation_size_; size_t initial_old_generation_size_;
...@@ -1915,6 +1935,7 @@ class Heap { ...@@ -1915,6 +1935,7 @@ class Heap {
// which collector to invoke, before expanding a paged space in the old // which collector to invoke, before expanding a paged space in the old
// generation and on every allocation in large object space. // generation and on every allocation in large object space.
size_t old_generation_allocation_limit_; size_t old_generation_allocation_limit_;
size_t global_allocation_limit_;
// Indicates that inline bump-pointer allocation has been globally disabled // Indicates that inline bump-pointer allocation has been globally disabled
// for all spaces. This is used to disable allocations in generated code. // for all spaces. This is used to disable allocations in generated code.
...@@ -1965,6 +1986,7 @@ class Heap { ...@@ -1965,6 +1986,7 @@ class Heap {
std::unique_ptr<MemoryAllocator> memory_allocator_; std::unique_ptr<MemoryAllocator> memory_allocator_;
std::unique_ptr<StoreBuffer> store_buffer_; std::unique_ptr<StoreBuffer> store_buffer_;
std::unique_ptr<HeapController> heap_controller_; std::unique_ptr<HeapController> heap_controller_;
std::unique_ptr<GlobalMemoryController> global_memory_controller_;
std::unique_ptr<IncrementalMarking> incremental_marking_; std::unique_ptr<IncrementalMarking> incremental_marking_;
std::unique_ptr<ConcurrentMarking> concurrent_marking_; std::unique_ptr<ConcurrentMarking> concurrent_marking_;
std::unique_ptr<GCIdleTimeHandler> gc_idle_time_handler_; std::unique_ptr<GCIdleTimeHandler> gc_idle_time_handler_;
...@@ -2064,6 +2086,7 @@ class Heap { ...@@ -2064,6 +2086,7 @@ class Heap {
friend class ConcurrentMarking; friend class ConcurrentMarking;
friend class GCCallbacksScope; friend class GCCallbacksScope;
friend class GCTracer; friend class GCTracer;
friend class GlobalMemoryController;
friend class HeapController; friend class HeapController;
friend class MemoryController; friend class MemoryController;
friend class HeapIterator; friend class HeapIterator;
......
...@@ -39,7 +39,8 @@ void MemoryReducer::TimerTask::RunInternal() { ...@@ -39,7 +39,8 @@ void MemoryReducer::TimerTask::RunInternal() {
Event event; Event event;
double time_ms = heap->MonotonicallyIncreasingTimeInMs(); double time_ms = heap->MonotonicallyIncreasingTimeInMs();
heap->tracer()->SampleAllocation(time_ms, heap->NewSpaceAllocationCounter(), heap->tracer()->SampleAllocation(time_ms, heap->NewSpaceAllocationCounter(),
heap->OldGenerationAllocationCounter()); heap->OldGenerationAllocationCounter(),
heap->EmbedderAllocationCounter());
bool low_allocation_rate = heap->HasLowAllocationRate(); bool low_allocation_rate = heap->HasLowAllocationRate();
bool optimize_for_memory = heap->ShouldOptimizeForMemoryUsage(); bool optimize_for_memory = heap->ShouldOptimizeForMemoryUsage();
if (FLAG_trace_gc_verbose) { if (FLAG_trace_gc_verbose) {
......
...@@ -17,9 +17,9 @@ namespace internal { ...@@ -17,9 +17,9 @@ namespace internal {
HR(code_cache_reject_reason, V8.CodeCacheRejectReason, 1, 6, 6) \ HR(code_cache_reject_reason, V8.CodeCacheRejectReason, 1, 6, 6) \
HR(errors_thrown_per_context, V8.ErrorsThrownPerContext, 0, 200, 20) \ HR(errors_thrown_per_context, V8.ErrorsThrownPerContext, 0, 200, 20) \
HR(debug_feature_usage, V8.DebugFeatureUsage, 1, 7, 7) \ HR(debug_feature_usage, V8.DebugFeatureUsage, 1, 7, 7) \
HR(incremental_marking_reason, V8.GCIncrementalMarkingReason, 0, 21, 22) \ HR(incremental_marking_reason, V8.GCIncrementalMarkingReason, 0, 22, 23) \
HR(incremental_marking_sum, V8.GCIncrementalMarkingSum, 0, 10000, 101) \ HR(incremental_marking_sum, V8.GCIncrementalMarkingSum, 0, 10000, 101) \
HR(mark_compact_reason, V8.GCMarkCompactReason, 0, 21, 22) \ HR(mark_compact_reason, V8.GCMarkCompactReason, 0, 22, 23) \
HR(gc_finalize_clear, V8.GCFinalizeMC.Clear, 0, 10000, 101) \ HR(gc_finalize_clear, V8.GCFinalizeMC.Clear, 0, 10000, 101) \
HR(gc_finalize_epilogue, V8.GCFinalizeMC.Epilogue, 0, 10000, 101) \ HR(gc_finalize_epilogue, V8.GCFinalizeMC.Epilogue, 0, 10000, 101) \
HR(gc_finalize_evacuate, V8.GCFinalizeMC.Evacuate, 0, 10000, 101) \ HR(gc_finalize_evacuate, V8.GCFinalizeMC.Evacuate, 0, 10000, 101) \
...@@ -34,7 +34,7 @@ namespace internal { ...@@ -34,7 +34,7 @@ namespace internal {
/* Range and bucket matches BlinkGC.MainThreadMarkingThroughput. */ \ /* Range and bucket matches BlinkGC.MainThreadMarkingThroughput. */ \
HR(gc_main_thread_marking_throughput, V8.GCMainThreadMarkingThroughput, 0, \ HR(gc_main_thread_marking_throughput, V8.GCMainThreadMarkingThroughput, 0, \
100000, 50) \ 100000, 50) \
HR(scavenge_reason, V8.GCScavengeReason, 0, 21, 22) \ HR(scavenge_reason, V8.GCScavengeReason, 0, 22, 23) \
HR(young_generation_handling, V8.GCYoungGenerationHandling, 0, 2, 3) \ HR(young_generation_handling, V8.GCYoungGenerationHandling, 0, 2, 3) \
/* Asm/Wasm. */ \ /* Asm/Wasm. */ \
HR(wasm_functions_per_asm_module, V8.WasmFunctionsPerModule.asm, 1, 1000000, \ HR(wasm_functions_per_asm_module, V8.WasmFunctionsPerModule.asm, 1, 1000000, \
......
...@@ -32,7 +32,7 @@ LocalEmbedderHeapTracer::WrapperInfo CreateWrapperInfo() { ...@@ -32,7 +32,7 @@ LocalEmbedderHeapTracer::WrapperInfo CreateWrapperInfo() {
class MockEmbedderHeapTracer : public EmbedderHeapTracer { class MockEmbedderHeapTracer : public EmbedderHeapTracer {
public: public:
MOCK_METHOD1(TracePrologue, void(EmbedderHeapTracer::TraceFlags)); MOCK_METHOD1(TracePrologue, void(EmbedderHeapTracer::TraceFlags));
MOCK_METHOD0(TraceEpilogue, void()); MOCK_METHOD1(TraceEpilogue, void(EmbedderHeapTracer::TraceSummary*));
MOCK_METHOD1(EnterFinalPause, void(EmbedderHeapTracer::EmbedderStackState)); MOCK_METHOD1(EnterFinalPause, void(EmbedderHeapTracer::EmbedderStackState));
MOCK_METHOD0(IsTracingDone, bool()); MOCK_METHOD0(IsTracingDone, bool());
MOCK_METHOD1(RegisterV8References, MOCK_METHOD1(RegisterV8References,
...@@ -80,7 +80,7 @@ TEST(LocalEmbedderHeapTracer, TraceEpilogueForwards) { ...@@ -80,7 +80,7 @@ TEST(LocalEmbedderHeapTracer, TraceEpilogueForwards) {
StrictMock<MockEmbedderHeapTracer> remote_tracer; StrictMock<MockEmbedderHeapTracer> remote_tracer;
LocalEmbedderHeapTracer local_tracer(nullptr); LocalEmbedderHeapTracer local_tracer(nullptr);
local_tracer.SetRemoteTracer(&remote_tracer); local_tracer.SetRemoteTracer(&remote_tracer);
EXPECT_CALL(remote_tracer, TraceEpilogue()); EXPECT_CALL(remote_tracer, TraceEpilogue(_));
local_tracer.TraceEpilogue(); local_tracer.TraceEpilogue();
} }
......
...@@ -53,11 +53,13 @@ TEST(GCTracer, AverageSpeed) { ...@@ -53,11 +53,13 @@ TEST(GCTracer, AverageSpeed) {
namespace { namespace {
constexpr size_t kNoGlobalMemory = 0;
void SampleAndAddAllocaton(v8::internal::GCTracer* tracer, double time_ms, void SampleAndAddAllocaton(v8::internal::GCTracer* tracer, double time_ms,
size_t new_space_counter_bytes, size_t new_space_counter_bytes,
size_t old_generation_counter_bytes) { size_t old_generation_counter_bytes) {
tracer->SampleAllocation(time_ms, new_space_counter_bytes, tracer->SampleAllocation(time_ms, new_space_counter_bytes,
old_generation_counter_bytes); old_generation_counter_bytes, kNoGlobalMemory);
tracer->AddAllocation(time_ms); tracer->AddAllocation(time_ms);
} }
...@@ -70,7 +72,7 @@ TEST_F(GCTracerTest, AllocationThroughput) { ...@@ -70,7 +72,7 @@ TEST_F(GCTracerTest, AllocationThroughput) {
int time1 = 100; int time1 = 100;
size_t counter1 = 1000; size_t counter1 = 1000;
// First sample creates baseline but is not part of the recorded samples. // First sample creates baseline but is not part of the recorded samples.
tracer->SampleAllocation(time1, counter1, counter1); tracer->SampleAllocation(time1, counter1, counter1, kNoGlobalMemory);
SampleAndAddAllocaton(tracer, time1, counter1, counter1); SampleAndAddAllocaton(tracer, time1, counter1, counter1);
int time2 = 200; int time2 = 200;
size_t counter2 = 2000; size_t counter2 = 2000;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment