Commit cbf72529 authored by Michael Lippautz's avatar Michael Lippautz Committed by Commit Bot

[api,heap] Allow decrementing allocated embedder size

Allow the embedder to decrement its allocated bytes count:
- The decrement will be applied to the used bytes value.
- The decrement is ignored for the total allocated bytes.

Bug: chromium:948807
Change-Id: I609ccf81017b693e0db13b499cbf8967f5f8a2c7
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1631428
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#61982}
parent 2e327e90
...@@ -7230,12 +7230,13 @@ class V8_EXPORT EmbedderHeapTracer { ...@@ -7230,12 +7230,13 @@ class V8_EXPORT EmbedderHeapTracer {
void GarbageCollectionForTesting(EmbedderStackState stack_state); void GarbageCollectionForTesting(EmbedderStackState stack_state);
/* /*
* Called by the embedder to signal newly allocated memory. Not bound to * Called by the embedder to signal newly allocated or freed memory. Not bound
* tracing phases. Embedders should trade off when increments are reported as * to tracing phases. Embedders should trade off when increments are reported
* V8 may consult global heuristics on whether to trigger garbage collection * as V8 may consult global heuristics on whether to trigger garbage
* on this change. * collection on this change.
*/ */
void IncreaseAllocatedSize(size_t bytes); void IncreaseAllocatedSize(size_t bytes);
void DecreaseAllocatedSize(size_t bytes);
/* /*
* Returns the v8::Isolate this tracer is attached to and |nullptr| if it * Returns the v8::Isolate this tracer is attached to and |nullptr| if it
......
...@@ -10151,6 +10151,17 @@ void EmbedderHeapTracer::IncreaseAllocatedSize(size_t bytes) { ...@@ -10151,6 +10151,17 @@ void EmbedderHeapTracer::IncreaseAllocatedSize(size_t bytes) {
} }
} }
void EmbedderHeapTracer::DecreaseAllocatedSize(size_t bytes) {
if (isolate_) {
i::LocalEmbedderHeapTracer* const tracer =
reinterpret_cast<i::Isolate*>(isolate_)
->heap()
->local_embedder_heap_tracer();
DCHECK_NOT_NULL(tracer);
tracer->DecreaseAllocatedSize(bytes);
}
}
void EmbedderHeapTracer::RegisterEmbedderReference( void EmbedderHeapTracer::RegisterEmbedderReference(
const TracedGlobal<v8::Value>& ref) { const TracedGlobal<v8::Value>& ref) {
if (ref.IsEmpty()) return; if (ref.IsEmpty()) return;
......
...@@ -34,7 +34,7 @@ void LocalEmbedderHeapTracer::TraceEpilogue() { ...@@ -34,7 +34,7 @@ void LocalEmbedderHeapTracer::TraceEpilogue() {
EmbedderHeapTracer::TraceSummary summary; EmbedderHeapTracer::TraceSummary summary;
remote_tracer_->TraceEpilogue(&summary); remote_tracer_->TraceEpilogue(&summary);
remote_stats_.allocated_size = summary.allocated_size; remote_stats_.used_size = summary.allocated_size;
// Force a check next time increased memory is reported. This allows for // Force a check next time increased memory is reported. This allows for
// setting limits close to actual heap sizes. // setting limits close to actual heap sizes.
remote_stats_.allocated_size_limit_for_check = 0; remote_stats_.allocated_size_limit_for_check = 0;
......
...@@ -77,8 +77,8 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final { ...@@ -77,8 +77,8 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
} }
void IncreaseAllocatedSize(size_t bytes) { void IncreaseAllocatedSize(size_t bytes) {
remote_stats_.used_size += bytes;
remote_stats_.allocated_size += bytes; remote_stats_.allocated_size += bytes;
remote_stats_.accumulated_allocated_size += bytes;
if (remote_stats_.allocated_size > if (remote_stats_.allocated_size >
remote_stats_.allocated_size_limit_for_check) { remote_stats_.allocated_size_limit_for_check) {
StartIncrementalMarkingIfNeeded(); StartIncrementalMarkingIfNeeded();
...@@ -87,12 +87,15 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final { ...@@ -87,12 +87,15 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
} }
} }
// Records that the embedder freed |bytes| of previously reported memory.
// Only |used_size| is decremented; the monotonically increasing
// |allocated_size| total is intentionally left untouched so it can keep
// approximating the allocation rate.
void DecreaseAllocatedSize(size_t bytes) {
// The embedder must never report freeing more than it reported allocating.
DCHECK_GE(remote_stats_.used_size, bytes);
remote_stats_.used_size -= bytes;
}
void StartIncrementalMarkingIfNeeded(); void StartIncrementalMarkingIfNeeded();
size_t used_size() const { return remote_stats_.used_size; }
size_t allocated_size() const { return remote_stats_.allocated_size; } size_t allocated_size() const { return remote_stats_.allocated_size; }
size_t accumulated_allocated_size() const {
return remote_stats_.accumulated_allocated_size;
}
private: private:
static constexpr size_t kEmbedderAllocatedThreshold = 128 * KB; static constexpr size_t kEmbedderAllocatedThreshold = 128 * KB;
...@@ -109,16 +112,16 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final { ...@@ -109,16 +112,16 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
bool embedder_worklist_empty_ = false; bool embedder_worklist_empty_ = false;
struct RemoteStatistics { struct RemoteStatistics {
// Allocated size of objects in bytes reported by the embedder. Updated via // Used size of objects in bytes reported by the embedder. Updated via
// TraceSummary at the end of tracing and incrementally when the GC is not // TraceSummary at the end of tracing and incrementally when the GC is not
// in progress. // in progress.
size_t used_size = 0;
// Totally bytes allocated by the embedder. Monotonically
// increasing value. Used to approximate allocation rate.
size_t allocated_size = 0; size_t allocated_size = 0;
// Limit for |allocated_size_| in bytes to avoid checking for starting a GC // Limit for |allocated_size| in bytes to avoid checking for starting a GC
// on each increment. // on each increment.
size_t allocated_size_limit_for_check = 0; size_t allocated_size_limit_for_check = 0;
// Totally accumulated bytes allocated by the embedder. Monotonically
// increasing value. Used to approximate allocation rate.
size_t accumulated_allocated_size = 0;
} remote_stats_; } remote_stats_;
friend class EmbedderStackStateScope; friend class EmbedderStackStateScope;
......
...@@ -4454,9 +4454,8 @@ size_t Heap::OldGenerationSizeOfObjects() { ...@@ -4454,9 +4454,8 @@ size_t Heap::OldGenerationSizeOfObjects() {
size_t Heap::GlobalSizeOfObjects() { size_t Heap::GlobalSizeOfObjects() {
const size_t on_heap_size = OldGenerationSizeOfObjects(); const size_t on_heap_size = OldGenerationSizeOfObjects();
const size_t embedder_size = const size_t embedder_size = local_embedder_heap_tracer()
local_embedder_heap_tracer() ? local_embedder_heap_tracer()->used_size()
? local_embedder_heap_tracer()->allocated_size()
: 0; : 0;
return on_heap_size + embedder_size; return on_heap_size + embedder_size;
} }
...@@ -5879,7 +5878,7 @@ bool Heap::AllowedToBeMigrated(Map map, HeapObject obj, AllocationSpace dst) { ...@@ -5879,7 +5878,7 @@ bool Heap::AllowedToBeMigrated(Map map, HeapObject obj, AllocationSpace dst) {
size_t Heap::EmbedderAllocationCounter() const { size_t Heap::EmbedderAllocationCounter() const {
return local_embedder_heap_tracer() return local_embedder_heap_tracer()
? local_embedder_heap_tracer()->accumulated_allocated_size() ? local_embedder_heap_tracer()->allocated_size()
: 0; : 0;
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment