Commit 0ebe286f authored by Dominik Inführ, committed by V8 LUCI CQ

[heap] Turn backing_store_bytes_ into uint64_t

As a size_t, the counter can legitimately overflow on 32-bit systems:
the counter is only decremented after all swept backing stores have
been freed on a background thread. Before sweeping finishes, new
backing stores may already have been allocated, which can push the
counter past the 32-bit range.

Bug: v8:11788, chromium:1211437
Change-Id: Id9f3e58b0e84e831fe47109f7deb3a05ae7e489c
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2922242
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#74836}
parent 11f331de
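The race described in the message above can be illustrated with a small standalone sketch (plain C++, not V8 code; the sizes and variable names are made up): increments are applied as soon as a backing store is allocated, while the matching decrements are deferred until concurrent sweeping has finished, so the counter temporarily accounts for both the already-freed old stores and the new ones and can exceed the 32-bit range even though the process never holds that much backing-store memory at once.

#include <atomic>
#include <cstdint>
#include <iostream>

int main() {
  std::atomic<uint64_t> backing_store_bytes{0};

  // 3 GiB of backing stores are allocated and counted immediately.
  backing_store_bytes.fetch_add(3ull * 1024 * 1024 * 1024,
                                std::memory_order_relaxed);

  // They are later freed by the sweeper, but the matching decrement is
  // batched and only applied once concurrent sweeping finishes.
  const uint64_t deferred_decrement = 3ull * 1024 * 1024 * 1024;

  // Meanwhile another 2 GiB of new backing stores are allocated and counted.
  backing_store_bytes.fetch_add(2ull * 1024 * 1024 * 1024,
                                std::memory_order_relaxed);

  // Peak accounted value: 5 GiB, which is larger than UINT32_MAX, so a
  // 32-bit size_t counter would already have wrapped around here.
  std::cout << "peak: " << backing_store_bytes.load(std::memory_order_relaxed)
            << " bytes\n";

  // Once sweeping is done, the deferred decrement brings the counter back.
  backing_store_bytes.fetch_sub(deferred_decrement, std::memory_order_relaxed);
  std::cout << "after sweep: "
            << backing_store_bytes.load(std::memory_order_relaxed)
            << " bytes\n";
  return 0;
}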
@@ -8530,7 +8530,12 @@ void Isolate::GetHeapStatistics(HeapStatistics* heap_statistics) {
   heap_statistics->malloced_memory_ =
       isolate->allocator()->GetCurrentMemoryUsage() +
       isolate->string_table()->GetCurrentMemoryUsage();
-  heap_statistics->external_memory_ = isolate->heap()->backing_store_bytes();
+  // On 32-bit systems backing_store_bytes() might overflow size_t temporarily
+  // due to concurrent array buffer sweeping.
+  heap_statistics->external_memory_ =
+      isolate->heap()->backing_store_bytes() < SIZE_MAX
+          ? static_cast<size_t>(isolate->heap()->backing_store_bytes())
+          : SIZE_MAX;
   heap_statistics->peak_malloced_memory_ =
       isolate->allocator()->GetMaxMemoryUsage();
   heap_statistics->number_of_native_contexts_ = heap->NumberOfNativeContexts();
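The ternary added above clamps the now 64-bit counter into the size_t field exposed through HeapStatistics instead of silently truncating it. A standalone sketch of the same saturating conversion (plain C++, not V8 code; the helper name ClampToSizeT is made up for this sketch):

#include <cstddef>
#include <cstdint>
#include <cstdio>

// Saturate a 64-bit byte count to size_t. A no-op on 64-bit targets; on
// 32-bit targets it reports SIZE_MAX instead of a wrapped-around value.
size_t ClampToSizeT(uint64_t bytes) {
  return bytes < SIZE_MAX ? static_cast<size_t>(bytes) : SIZE_MAX;
}

int main() {
  const uint64_t bytes = 5ull * 1024 * 1024 * 1024;  // 5 GiB
  printf("reported external memory: %zu bytes\n", ClampToSizeT(bytes));
  return 0;
}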
@@ -6,6 +6,8 @@
 #define V8_BASE_ATOMIC_UTILS_H_
 
 #include <limits.h>
+
+#include <atomic>
 #include <type_traits>
 
 #include "src/base/atomicops.h"
@@ -189,16 +191,20 @@ using AsAtomicPointer = AsAtomicPointerImpl<base::AtomicWord>;
 template <typename T,
           typename = typename std::enable_if<std::is_unsigned<T>::value>::type>
-inline void CheckedIncrement(std::atomic<T>* number, T amount) {
-  const T old = number->fetch_add(amount);
+inline void CheckedIncrement(
+    std::atomic<T>* number, T amount,
+    std::memory_order order = std::memory_order_seq_cst) {
+  const T old = number->fetch_add(amount, order);
   DCHECK_GE(old + amount, old);
   USE(old);
 }
 
 template <typename T,
           typename = typename std::enable_if<std::is_unsigned<T>::value>::type>
-inline void CheckedDecrement(std::atomic<T>* number, T amount) {
-  const T old = number->fetch_sub(amount);
+inline void CheckedDecrement(
+    std::atomic<T>* number, T amount,
+    std::memory_order order = std::memory_order_seq_cst) {
+  const T old = number->fetch_sub(amount, order);
   DCHECK_GE(old, amount);
   USE(old);
 }
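A self-contained usage sketch of helpers like the ones above (reimplemented here without the enable_if guard and with assert() standing in for DCHECK so the snippet compiles outside V8):

#include <atomic>
#include <cassert>
#include <cstdint>

// Increment/decrement an unsigned atomic counter while asserting that the
// value never wraps around (increment) or drops below zero (decrement).
template <typename T>
void CheckedIncrement(std::atomic<T>* number, T amount,
                      std::memory_order order = std::memory_order_seq_cst) {
  const T old = number->fetch_add(amount, order);
  assert(old + amount >= old);
  (void)old;
}

template <typename T>
void CheckedDecrement(std::atomic<T>* number, T amount,
                      std::memory_order order = std::memory_order_seq_cst) {
  const T old = number->fetch_sub(amount, order);
  assert(old >= amount);
  (void)old;
}

int main() {
  std::atomic<uint64_t> counter{0};
  // A pure statistics counter does not publish other data, so relaxed
  // ordering is sufficient and avoids the cost of the seq_cst default.
  CheckedIncrement(&counter, uint64_t{4096}, std::memory_order_relaxed);
  CheckedDecrement(&counter, uint64_t{4096}, std::memory_order_relaxed);
  return 0;
}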
@@ -102,6 +102,7 @@ void ArrayBufferSweeper::EnsureFinished() {
   }
 
   UpdateCountersForConcurrentlySweptExtensions();
+  DCHECK_LE(heap_->backing_store_bytes(), SIZE_MAX);
   sweeping_in_progress_ = false;
 }
@@ -5,6 +5,7 @@
 #ifndef V8_HEAP_HEAP_INL_H_
 #define V8_HEAP_HEAP_INL_H_
 
+#include <atomic>
 #include <cmath>
 
 // Clients of this interface shouldn't depend on lots of heap internals.
@@ -686,14 +687,16 @@ int Heap::MaxNumberToStringCacheSize() const {
 void Heap::IncrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                               size_t amount) {
-  base::CheckedIncrement(&backing_store_bytes_, amount);
+  base::CheckedIncrement(&backing_store_bytes_, static_cast<uint64_t>(amount),
+                         std::memory_order_relaxed);
   // TODO(mlippautz): Implement interrupt for global memory allocations that can
   // trigger garbage collections.
 }
 
 void Heap::DecrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                               size_t amount) {
-  base::CheckedDecrement(&backing_store_bytes_, amount);
+  base::CheckedDecrement(&backing_store_bytes_, static_cast<uint64_t>(amount),
+                         std::memory_order_relaxed);
 }
 
 bool Heap::HasDirtyJSFinalizationRegistries() {
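std::memory_order_relaxed is adequate here because backing_store_bytes_ is a pure statistics counter: no other memory access is synchronized through it, only the atomicity of each update matters. A minimal standalone sketch of concurrent relaxed updates to such a counter (plain C++, not V8 code):

#include <atomic>
#include <cstdint>
#include <iostream>
#include <thread>

int main() {
  std::atomic<uint64_t> bytes{0};

  // Two threads account allocations concurrently. Relaxed operations still
  // make every fetch_add atomic; they only drop the (unneeded) ordering of
  // surrounding memory accesses relative to the counter updates.
  auto account = [&bytes] {
    for (int i = 0; i < 100000; i++) {
      bytes.fetch_add(64, std::memory_order_relaxed);
    }
  };
  std::thread t1(account);
  std::thread t2(account);
  t1.join();
  t2.join();

  // After join() the total is exact: 2 * 100000 * 64 bytes.
  std::cout << bytes.load(std::memory_order_relaxed) << "\n";
  return 0;
}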
@@ -548,8 +548,8 @@ void Heap::PrintShortHeapStatistics() {
                CommittedMemoryOfUnmapper() / KB);
   PrintIsolate(isolate_, "External memory reported: %6" PRId64 " KB\n",
                external_memory_.total() / KB);
-  PrintIsolate(isolate_, "Backing store memory: %6zu KB\n",
-               backing_store_bytes_ / KB);
+  PrintIsolate(isolate_, "Backing store memory: %6" PRIu64 " KB\n",
+               backing_store_bytes() / KB);
   PrintIsolate(isolate_, "External memory global %zu KB\n",
                external_memory_callback_() / KB);
   PrintIsolate(isolate_, "Total time spent in GC : %.1f ms\n",
@@ -755,7 +755,9 @@
   V8_EXPORT_PRIVATE size_t YoungArrayBufferBytes();
   V8_EXPORT_PRIVATE size_t OldArrayBufferBytes();
 
-  size_t backing_store_bytes() const { return backing_store_bytes_; }
+  uint64_t backing_store_bytes() const {
+    return backing_store_bytes_.load(std::memory_order_relaxed);
+  }
 
   void CompactWeakArrayLists();
@@ -2136,7 +2138,9 @@
   size_t old_generation_capacity_after_bootstrap_ = 0;
 
   // Backing store bytes (array buffers and external strings).
-  std::atomic<size_t> backing_store_bytes_{0};
+  // Use uint64_t counter since the counter could overflow the 32-bit range
+  // temporarily on 32-bit.
+  std::atomic<uint64_t> backing_store_bytes_{0};
 
   // For keeping track of how much data has survived
   // scavenge since last new space expansion.