Commit 4444874c authored by Thiabaud Engelbrecht, committed by V8 LUCI CQ

[v8] Use |AllocateAtLeast| for resizing v8 zones.

This is part of an ongoing effort to reduce fragmentation in Chrome.  Partition alloc shows v8 zones are a large user of memory in Renderer processes, and that there is fragmentation from these allocations. This CL will reduce this fragmentation by allowing v8 to use all allocated memory for its zones.

Bug: v8:13193, chromium:1238858
Change-Id: Ibeac8bdba9d0e7ff66b14a3dde10e7c87d3cf953
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3889361
Reviewed-by: Adam Klein <adamk@chromium.org>
Commit-Queue: Thiabaud Engelbrecht <thiabaud@google.com>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#83235}
parent 116e217b
...@@ -111,6 +111,8 @@ inline void AlignedFree(void* ptr) { ...@@ -111,6 +111,8 @@ inline void AlignedFree(void* ptr) {
// `AllocateAtLeast()` for a safe version. // `AllocateAtLeast()` for a safe version.
inline size_t MallocUsableSize(void* ptr) { inline size_t MallocUsableSize(void* ptr) {
#if V8_OS_WIN #if V8_OS_WIN
// |_msize| cannot handle a null pointer.
if (!ptr) return 0;
return _msize(ptr); return _msize(ptr);
#elif V8_OS_DARWIN #elif V8_OS_DARWIN
return malloc_size(ptr); return malloc_size(ptr);
...@@ -130,7 +132,7 @@ struct AllocationResult { ...@@ -130,7 +132,7 @@ struct AllocationResult {
// Allocates at least `n * sizeof(T)` uninitialized storage but may allocate // Allocates at least `n * sizeof(T)` uninitialized storage but may allocate
// more which is indicated by the return value. Mimics C++23 // more which is indicated by the return value. Mimics C++23
// `allocate_ate_least()`. // `allocate_at_least()`.
template <typename T> template <typename T>
V8_NODISCARD AllocationResult<T*> AllocateAtLeast(size_t n) { V8_NODISCARD AllocationResult<T*> AllocateAtLeast(size_t n) {
const size_t min_wanted_size = n * sizeof(T); const size_t min_wanted_size = n * sizeof(T);
...@@ -140,6 +142,8 @@ V8_NODISCARD AllocationResult<T*> AllocateAtLeast(size_t n) { ...@@ -140,6 +142,8 @@ V8_NODISCARD AllocationResult<T*> AllocateAtLeast(size_t n) {
#else // V8_HAS_MALLOC_USABLE_SIZE #else // V8_HAS_MALLOC_USABLE_SIZE
const size_t usable_size = MallocUsableSize(memory); const size_t usable_size = MallocUsableSize(memory);
#if V8_USE_UNDEFINED_BEHAVIOR_SANITIZER #if V8_USE_UNDEFINED_BEHAVIOR_SANITIZER
if (memory == nullptr)
return {nullptr, 0};
// UBSan (specifically, -fsanitize=bounds) assumes that any access outside // UBSan (specifically, -fsanitize=bounds) assumes that any access outside
// of the requested size for malloc is UB and will trap in ud2 instructions. // of the requested size for malloc is UB and will trap in ud2 instructions.
// This can be worked around by using `Realloc()` on the specific memory // This can be worked around by using `Realloc()` on the specific memory
......
...@@ -129,6 +129,16 @@ void* AllocWithRetry(size_t size, MallocFn malloc_fn) { ...@@ -129,6 +129,16 @@ void* AllocWithRetry(size_t size, MallocFn malloc_fn) {
return result; return result;
} }
// Allocates at least `size` bytes, retrying up to `kAllocationTries` times.
// Between failed attempts the critical-memory-pressure callback is invoked to
// give the embedder a chance to free memory. On total failure the returned
// result carries a null pointer and a count of zero.
base::AllocationResult<void*> AllocAtLeastWithRetry(size_t size) {
  base::AllocationResult<char*> allocation = {nullptr, 0u};
  for (int attempt = 0; attempt < kAllocationTries; ++attempt) {
    allocation = base::AllocateAtLeast<char>(size);
    // Success on any attempt ends the retry loop immediately.
    if (V8_LIKELY(allocation.ptr != nullptr)) break;
    // Failed attempt: ask the embedder to release memory before retrying.
    // Note this also fires after the final failed attempt, matching the
    // behavior of the other *WithRetry helpers.
    OnCriticalMemoryPressure();
  }
  // Widen char* to void* for the public interface; `count` is the usable
  // size actually provided by the underlying allocator.
  return {allocation.ptr, allocation.count};
}
void* AlignedAllocWithRetry(size_t size, size_t alignment) { void* AlignedAllocWithRetry(size_t size, size_t alignment) {
void* result = nullptr; void* result = nullptr;
for (int i = 0; i < kAllocationTries; ++i) { for (int i = 0; i < kAllocationTries; ++i) {
......
...@@ -94,6 +94,10 @@ using MallocFn = void* (*)(size_t); ...@@ -94,6 +94,10 @@ using MallocFn = void* (*)(size_t);
// Call free to release memory allocated with this function. // Call free to release memory allocated with this function.
void* AllocWithRetry(size_t size, MallocFn = base::Malloc); void* AllocWithRetry(size_t size, MallocFn = base::Malloc);
// Performs an allocation of at least `size` bytes, with retry logic on
// failure. Returns a result holding a null pointer (and zero count) on
// failure. Call free to release memory allocated with this function.
base::AllocationResult<void*> AllocAtLeastWithRetry(size_t size);
V8_EXPORT_PRIVATE void* AlignedAllocWithRetry(size_t size, size_t alignment); V8_EXPORT_PRIVATE void* AlignedAllocWithRetry(size_t size, size_t alignment);
V8_EXPORT_PRIVATE void AlignedFree(void* ptr); V8_EXPORT_PRIVATE void AlignedFree(void* ptr);
......
...@@ -91,7 +91,9 @@ Segment* AccountingAllocator::AllocateSegment(size_t bytes, ...@@ -91,7 +91,9 @@ Segment* AccountingAllocator::AllocateSegment(size_t bytes,
kZonePageSize, PageAllocator::kReadWrite); kZonePageSize, PageAllocator::kReadWrite);
} else { } else {
memory = AllocWithRetry(bytes, zone_backing_malloc_); auto result = AllocAtLeastWithRetry(bytes);
memory = result.ptr;
bytes = result.count;
} }
if (memory == nullptr) return nullptr; if (memory == nullptr) return nullptr;
......
...@@ -96,6 +96,8 @@ TEST_WITH_PLATFORM(AccountingAllocatorOOM, AllocationPlatform) { ...@@ -96,6 +96,8 @@ TEST_WITH_PLATFORM(AccountingAllocatorOOM, AllocationPlatform) {
CHECK_EQ(result == nullptr, platform.oom_callback_called); CHECK_EQ(result == nullptr, platform.oom_callback_called);
} }
// We use |AllocateAtLeast| in the accounting allocator, so we check only that
// we have _at least_ the expected amount of memory allocated.
TEST_WITH_PLATFORM(AccountingAllocatorCurrentAndMax, AllocationPlatform) { TEST_WITH_PLATFORM(AccountingAllocatorCurrentAndMax, AllocationPlatform) {
v8::internal::AccountingAllocator allocator; v8::internal::AccountingAllocator allocator;
static constexpr size_t kAllocationSizes[] = {51, 231, 27}; static constexpr size_t kAllocationSizes[] = {51, 231, 27};
...@@ -108,8 +110,8 @@ TEST_WITH_PLATFORM(AccountingAllocatorCurrentAndMax, AllocationPlatform) { ...@@ -108,8 +110,8 @@ TEST_WITH_PLATFORM(AccountingAllocatorCurrentAndMax, AllocationPlatform) {
for (size_t size : kAllocationSizes) { for (size_t size : kAllocationSizes) {
segments.push_back(allocator.AllocateSegment(size, support_compression)); segments.push_back(allocator.AllocateSegment(size, support_compression));
CHECK_NOT_NULL(segments.back()); CHECK_NOT_NULL(segments.back());
CHECK_EQ(size, segments.back()->total_size()); CHECK_LE(size, segments.back()->total_size());
expected_current += size; expected_current += segments.back()->total_size();
if (expected_current > expected_max) expected_max = expected_current; if (expected_current > expected_max) expected_max = expected_current;
CHECK_EQ(expected_current, allocator.GetCurrentMemoryUsage()); CHECK_EQ(expected_current, allocator.GetCurrentMemoryUsage());
CHECK_EQ(expected_max, allocator.GetMaxMemoryUsage()); CHECK_EQ(expected_max, allocator.GetMaxMemoryUsage());
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment