Commit 2a7f86ed authored by Thiabaud Engelbrecht, committed by V8 LUCI CQ

Reland "[v8] Use |AllocateAtLeast| for resizing v8 zones."

This is a reland of commit 4444874c
This fixes a failure on the UBSan bots caused by assuming |Realloc| is always in-place if the new size is <= the value of |malloc_usable_size|.
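
A minimal standalone sketch of that failure mode, for illustration only (it is not part of this CL and assumes glibc's malloc_usable_size from <malloc.h>): standard realloc may return a different pointer even when the new size fits within the block's usable size, so the returned pointer must always be adopted.

  #include <cstdio>
  #include <cstdlib>
  #include <malloc.h>  // malloc_usable_size (glibc; other platforms differ)

  int main() {
    void* p = std::malloc(51);
    if (p == nullptr) return 1;
    const size_t usable = malloc_usable_size(p);
    // Pre-reland assumption: realloc(p, usable) stays in place, so the old
    // pointer may keep being used. That is not guaranteed by the standard.
    void* q = std::realloc(p, usable);
    if (q == nullptr) {
      std::free(p);
      return 1;
    }
    p = q;  // Adopt the returned pointer; the block may have moved.
    std::printf("requested 51, usable %zu\n", malloc_usable_size(p));
    std::free(p);
    return 0;
  }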

Original change's description:
> [v8] Use |AllocateAtLeast| for resizing v8 zones.
>
> This is part of an ongoing effort to reduce fragmentation in Chrome.  Partition alloc shows v8 zones are a large user of memory in Renderer processes, and that there is fragmentation from these allocations. This CL will reduce this fragmentation by allowing v8 to use all allocated memory for its zones.
>
> Bug: v8:13193, chromium:1238858
> Change-Id: Ibeac8bdba9d0e7ff66b14a3dde10e7c87d3cf953
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3889361
> Reviewed-by: Adam Klein <adamk@chromium.org>
> Commit-Queue: Thiabaud Engelbrecht <thiabaud@google.com>
> Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
> Cr-Commit-Position: refs/heads/main@{#83235}
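
The quoted description mentions fragmentation from zone allocations. A minimal sketch of the idea, for illustration only (CharAllocationResult and AllocateAtLeastBytes are hypothetical stand-ins built on glibc's malloc_usable_size, not the CL's actual types): a zone that records the usable size instead of the requested size can hand out the slack bytes rather than leaving them stranded.

  #include <cstdio>
  #include <cstdlib>
  #include <malloc.h>  // malloc_usable_size (glibc)

  // Hypothetical stand-ins shaped like an AllocationResult-returning allocator.
  struct CharAllocationResult {
    char* ptr;
    size_t count;  // usable bytes, >= the requested size on success
  };

  CharAllocationResult AllocateAtLeastBytes(size_t n) {
    char* p = static_cast<char*>(std::malloc(n));
    if (p == nullptr) return {nullptr, 0};
    return {p, malloc_usable_size(p)};
  }

  int main() {
    CharAllocationResult r = AllocateAtLeastBytes(51);
    if (r.ptr == nullptr) return 1;
    // Accounting for r.count instead of 51 lets the caller use every byte the
    // allocator actually handed back.
    std::printf("requested 51, got %zu usable bytes\n", r.count);
    std::free(r.ptr);
    return 0;
  }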

Bug: v8:13193, chromium:1238858
Change-Id: I923bcbce8403dd7d84642340fd7202087b8a4440
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3910268
Reviewed-by: Adam Klein <adamk@chromium.org>
Commit-Queue: Thiabaud Engelbrecht <thiabaud@google.com>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#83372}
parent 361e8245
@@ -111,6 +111,8 @@ inline void AlignedFree(void* ptr) {
 // `AllocateAtLeast()` for a safe version.
 inline size_t MallocUsableSize(void* ptr) {
 #if V8_OS_WIN
+  // |_msize| cannot handle a null pointer.
+  if (!ptr) return 0;
   return _msize(ptr);
 #elif V8_OS_DARWIN
   return malloc_size(ptr);
@@ -130,7 +132,7 @@ struct AllocationResult {
 // Allocates at least `n * sizeof(T)` uninitialized storage but may allocate
 // more which is indicated by the return value. Mimics C++23
-// `allocate_ate_least()`.
+// `allocate_at_least()`.
 template <typename T>
 V8_NODISCARD AllocationResult<T*> AllocateAtLeast(size_t n) {
   const size_t min_wanted_size = n * sizeof(T);
@@ -140,13 +142,14 @@ V8_NODISCARD AllocationResult<T*> AllocateAtLeast(size_t n) {
 #else  // V8_HAS_MALLOC_USABLE_SIZE
   const size_t usable_size = MallocUsableSize(memory);
 #if V8_USE_UNDEFINED_BEHAVIOR_SANITIZER
+  if (memory == nullptr)
+    return {nullptr, 0};
   // UBSan (specifically, -fsanitize=bounds) assumes that any access outside
   // of the requested size for malloc is UB and will trap in ud2 instructions.
   // This can be worked around by using `Realloc()` on the specific memory
-  // region, assuming that the allocator doesn't actually reallocate the
-  // buffer.
+  // region.
   if (usable_size != min_wanted_size) {
-    CHECK_EQ(static_cast<T*>(Realloc(memory, usable_size)), memory);
+    memory = static_cast<T*>(Realloc(memory, usable_size));
   }
 #endif  // V8_USE_UNDEFINED_BEHAVIOR_SANITIZER
   return {memory, usable_size};
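
For reference, the `allocate_at_least()` that the comment in the hunk above says this mimics is the C++23 standard facility; a minimal sketch, assuming a C++23 standard library is available:

  #include <memory>

  int main() {
    std::allocator<int> alloc;
    auto r = alloc.allocate_at_least(51);  // r.ptr and r.count, with r.count >= 51
    // A container can treat r.count as its real capacity instead of 51.
    alloc.deallocate(r.ptr, r.count);  // any n in [51, r.count] is valid here
    return 0;
  }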
@@ -129,6 +129,16 @@ void* AllocWithRetry(size_t size, MallocFn malloc_fn) {
   return result;
 }
 
+base::AllocationResult<void*> AllocAtLeastWithRetry(size_t size) {
+  base::AllocationResult<char*> result = {nullptr, 0u};
+  for (int i = 0; i < kAllocationTries; ++i) {
+    result = base::AllocateAtLeast<char>(size);
+    if (V8_LIKELY(result.ptr != nullptr)) break;
+    OnCriticalMemoryPressure();
+  }
+  return {result.ptr, result.count};
+}
+
 void* AlignedAllocWithRetry(size_t size, size_t alignment) {
   void* result = nullptr;
   for (int i = 0; i < kAllocationTries; ++i) {
@@ -94,6 +94,10 @@ using MallocFn = void* (*)(size_t);
 // Call free to release memory allocated with this function.
 void* AllocWithRetry(size_t size, MallocFn = base::Malloc);
 
+// Performs a malloc, with retry logic on failure. Returns nullptr on failure.
+// Call free to release memory allocated with this function.
+base::AllocationResult<void*> AllocAtLeastWithRetry(size_t size);
+
 V8_EXPORT_PRIVATE void* AlignedAllocWithRetry(size_t size, size_t alignment);
 V8_EXPORT_PRIVATE void AlignedFree(void* ptr);
@@ -91,7 +91,9 @@ Segment* AccountingAllocator::AllocateSegment(size_t bytes,
                             kZonePageSize, PageAllocator::kReadWrite);
   } else {
-    memory = AllocWithRetry(bytes, zone_backing_malloc_);
+    auto result = AllocAtLeastWithRetry(bytes);
+    memory = result.ptr;
+    bytes = result.count;
   }
   if (memory == nullptr) return nullptr;
@@ -96,6 +96,8 @@ TEST_WITH_PLATFORM(AccountingAllocatorOOM, AllocationPlatform) {
   CHECK_EQ(result == nullptr, platform.oom_callback_called);
 }
 
+// We use |AllocateAtLeast| in the accounting allocator, so we check only that
+// we have _at least_ the expected amount of memory allocated.
 TEST_WITH_PLATFORM(AccountingAllocatorCurrentAndMax, AllocationPlatform) {
   v8::internal::AccountingAllocator allocator;
   static constexpr size_t kAllocationSizes[] = {51, 231, 27};
@@ -108,8 +110,8 @@ TEST_WITH_PLATFORM(AccountingAllocatorCurrentAndMax, AllocationPlatform) {
   for (size_t size : kAllocationSizes) {
     segments.push_back(allocator.AllocateSegment(size, support_compression));
     CHECK_NOT_NULL(segments.back());
-    CHECK_EQ(size, segments.back()->total_size());
-    expected_current += size;
+    CHECK_LE(size, segments.back()->total_size());
+    expected_current += segments.back()->total_size();
     if (expected_current > expected_max) expected_max = expected_current;
     CHECK_EQ(expected_current, allocator.GetCurrentMemoryUsage());
     CHECK_EQ(expected_max, allocator.GetMaxMemoryUsage());
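
A worked example of what the relaxed checks account for (the usable sizes here are hypothetical; real values depend on the malloc implementation): if the requests 51, 231, and 27 are served with 64, 240, and 32 usable bytes, each segment's total_size() only satisfies CHECK_LE against the request, and GetCurrentMemoryUsage() reports 64 + 240 + 32 = 336 rather than 51 + 231 + 27 = 309, which is why expected_current now accumulates total_size().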