Commit 55a8ad0c authored by Clemens Hammacher, committed by Commit Bot

Clean up VirtualMemory allocation

VirtualMemory objects can be moved since https://crrev.com/c/1213062,
so there is no need any more to return them via pointer argument. This
also makes the {AllocVirtualMemory} and {AlignedAllocVirtualMemory}
functions superfluous.

R=ishell@chromium.org, titzer@chromium.org

Bug: v8:8015
Change-Id: Id72921e1c66a6c10be6647194603b8283e010e24
Reviewed-on: https://chromium-review.googlesource.com/1226972
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ben Titzer <titzer@chromium.org>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Cr-Commit-Position: refs/heads/master@{#55947}
parent 6f2402ed
......@@ -228,7 +228,7 @@ bool OnCriticalMemoryPressure(size_t length) {
VirtualMemory::VirtualMemory(v8::PageAllocator* page_allocator, size_t size,
void* hint, size_t alignment)
: page_allocator_(page_allocator), address_(kNullAddress), size_(0) {
: page_allocator_(page_allocator) {
DCHECK_NOT_NULL(page_allocator);
size_t page_size = page_allocator_->AllocatePageSize();
alignment = RoundUp(alignment, page_size);
......@@ -300,26 +300,5 @@ void VirtualMemory::TakeControl(VirtualMemory* from) {
from->Reset();
}
// Reserves a |size|-byte virtual memory region near |hint| via
// |page_allocator|. On success, ownership of the reservation is transferred
// into |result| and true is returned; on failure, |result| is left untouched
// and false is returned.
bool AllocVirtualMemory(v8::PageAllocator* page_allocator, size_t size,
                        void* hint, VirtualMemory* result) {
  VirtualMemory reservation(page_allocator, size, hint);
  if (!reservation.IsReserved()) return false;
  result->TakeControl(&reservation);
  return true;
}
// Reserves a |size|-byte virtual memory region near |hint|, aligned to
// |alignment|, via |page_allocator|. On success, ownership of the reservation
// is transferred into |result| and true is returned; on failure, |result| is
// left untouched and false is returned.
bool AlignedAllocVirtualMemory(v8::PageAllocator* page_allocator, size_t size,
                               size_t alignment, void* hint,
                               VirtualMemory* result) {
  VirtualMemory reservation(page_allocator, size, hint, alignment);
  if (!reservation.IsReserved()) return false;
  result->TakeControl(&reservation);
  return true;
}
} // namespace internal
} // namespace v8
......@@ -240,12 +240,6 @@ class V8_EXPORT_PRIVATE VirtualMemory final {
DISALLOW_COPY_AND_ASSIGN(VirtualMemory);
};
bool AllocVirtualMemory(v8::PageAllocator* page_allocator, size_t size,
void* hint, VirtualMemory* result);
bool AlignedAllocVirtualMemory(v8::PageAllocator* page_allocator, size_t size,
size_t alignment, void* hint,
VirtualMemory* result);
} // namespace internal
} // namespace v8
......
......@@ -160,13 +160,11 @@ void MemoryAllocator::InitializeCodePageAllocator(
}
DCHECK(!kRequiresCodeRange || requested <= kMaximalCodeRangeSize);
VirtualMemory reservation;
void* hint = code_range_address_hint.Pointer()->GetAddressHint(requested);
if (!AlignedAllocVirtualMemory(
page_allocator, requested,
Max(kCodeRangeAreaAlignment, page_allocator->AllocatePageSize()),
hint, &reservation)) {
VirtualMemory reservation(
page_allocator, requested, hint,
Max(kCodeRangeAreaAlignment, page_allocator->AllocatePageSize()));
if (!reservation.IsReserved()) {
V8::FatalProcessOutOfMemory(isolate_,
"CodeRange setup: allocate virtual memory");
}
......@@ -407,11 +405,8 @@ Address MemoryAllocator::AllocateAlignedMemory(
Executability executable, void* hint, VirtualMemory* controller) {
v8::PageAllocator* page_allocator = this->page_allocator(executable);
DCHECK(commit_size <= reserve_size);
VirtualMemory reservation;
if (!AlignedAllocVirtualMemory(page_allocator, reserve_size, alignment, hint,
&reservation)) {
return kNullAddress;
}
VirtualMemory reservation(page_allocator, reserve_size, hint, alignment);
if (!reservation.IsReserved()) return kNullAddress;
Address base = reservation.address();
size_ += reservation.size();
......
......@@ -38,9 +38,8 @@ void StoreBuffer::SetUp() {
const size_t alignment =
std::max<size_t>(kStoreBufferSize, page_allocator->AllocatePageSize());
void* hint = AlignedAddress(heap_->GetRandomMmapAddr(), alignment);
VirtualMemory reservation;
if (!AlignedAllocVirtualMemory(page_allocator, requested_size, alignment,
hint, &reservation)) {
VirtualMemory reservation(page_allocator, requested_size, hint, alignment);
if (!reservation.IsReserved()) {
heap_->FatalProcessOutOfMemory("StoreBuffer::SetUp");
}
......
......@@ -657,10 +657,9 @@ Address NativeModule::AllocateForCode(size_t size) {
Address hint = owned_code_space_.empty() ? kNullAddress
: owned_code_space_.back().end();
owned_code_space_.emplace_back();
owned_code_space_.emplace_back(
wasm_code_manager_->TryAllocate(size, reinterpret_cast<void*>(hint)));
VirtualMemory& new_mem = owned_code_space_.back();
wasm_code_manager_->TryAllocate(size, &new_mem,
reinterpret_cast<void*>(hint));
if (!new_mem.IsReserved()) return kNullAddress;
wasm_code_manager_->AssignRanges(new_mem.address(), new_mem.end(), this);
......@@ -810,20 +809,20 @@ void WasmCodeManager::AssignRanges(Address start, Address end,
lookup_map_.insert(std::make_pair(start, std::make_pair(end, native_module)));
}
void WasmCodeManager::TryAllocate(size_t size, VirtualMemory* ret, void* hint) {
VirtualMemory WasmCodeManager::TryAllocate(size_t size, void* hint) {
v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
DCHECK_GT(size, 0);
size = RoundUp(size, page_allocator->AllocatePageSize());
if (hint == nullptr) hint = page_allocator->GetRandomMmapAddr();
if (!AlignedAllocVirtualMemory(page_allocator, size,
page_allocator->AllocatePageSize(), hint,
ret)) {
DCHECK(!ret->IsReserved());
VirtualMemory mem(page_allocator, size, hint,
page_allocator->AllocatePageSize());
if (mem.IsReserved()) {
TRACE_HEAP("VMem alloc: %p:%p (%zu)\n",
reinterpret_cast<void*>(mem.address()),
reinterpret_cast<void*>(mem.end()), mem.size());
}
TRACE_HEAP("VMem alloc: %p:%p (%zu)\n",
reinterpret_cast<void*>(ret->address()),
reinterpret_cast<void*>(ret->end()), ret->size());
return mem;
}
void WasmCodeManager::SampleModuleSizes(Isolate* isolate) const {
......@@ -901,7 +900,7 @@ std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule(
static constexpr int kAllocationRetries = 2;
VirtualMemory mem;
for (int retries = 0;; ++retries) {
TryAllocate(vmem_size, &mem);
mem = TryAllocate(vmem_size);
if (mem.IsReserved()) break;
if (retries == kAllocationRetries) {
V8::FatalProcessOutOfMemory(isolate, "WasmCodeManager::NewNativeModule");
......
......@@ -457,7 +457,8 @@ class V8_EXPORT_PRIVATE WasmCodeManager final {
private:
friend class NativeModule;
void TryAllocate(size_t size, VirtualMemory*, void* hint = nullptr);
V8_WARN_UNUSED_RESULT VirtualMemory TryAllocate(size_t size,
void* hint = nullptr);
bool Commit(Address, size_t);
// Currently, we uncommit a whole module, so all we need is account
// for the freed memory size. We do that in FreeNativeModule.
......
......@@ -147,10 +147,9 @@ TEST(MemoryChunk) {
// With CodeRange.
const size_t code_range_size = 32 * MB;
VirtualMemory code_range_reservation;
CHECK(AlignedAllocVirtualMemory(page_allocator, code_range_size,
MemoryChunk::kAlignment, nullptr,
&code_range_reservation));
VirtualMemory code_range_reservation(page_allocator, code_range_size,
nullptr, MemoryChunk::kAlignment);
CHECK(code_range_reservation.IsReserved());
base::BoundedPageAllocator code_page_allocator(
page_allocator, code_range_reservation.address(),
......
......@@ -141,25 +141,20 @@ TEST(AlignedAllocOOM) {
// Requests an implausibly large virtual memory reservation and checks that
// whenever the reservation fails, the platform's OOM callback has fired.
//
// NOTE(review): the scraped diff interleaved removed and added lines here
// (|result| declared twice, dangling |success| checks); this is the
// reconstructed post-change test body.
TEST(AllocVirtualMemoryOOM) {
  AllocationPlatform platform;
  CHECK(!platform.oom_callback_called);
  v8::internal::VirtualMemory result(v8::internal::GetPlatformPageAllocator(),
                                     GetHugeMemoryAmount(), nullptr);
  // On a few systems, allocation somehow succeeds.
  CHECK_IMPLIES(!result.IsReserved(), platform.oom_callback_called);
}
// Same as AllocVirtualMemoryOOM, but requests a page-size-aligned
// reservation: a failed huge aligned reservation must have triggered the
// platform's OOM callback.
//
// NOTE(review): the scraped diff interleaved removed and added lines here
// (|result| declared twice, dangling |success| checks); this is the
// reconstructed post-change test body.
TEST(AlignedAllocVirtualMemoryOOM) {
  AllocationPlatform platform;
  CHECK(!platform.oom_callback_called);
  v8::internal::VirtualMemory result(v8::internal::GetPlatformPageAllocator(),
                                     GetHugeMemoryAmount(), nullptr,
                                     v8::internal::AllocatePageSize());
  // On a few systems, allocation somehow succeeds.
  CHECK_IMPLIES(!result.IsReserved(), platform.oom_callback_called);
}
#endif // !defined(V8_USE_ADDRESS_SANITIZER) && !defined(MEMORY_SANITIZER) &&
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment