Commit 318ede07 authored by Clemens Hammacher, committed by Commit Bot

[wasm] Rename some "mem" to "code_space"

In order to not confuse this with wasm memory.

R=mstarzinger@chromium.org

Bug: v8:8238
Change-Id: Ife183162a902ab1d141f6af95a9fa487a52379a1
Reviewed-on: https://chromium-review.googlesource.com/c/1296483
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#56936}
parent dffba1ee
...@@ -669,8 +669,8 @@ Vector<byte> NativeModule::AllocateForCode(size_t size) { ...@@ -669,8 +669,8 @@ Vector<byte> NativeModule::AllocateForCode(size_t size) {
v8::PageAllocator* page_allocator = GetPlatformPageAllocator(); v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
// This happens under a lock assumed by the caller. // This happens under a lock assumed by the caller.
size = RoundUp(size, kCodeAlignment); size = RoundUp(size, kCodeAlignment);
base::AddressRegion mem = free_code_space_.Allocate(size); base::AddressRegion code_space = free_code_space_.Allocate(size);
if (mem.is_empty()) { if (code_space.is_empty()) {
if (!can_request_more_memory_) { if (!can_request_more_memory_) {
V8::FatalProcessOutOfMemory(nullptr, V8::FatalProcessOutOfMemory(nullptr,
"NativeModule::AllocateForCode reservation"); "NativeModule::AllocateForCode reservation");
...@@ -691,14 +691,14 @@ Vector<byte> NativeModule::AllocateForCode(size_t size) { ...@@ -691,14 +691,14 @@ Vector<byte> NativeModule::AllocateForCode(size_t size) {
free_code_space_.Merge(new_mem.region()); free_code_space_.Merge(new_mem.region());
owned_code_space_.emplace_back(std::move(new_mem)); owned_code_space_.emplace_back(std::move(new_mem));
mem = free_code_space_.Allocate(size); code_space = free_code_space_.Allocate(size);
DCHECK(!mem.is_empty()); DCHECK(!code_space.is_empty());
} }
const Address page_size = page_allocator->AllocatePageSize(); const Address page_size = page_allocator->AllocatePageSize();
Address commit_start = RoundUp(mem.begin(), page_size); Address commit_start = RoundUp(code_space.begin(), page_size);
Address commit_end = RoundUp(mem.end(), page_size); Address commit_end = RoundUp(code_space.end(), page_size);
// {commit_start} will be either mem.start or the start of the next page. // {commit_start} will be either code_space.start or the start of the next
// {commit_end} will be the start of the page after the one in which // page. {commit_end} will be the start of the page after the one in which
// the allocation ends. // the allocation ends.
// We start from an aligned start, and we know we allocated vmem in // We start from an aligned start, and we know we allocated vmem in
// page multiples. // page multiples.
...@@ -739,10 +739,11 @@ Vector<byte> NativeModule::AllocateForCode(size_t size) { ...@@ -739,10 +739,11 @@ Vector<byte> NativeModule::AllocateForCode(size_t size) {
} }
#endif #endif
} }
DCHECK(IsAligned(mem.begin(), kCodeAlignment)); DCHECK(IsAligned(code_space.begin(), kCodeAlignment));
allocated_code_space_.Merge(mem); allocated_code_space_.Merge(code_space);
TRACE_HEAP("Code alloc for %p: %" PRIxPTR ",+%zu\n", this, mem.begin(), size); TRACE_HEAP("Code alloc for %p: %" PRIxPTR ",+%zu\n", this, code_space.begin(),
return {reinterpret_cast<byte*>(mem.begin()), mem.size()}; size);
return {reinterpret_cast<byte*>(code_space.begin()), code_space.size()};
} }
WasmCode* NativeModule::Lookup(Address pc) const { WasmCode* NativeModule::Lookup(Address pc) const {
...@@ -935,7 +936,7 @@ bool WasmCodeManager::ShouldForceCriticalMemoryPressureNotification() { ...@@ -935,7 +936,7 @@ bool WasmCodeManager::ShouldForceCriticalMemoryPressureNotification() {
} }
std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule( std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule(
Isolate* isolate, const WasmFeatures& enabled, size_t memory_estimate, Isolate* isolate, const WasmFeatures& enabled, size_t code_size_estimate,
bool can_request_more, std::shared_ptr<const WasmModule> module) { bool can_request_more, std::shared_ptr<const WasmModule> module) {
if (ShouldForceCriticalMemoryPressureNotification()) { if (ShouldForceCriticalMemoryPressureNotification()) {
(reinterpret_cast<v8::Isolate*>(isolate)) (reinterpret_cast<v8::Isolate*>(isolate))
...@@ -943,15 +944,16 @@ std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule( ...@@ -943,15 +944,16 @@ std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule(
} }
// If the code must be contiguous, reserve enough address space up front. // If the code must be contiguous, reserve enough address space up front.
size_t vmem_size = kRequiresCodeRange ? kMaxWasmCodeMemory : memory_estimate; size_t code_vmem_size =
kRequiresCodeRange ? kMaxWasmCodeMemory : code_size_estimate;
// Try up to three times; getting rid of dead JSArrayBuffer allocations might // Try up to three times; getting rid of dead JSArrayBuffer allocations might
// require two GCs because the first GC maybe incremental and may have // require two GCs because the first GC maybe incremental and may have
// floating garbage. // floating garbage.
static constexpr int kAllocationRetries = 2; static constexpr int kAllocationRetries = 2;
VirtualMemory mem; VirtualMemory code_space;
for (int retries = 0;; ++retries) { for (int retries = 0;; ++retries) {
mem = TryAllocate(vmem_size); code_space = TryAllocate(code_vmem_size);
if (mem.IsReserved()) break; if (code_space.IsReserved()) break;
if (retries == kAllocationRetries) { if (retries == kAllocationRetries) {
V8::FatalProcessOutOfMemory(isolate, "WasmCodeManager::NewNativeModule"); V8::FatalProcessOutOfMemory(isolate, "WasmCodeManager::NewNativeModule");
UNREACHABLE(); UNREACHABLE();
...@@ -961,12 +963,12 @@ std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule( ...@@ -961,12 +963,12 @@ std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule(
true); true);
} }
Address start = mem.address(); Address start = code_space.address();
size_t size = mem.size(); size_t size = code_space.size();
Address end = mem.end(); Address end = code_space.end();
std::unique_ptr<NativeModule> ret( std::unique_ptr<NativeModule> ret(
new NativeModule(isolate, enabled, can_request_more, std::move(mem), this, new NativeModule(isolate, enabled, can_request_more,
std::move(module))); std::move(code_space), this, std::move(module)));
TRACE_HEAP("New NativeModule %p: Mem: %" PRIuPTR ",+%zu\n", this, start, TRACE_HEAP("New NativeModule %p: Mem: %" PRIuPTR ",+%zu\n", this, start,
size); size);
AssignRangesAndAddModule(start, end, ret.get()); AssignRangesAndAddModule(start, end, ret.get());
...@@ -1027,14 +1029,14 @@ void WasmCodeManager::FreeNativeModule(NativeModule* native_module) { ...@@ -1027,14 +1029,14 @@ void WasmCodeManager::FreeNativeModule(NativeModule* native_module) {
DCHECK_EQ(1, native_modules_.count(native_module)); DCHECK_EQ(1, native_modules_.count(native_module));
native_modules_.erase(native_module); native_modules_.erase(native_module);
TRACE_HEAP("Freeing NativeModule %p\n", this); TRACE_HEAP("Freeing NativeModule %p\n", this);
for (auto& mem : native_module->owned_code_space_) { for (auto& code_space : native_module->owned_code_space_) {
DCHECK(mem.IsReserved()); DCHECK(code_space.IsReserved());
TRACE_HEAP("VMem Release: %" PRIxPTR ":%" PRIxPTR " (%zu)\n", mem.address(), TRACE_HEAP("VMem Release: %" PRIxPTR ":%" PRIxPTR " (%zu)\n",
mem.end(), mem.size()); code_space.address(), code_space.end(), code_space.size());
lookup_map_.erase(mem.address()); lookup_map_.erase(code_space.address());
memory_tracker_->ReleaseReservation(mem.size()); memory_tracker_->ReleaseReservation(code_space.size());
mem.Free(); code_space.Free();
DCHECK(!mem.IsReserved()); DCHECK(!code_space.IsReserved());
} }
native_module->owned_code_space_.clear(); native_module->owned_code_space_.clear();
......
...@@ -476,7 +476,7 @@ class V8_EXPORT_PRIVATE WasmCodeManager final { ...@@ -476,7 +476,7 @@ class V8_EXPORT_PRIVATE WasmCodeManager final {
// TODO(titzer): isolate is only required here for CompilationState. // TODO(titzer): isolate is only required here for CompilationState.
std::unique_ptr<NativeModule> NewNativeModule( std::unique_ptr<NativeModule> NewNativeModule(
Isolate* isolate, const WasmFeatures& enabled_features, Isolate* isolate, const WasmFeatures& enabled_features,
size_t memory_estimate, bool can_request_more, size_t code_size_estimate, bool can_request_more,
std::shared_ptr<const WasmModule> module); std::shared_ptr<const WasmModule> module);
NativeModule* LookupNativeModule(Address pc) const; NativeModule* LookupNativeModule(Address pc) const;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment