Commit c865c9f5 authored by Clemens Hammacher, committed by Commit Bot

[wasm] Fix naming to distinguish memory from code space

The term memory usually refers to the wasm memory. In the
{NativeModule}, we store pools for allocated and available code space.
This CL changes naming to make clear that this is code space and not
memory.

R=titzer@chromium.org

Bug: v8:7754
Change-Id: I195bf5c9227ad246af302ae1e98f9c839a02adbf
Reviewed-on: https://chromium-review.googlesource.com/1061495
Reviewed-by: Ben Titzer <titzer@chromium.org>
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Cr-Commit-Position: refs/heads/master@{#53208}
parent 63bd3a88
@@ -328,20 +328,20 @@ WasmCode::~WasmCode() {
 base::AtomicNumber<size_t> NativeModule::next_id_;
 
 NativeModule::NativeModule(uint32_t num_functions, uint32_t num_imports,
-                           bool can_request_more, VirtualMemory* mem,
+                           bool can_request_more, VirtualMemory* code_space,
                            WasmCodeManager* code_manager, ModuleEnv& env)
     : instance_id(next_id_.Increment(1)),
       code_table_(num_functions),
       num_imported_functions_(num_imports),
       compilation_state_(NewCompilationState(
           reinterpret_cast<Isolate*>(code_manager->isolate_), env)),
-      free_memory_(mem->address(), mem->end()),
+      free_code_space_(code_space->address(), code_space->end()),
       wasm_code_manager_(code_manager),
       can_request_more_memory_(can_request_more),
       use_trap_handler_(env.use_trap_handler) {
   VirtualMemory my_mem;
-  owned_memory_.push_back(my_mem);
-  owned_memory_.back().TakeControl(mem);
+  owned_code_space_.push_back(my_mem);
+  owned_code_space_.back().TakeControl(code_space);
   owned_code_.reserve(num_functions);
 }
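
The constructor hands an externally created reservation over to the module. For illustration, a minimal standalone sketch of that take-ownership pattern; Reservation is a simplified stand-in for V8's VirtualMemory, not the real class:

#include <cstddef>
#include <cstdint>
#include <list>

// Simplified stand-in for VirtualMemory: owns an address range and can take
// over another reservation, leaving the source empty (like TakeControl).
struct Reservation {
  uintptr_t address = 0;
  size_t size = 0;
  bool IsReserved() const { return size != 0; }
  void TakeControl(Reservation* from) {
    address = from->address;
    size = from->size;
    *from = Reservation{};  // the source no longer owns anything
  }
};

int main() {
  Reservation code_space{0x10000, 0x8000};  // made by the code manager
  std::list<Reservation> owned_code_space;
  owned_code_space.emplace_back();                   // default (empty) entry
  owned_code_space.back().TakeControl(&code_space);  // module owns it now
  return owned_code_space.back().IsReserved() && !code_space.IsReserved()
             ? 0 : 1;
}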
@@ -625,23 +625,23 @@ Address NativeModule::GetLocalAddressFor(Handle<Code> code) {
 
 Address NativeModule::AllocateForCode(size_t size) {
   // this happens under a lock assumed by the caller.
   size = RoundUp(size, kCodeAlignment);
-  DisjointAllocationPool mem = free_memory_.Allocate(size);
+  DisjointAllocationPool mem = free_code_space_.Allocate(size);
   if (mem.IsEmpty()) {
     if (!can_request_more_memory_) return kNullAddress;
 
-    Address hint =
-        owned_memory_.empty() ? kNullAddress : owned_memory_.back().end();
+    Address hint = owned_code_space_.empty() ? kNullAddress
+                                             : owned_code_space_.back().end();
     VirtualMemory empty_mem;
-    owned_memory_.push_back(empty_mem);
-    VirtualMemory& new_mem = owned_memory_.back();
+    owned_code_space_.push_back(empty_mem);
+    VirtualMemory& new_mem = owned_code_space_.back();
     wasm_code_manager_->TryAllocate(size, &new_mem,
                                     reinterpret_cast<void*>(hint));
     if (!new_mem.IsReserved()) return kNullAddress;
     DisjointAllocationPool mem_pool(new_mem.address(), new_mem.end());
     wasm_code_manager_->AssignRanges(new_mem.address(), new_mem.end(), this);
 
-    free_memory_.Merge(std::move(mem_pool));
-    mem = free_memory_.Allocate(size);
+    free_code_space_.Merge(std::move(mem_pool));
+    mem = free_code_space_.Allocate(size);
     if (mem.IsEmpty()) return kNullAddress;
   }
   Address ret = mem.ranges().front().first;
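
The allocation path above is an allocate-or-grow pattern: serve from the free pool, and only on exhaustion reserve a new chunk, merge it into the pool, and retry once. A minimal sketch with simplified stand-ins — this is not V8's real DisjointAllocationPool API, and the growth step fakes the TryAllocate reservation with a bump counter:

#include <cstddef>
#include <cstdint>
#include <optional>
#include <utility>
#include <vector>

// Toy free list standing in for DisjointAllocationPool: a set of
// [start, end) ranges from which we bump-allocate.
struct FreePool {
  std::vector<std::pair<uintptr_t, uintptr_t>> ranges;
  std::optional<uintptr_t> Allocate(size_t size) {
    for (auto& r : ranges) {
      if (r.second - r.first >= size) {
        uintptr_t result = r.first;
        r.first += size;  // bump-allocate from the front of the range
        return result;
      }
    }
    return std::nullopt;  // pool exhausted; caller must grow it
  }
  void Merge(uintptr_t start, uintptr_t end) { ranges.push_back({start, end}); }
};

// The shape of AllocateForCode: try the free pool first; if that fails,
// reserve a new chunk, merge it in, and retry exactly once.
uintptr_t AllocateForCode(FreePool& free_code_space, size_t size) {
  if (auto addr = free_code_space.Allocate(size)) return *addr;
  static uintptr_t next_reservation = 0x100000;  // fake reservation cursor
  uintptr_t start = next_reservation;
  next_reservation += size;
  free_code_space.Merge(start, start + size);
  auto addr = free_code_space.Allocate(size);
  return addr ? *addr : 0;  // 0 plays the role of kNullAddress
}

int main() {
  FreePool pool;
  pool.Merge(0x1000, 0x2000);                   // initial reservation
  uintptr_t a = AllocateForCode(pool, 0x800);   // served from the pool
  uintptr_t b = AllocateForCode(pool, 0x2000);  // forces a growth step
  return (a == 0x1000 && b != 0) ? 0 : 1;
}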
@@ -661,9 +661,10 @@ Address NativeModule::AllocateForCode(size_t size) {
     // On Windows, we cannot commit a range that straddles different
     // reservations of virtual memory. Because we bump-allocate, and because, if
     // we need more memory, we append that memory at the end of the
-    // owned_memory_ list, we traverse that list in reverse order to find the
-    // reservation(s) that guide how to chunk the region to commit.
-    for (auto it = owned_memory_.crbegin(), rend = owned_memory_.crend();
+    // owned_code_space_ list, we traverse that list in reverse order to find
+    // the reservation(s) that guide how to chunk the region to commit.
+    for (auto it = owned_code_space_.crbegin(),
+              rend = owned_code_space_.crend();
          it != rend && commit_start < commit_end; ++it) {
       if (commit_end > it->end() || it->address() >= commit_end) continue;
       Address start = std::max(commit_start, it->address());
@@ -672,7 +673,7 @@ Address NativeModule::AllocateForCode(size_t size) {
       if (!wasm_code_manager_->Commit(start, commit_size)) {
         return kNullAddress;
       }
-      committed_memory_ += commit_size;
+      committed_code_space_ += commit_size;
       commit_end = start;
     }
 #else
@@ -681,11 +682,11 @@ Address NativeModule::AllocateForCode(size_t size) {
     if (!wasm_code_manager_->Commit(commit_start, commit_size)) {
       return kNullAddress;
     }
-    committed_memory_ += commit_size;
+    committed_code_space_ += commit_size;
 #endif
   }
   DCHECK(IsAligned(ret, kCodeAlignment));
-  allocated_memory_.Merge(std::move(mem));
+  allocated_code_space_.Merge(std::move(mem));
   TRACE_HEAP("ID: %zu. Code alloc: %p,+%zu\n", instance_id,
              reinterpret_cast<void*>(ret), size);
   return ret;
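
On the Windows path, committing must be chunked so that no piece straddles two reservations. A sketch of that reverse traversal, assuming reservations are plain [address, end) ranges and eliding the actual Commit call:

#include <algorithm>
#include <cstdint>
#include <vector>

struct Range { uintptr_t address, end; };  // one virtual-memory reservation

// Commit [commit_start, commit_end) in pieces that never straddle two
// reservations. Reservations were appended in allocation order, so walking
// them in reverse peels chunks off the tail of the region.
void CommitByReservation(const std::vector<Range>& owned,
                         uintptr_t commit_start, uintptr_t commit_end) {
  for (auto it = owned.crbegin();
       it != owned.crend() && commit_start < commit_end; ++it) {
    // Skip reservations that do not contain the current tail of the region.
    if (commit_end > it->end || it->address >= commit_end) continue;
    uintptr_t start = std::max(commit_start, it->address);
    // Commit(start, commit_end - start) would go here.
    commit_end = start;  // everything at or above `start` is now handled
  }
}

int main() {
  std::vector<Range> owned = {{0x1000, 0x3000}, {0x3000, 0x5000}};
  CommitByReservation(owned, 0x2000, 0x4000);  // split into two commits
  return 0;
}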
@@ -802,13 +803,13 @@ NativeModule::~NativeModule() {
         reinterpret_cast<Object**>(shared_module_data_));
     shared_module_data_ = nullptr;
   }
-  wasm_code_manager_->FreeNativeModuleMemories(this);
+  wasm_code_manager_->FreeNativeModule(this);
 }
 
 WasmCodeManager::WasmCodeManager(v8::Isolate* isolate, size_t max_committed)
     : isolate_(isolate) {
   DCHECK_LE(max_committed, kMaxWasmCodeMemory);
-  remaining_uncommitted_.store(max_committed);
+  remaining_uncommitted_code_space_.store(max_committed);
 }
 
 bool WasmCodeManager::Commit(Address start, size_t size) {
@@ -818,10 +819,10 @@ bool WasmCodeManager::Commit(Address start, size_t size) {
   // {remaining_uncommitted_}. Temporary underflow would allow concurrent
   // threads to over-commit.
   while (true) {
-    size_t old_value = remaining_uncommitted_.load();
+    size_t old_value = remaining_uncommitted_code_space_.load();
     if (old_value < size) return false;
-    if (remaining_uncommitted_.compare_exchange_weak(old_value,
-                                                     old_value - size)) {
+    if (remaining_uncommitted_code_space_.compare_exchange_weak(
+            old_value, old_value - size)) {
       break;
     }
   }
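
The loop above is a classic compare-and-swap reservation: the check and the subtraction must happen as one atomic step, or a transient underflow would let another thread over-commit. Sketched standalone:

#include <atomic>
#include <cstddef>

std::atomic<size_t> remaining_uncommitted_code_space{4096};

// Reserve `size` bytes of budget without ever letting the counter dip below
// zero. A plain fetch_sub could transiently underflow (size_t wraps), so the
// check-then-subtract is retried until it succeeds atomically.
bool ReserveBudget(size_t size) {
  while (true) {
    size_t old_value = remaining_uncommitted_code_space.load();
    if (old_value < size) return false;  // not enough budget left
    // On failure, compare_exchange_weak refreshes old_value and we re-check.
    if (remaining_uncommitted_code_space.compare_exchange_weak(
            old_value, old_value - size)) {
      return true;
    }
  }
}

int main() {
  bool ok1 = ReserveBudget(4096);  // succeeds, budget drops to 0
  bool ok2 = ReserveBudget(1);     // fails: would underflow
  return ok1 && !ok2 ? 0 : 1;
}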
@@ -835,7 +836,7 @@ bool WasmCodeManager::Commit(Address start, size_t size) {
                             reinterpret_cast<void*>(start + size));
   if (!ret) {
     // Highly unlikely.
-    remaining_uncommitted_.fetch_add(size);
+    remaining_uncommitted_code_space_.fetch_add(size);
     return false;
   }
   // This API assumes main thread
@@ -857,7 +858,7 @@ bool WasmCodeManager::WouldGCHelp() const {
   // We have an expectation on the largest size a native function
   // may have.
   constexpr size_t kMaxNativeFunction = 32 * MB;
-  size_t remaining = remaining_uncommitted_.load();
+  size_t remaining = remaining_uncommitted_code_space_.load();
   return remaining < kMaxNativeFunction;
 }
@@ -945,7 +946,7 @@ bool NativeModule::SetExecutable(bool executable) {
   // use. On 32-bit though, the scarce resource is the address space -
   // committed or not.
   if (can_request_more_memory_) {
-    for (auto& vmem : owned_memory_) {
+    for (auto& vmem : owned_code_space_) {
       if (!SetPermissions(vmem.address(), vmem.size(), permission)) {
         return false;
       }
@@ -956,8 +957,8 @@ bool NativeModule::SetExecutable(bool executable) {
     return true;
   }
 #endif
-  for (auto& range : allocated_memory_.ranges()) {
-    // allocated_memory_ is fine-grained, so we need to
+  for (auto& range : allocated_code_space_.ranges()) {
+    // allocated_code_space_ is fine-grained, so we need to
     // page-align it.
     size_t range_size = RoundUp(
         static_cast<size_t>(range.second - range.first), AllocatePageSize());
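
The rounding here matters because permissions change at page granularity while allocated_code_space_ tracks ranges at kCodeAlignment granularity. RoundUp for a power-of-two alignment, with worked examples (4096 is an assumed page size):

#include <cstddef>

// Round `size` up to a multiple of `alignment` (a power of two), as V8's
// RoundUp does. Permission changes operate on whole OS pages, so a range
// that is fine-grained must be widened to page granularity first.
constexpr size_t RoundUpPow2(size_t size, size_t alignment) {
  return (size + alignment - 1) & ~(alignment - 1);
}

static_assert(RoundUpPow2(1, 4096) == 4096, "one byte still costs a page");
static_assert(RoundUpPow2(4096, 4096) == 4096, "already aligned stays put");
static_assert(RoundUpPow2(4097, 4096) == 8192, "one byte over takes two pages");

int main() { return 0; }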
@@ -973,25 +974,25 @@ bool NativeModule::SetExecutable(bool executable) {
   return true;
 }
 
-void WasmCodeManager::FreeNativeModuleMemories(NativeModule* native_module) {
+void WasmCodeManager::FreeNativeModule(NativeModule* native_module) {
   DCHECK_GE(active_, 1);
   --active_;
   TRACE_HEAP("Freeing %zu\n", native_module->instance_id);
-  for (auto& vmem : native_module->owned_memory_) {
+  for (auto& vmem : native_module->owned_code_space_) {
     lookup_map_.erase(vmem.address());
     Free(&vmem);
     DCHECK(!vmem.IsReserved());
   }
-  native_module->owned_memory_.clear();
+  native_module->owned_code_space_.clear();
 
   // No need to tell the GC anything if we're destroying the heap,
   // which we currently indicate by having the isolate_ as null
   if (isolate_ == nullptr) return;
-  size_t freed_mem = native_module->committed_memory_;
-  DCHECK(IsAligned(freed_mem, AllocatePageSize()));
-  remaining_uncommitted_.fetch_add(freed_mem);
+  size_t freed_code_space = native_module->committed_code_space_;
+  DCHECK(IsAligned(freed_code_space, AllocatePageSize()));
+  remaining_uncommitted_code_space_.fetch_add(freed_code_space);
   isolate_->AdjustAmountOfExternalAllocatedMemory(
-      -static_cast<int64_t>(freed_mem));
+      -static_cast<int64_t>(freed_code_space));
 }
 
 // TODO(wasm): We can make this more efficient if needed. For
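
Freeing is the mirror image of Commit's budget reservation: the module's whole committed size returns to the shared budget in one step, and the isolate's external-memory counter shrinks by the same amount. A hedged sketch of just that accounting, with the isolate counter replaced by a plain stand-in:

#include <atomic>
#include <cstddef>
#include <cstdint>

std::atomic<size_t> remaining_uncommitted_code_space{0};
int64_t external_allocated_bytes = 0;  // stand-in for the isolate's counter

// When a module dies, return its committed bytes to the shared budget and
// shrink the external memory the GC accounting knows about. There is no
// separate Uncommit; everything is returned at once.
void FreeModuleAccounting(size_t committed_code_space) {
  remaining_uncommitted_code_space.fetch_add(committed_code_space);
  external_allocated_bytes -= static_cast<int64_t>(committed_code_space);
}

int main() {
  external_allocated_bytes = 1 << 20;  // pretend 1 MiB was committed
  FreeModuleAccounting(1 << 20);
  return remaining_uncommitted_code_space.load() == (1u << 20) ? 0 : 1;
}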
@@ -1031,8 +1032,8 @@ void WasmCodeManager::Free(VirtualMemory* mem) {
   TRACE_HEAP("VMem Release: %p:%p (%zu)\n", start, end, size);
 }
 
-size_t WasmCodeManager::remaining_uncommitted() const {
-  return remaining_uncommitted_.load();
+size_t WasmCodeManager::remaining_uncommitted_code_space() const {
+  return remaining_uncommitted_code_space_.load();
 }
 
 NativeModuleModificationScope::NativeModuleModificationScope(
......
@@ -290,7 +290,6 @@ class V8_EXPORT_PRIVATE NativeModule final {
   uint32_t num_imported_functions() const { return num_imported_functions_; }
   const std::vector<WasmCode*>& code_table() const { return code_table_; }
 
-  size_t committed_memory() const { return committed_memory_; }
   bool use_trap_handler() const { return use_trap_handler_; }
   void set_lazy_compile_frozen(bool frozen) { lazy_compile_frozen_ = frozen; }
   bool lazy_compile_frozen() const { return lazy_compile_frozen_; }
@@ -306,7 +305,7 @@ class V8_EXPORT_PRIVATE NativeModule final {
   static base::AtomicNumber<size_t> next_id_;
   NativeModule(uint32_t num_functions, uint32_t num_imports,
-               bool can_request_more, VirtualMemory* vmem,
+               bool can_request_more, VirtualMemory* code_space,
                WasmCodeManager* code_manager, ModuleEnv& env);
 
   WasmCode* AddAnonymousCode(Handle<Code>, WasmCode::Kind kind);
@@ -350,12 +349,13 @@ class V8_EXPORT_PRIVATE NativeModule final {
   // when the phantom reference is cleared.
   WasmSharedModuleData** shared_module_data_ = nullptr;
 
-  DisjointAllocationPool free_memory_;
-  DisjointAllocationPool allocated_memory_;
-  std::list<VirtualMemory> owned_memory_;
+  DisjointAllocationPool free_code_space_;
+  DisjointAllocationPool allocated_code_space_;
+  std::list<VirtualMemory> owned_code_space_;
   WasmCodeManager* wasm_code_manager_;
   base::Mutex allocation_mutex_;
-  size_t committed_memory_ = 0;
+  size_t committed_code_space_ = 0;
   int modification_scope_depth_ = 0;
   bool can_request_more_memory_;
   bool use_trap_handler_;
@@ -386,7 +386,7 @@ class V8_EXPORT_PRIVATE WasmCodeManager final {
   WasmCode* LookupCode(Address pc) const;
   WasmCode* GetCodeFromStartAddress(Address pc) const;
-  size_t remaining_uncommitted() const;
+  size_t remaining_uncommitted_code_space() const;
 
  private:
   friend class NativeModule;
@@ -394,10 +394,10 @@ class V8_EXPORT_PRIVATE WasmCodeManager final {
   void TryAllocate(size_t size, VirtualMemory*, void* hint = nullptr);
   bool Commit(Address, size_t);
   // Currently, we uncommit a whole module, so all we need is account
-  // for the freed memory size. We do that in FreeNativeModuleMemories.
+  // for the freed memory size. We do that in FreeNativeModule.
   // There's no separate Uncommit.
-  void FreeNativeModuleMemories(NativeModule*);
+  void FreeNativeModule(NativeModule*);
   void Free(VirtualMemory* mem);
   void AssignRanges(Address start, Address end, NativeModule*);
   size_t GetAllocationChunk(const WasmModule& module);
@@ -407,7 +407,7 @@ class V8_EXPORT_PRIVATE WasmCodeManager final {
   // Count of NativeModules not yet collected. Helps determine if it's
   // worth requesting a GC on memory pressure.
   size_t active_ = 0;
-  std::atomic<size_t> remaining_uncommitted_;
+  std::atomic<size_t> remaining_uncommitted_code_space_;
   // TODO(mtrofin): remove the dependency on isolate.
   v8::Isolate* isolate_;
......
@@ -207,7 +207,7 @@ INSTANTIATE_TEST_CASE_P(Parameterized, WasmCodeManagerTest,
 
 TEST_P(WasmCodeManagerTest, EmptyCase) {
   WasmCodeManager manager(v8_isolate(), 0 * page());
-  CHECK_EQ(0, manager.remaining_uncommitted());
+  CHECK_EQ(0, manager.remaining_uncommitted_code_space());
   NativeModulePtr native_module = AllocModule(&manager, 1 * page(), GetParam());
   CHECK(native_module);
@@ -217,22 +217,22 @@ TEST_P(WasmCodeManagerTest, EmptyCase) {
 
 TEST_P(WasmCodeManagerTest, AllocateAndGoOverLimit) {
   WasmCodeManager manager(v8_isolate(), 1 * page());
-  CHECK_EQ(1 * page(), manager.remaining_uncommitted());
+  CHECK_EQ(1 * page(), manager.remaining_uncommitted_code_space());
   NativeModulePtr native_module = AllocModule(&manager, 1 * page(), GetParam());
   CHECK(native_module);
-  CHECK_EQ(1 * page(), manager.remaining_uncommitted());
+  CHECK_EQ(1 * page(), manager.remaining_uncommitted_code_space());
   uint32_t index = 0;
   WasmCode* code = AddCode(native_module.get(), index++, 1 * kCodeAlignment);
   CHECK_NOT_NULL(code);
-  CHECK_EQ(0, manager.remaining_uncommitted());
+  CHECK_EQ(0, manager.remaining_uncommitted_code_space());
 
   code = AddCode(native_module.get(), index++, 3 * kCodeAlignment);
   CHECK_NOT_NULL(code);
-  CHECK_EQ(0, manager.remaining_uncommitted());
+  CHECK_EQ(0, manager.remaining_uncommitted_code_space());
 
   code = AddCode(native_module.get(), index++, page() - 4 * kCodeAlignment);
   CHECK_NOT_NULL(code);
-  CHECK_EQ(0, manager.remaining_uncommitted());
+  CHECK_EQ(0, manager.remaining_uncommitted_code_space());
 
   ASSERT_DEATH_IF_SUPPORTED(
       AddCode(native_module.get(), index++, 1 * kCodeAlignment),
@@ -260,7 +260,7 @@ TEST_P(WasmCodeManagerTest, DifferentHeapsApplyLimitsIndependently) {
   CHECK(nm2);
   WasmCode* code = AddCode(nm1.get(), 0, 1 * page());
   CHECK_NOT_NULL(code);
-  CHECK_EQ(0, manager1.remaining_uncommitted());
+  CHECK_EQ(0, manager1.remaining_uncommitted_code_space());
   code = AddCode(nm2.get(), 0, 1 * page());
   CHECK_NOT_NULL(code);
 }
@@ -273,7 +273,7 @@ TEST_P(WasmCodeManagerTest, GrowingVsFixedModule) {
                             "OOM in NativeModule::AddOwnedCode");
   } else {
     CHECK_NOT_NULL(AddCode(nm.get(), 0, 1 * page() + kCodeAlignment));
-    CHECK_EQ(manager.remaining_uncommitted(), 1 * page());
+    CHECK_EQ(manager.remaining_uncommitted_code_space(), 1 * page());
   }
 }
@@ -282,13 +282,13 @@ TEST_P(WasmCodeManagerTest, CommitIncrements) {
   NativeModulePtr nm = AllocModule(&manager, 3 * page(), GetParam());
   WasmCode* code = AddCode(nm.get(), 0, kCodeAlignment);
   CHECK_NOT_NULL(code);
-  CHECK_EQ(manager.remaining_uncommitted(), 9 * page());
+  CHECK_EQ(manager.remaining_uncommitted_code_space(), 9 * page());
   code = AddCode(nm.get(), 1, 2 * page());
   CHECK_NOT_NULL(code);
-  CHECK_EQ(manager.remaining_uncommitted(), 7 * page());
+  CHECK_EQ(manager.remaining_uncommitted_code_space(), 7 * page());
   code = AddCode(nm.get(), 2, page() - kCodeAlignment);
   CHECK_NOT_NULL(code);
-  CHECK_EQ(manager.remaining_uncommitted(), 7 * page());
+  CHECK_EQ(manager.remaining_uncommitted_code_space(), 7 * page());
 }
 
 TEST_P(WasmCodeManagerTest, Lookup) {
......