Commit 26d0d95e authored by Ben L. Titzer, committed by Commit Bot

[wasm] Add size estimates for managed objects

This CL estimates the sizes of the important managed objects in WASM:
the decoded module {WasmModule}, the native module that contains code
{NativeModule}, and the natively-allocated indirect and import tables
{WasmInstanceNativeAllocations}.

Since Managed<T> updates the isolate's external allocated memory,
it is no longer necessary to do so upon committing or releasing a
native module's memory.

R=mstarzinger@chromium.org
CC=ulan@chromium.org

Bug: v8:7424
Change-Id: Iff4e07d0d328383a925febd654ccbfc95f0930e9
Reviewed-on: https://chromium-review.googlesource.com/1079067
Commit-Queue: Ben Titzer <titzer@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#53675}
parent dffc2a5a
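The accounting pattern this CL relies on: Managed<T>::FromUniquePtr takes an estimated size and charges it to the isolate's external allocated memory, releasing the charge when the GC reclaims the wrapper, which is why the explicit AdjustAmountOfExternalAllocatedMemory calls in the code manager below can be dropped. The following is a minimal standalone sketch of that pattern for illustration only; FakeIsolate, ManagedSketch, and FakeModule are made-up stand-ins, not V8 types.

// Minimal standalone sketch of the size-accounting pattern described above.
// These types are simplified stand-ins, not V8's real Isolate or Managed<T>.
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <memory>
#include <utility>

struct FakeIsolate {
  int64_t external_allocated_bytes = 0;
  void AdjustAmountOfExternalAllocatedMemory(int64_t delta) {
    external_allocated_bytes += delta;
  }
};

// Wraps a natively allocated object and charges its estimated size to the
// isolate on construction; the charge is reversed when the wrapper dies
// (in V8 this happens when the GC reclaims the Managed<T> heap object).
template <typename T>
class ManagedSketch {
 public:
  ManagedSketch(FakeIsolate* isolate, size_t estimated_size,
                std::unique_ptr<T> object)
      : isolate_(isolate),
        estimated_size_(estimated_size),
        object_(std::move(object)) {
    isolate_->AdjustAmountOfExternalAllocatedMemory(
        static_cast<int64_t>(estimated_size_));
  }
  ~ManagedSketch() {
    isolate_->AdjustAmountOfExternalAllocatedMemory(
        -static_cast<int64_t>(estimated_size_));
  }
  T* get() { return object_.get(); }

 private:
  FakeIsolate* isolate_;
  size_t estimated_size_;
  std::unique_ptr<T> object_;
};

struct FakeModule {
  int num_functions = 0;
};

int main() {
  FakeIsolate isolate;
  {
    // The estimate plays the role of EstimateWasmModuleSize() in this CL.
    size_t module_size = 4096;
    ManagedSketch<FakeModule> managed(&isolate, module_size,
                                      std::make_unique<FakeModule>());
    std::printf("charged: %lld bytes\n",
                static_cast<long long>(isolate.external_allocated_bytes));
  }
  std::printf("after release: %lld bytes\n",
              static_cast<long long>(isolate.external_allocated_bytes));
  return 0;
}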
@@ -1370,7 +1370,7 @@ MaybeHandle<WasmModuleObject> CompileToModuleObjectInternal(
   // The {managed_module} will take ownership of the {WasmModule} object,
   // and it will be destroyed when the GC reclaims the wrapper object.
-  size_t module_size = 0;  // TODO(titzer): estimate size of decoded module.
+  size_t module_size = EstimateWasmModuleSize(module.get());
   Handle<Managed<WasmModule>> managed_module =
       Managed<WasmModule>::FromUniquePtr(isolate, module_size,
                                          std::move(module));
@@ -2901,7 +2901,7 @@ void AsyncCompileJob::FinishCompile() {
   // The {managed_module} will take ownership of the {WasmModule} object,
   // and it will be destroyed when the GC reclaims the wrapper object.
-  size_t module_size = 0;  // TODO(titzer): estimate size of decoded module.
+  size_t module_size = EstimateWasmModuleSize(module_.get());
   Handle<Managed<WasmModule>> managed_module =
       Managed<WasmModule>::FromUniquePtr(isolate_, module_size,
                                          std::move(module_));
...
@@ -885,8 +885,6 @@ bool WasmCodeManager::Commit(Address start, size_t size) {
     remaining_uncommitted_code_space_.fetch_add(size);
     return false;
   }
-  // This API assumes main thread
-  isolate_->AdjustAmountOfExternalAllocatedMemory(size);
   if (WouldGCHelp()) {
     // This API does not assume main thread, and would schedule
     // a GC if called from a different thread, instead of synchronously
@@ -927,34 +925,39 @@ void WasmCodeManager::TryAllocate(size_t size, VirtualMemory* ret, void* hint) {
              reinterpret_cast<void*>(ret->end()), ret->size());
 }
 
-size_t WasmCodeManager::GetAllocationChunk(const WasmModule& module) {
-  // TODO(mtrofin): this should pick up its 'maximal code range size'
-  // from something embedder-provided
-  if (kRequiresCodeRange) return kMaxWasmCodeMemory;
-  DCHECK(kModuleCanAllocateMoreMemory);
-  size_t ret = AllocatePageSize();
-  // a ballpark guesstimate on native inflation factor.
-  constexpr size_t kMultiplier = 4;
-  for (auto& function : module.functions) {
-    ret += kMultiplier * function.code.length();
+size_t WasmCodeManager::EstimateNativeModuleSize(const WasmModule* module) {
+  constexpr size_t kCodeSizeMultiplier = 4;
+  constexpr size_t kImportSize = 32 * kPointerSize;
+
+  size_t estimate =
+      AllocatePageSize() /* TODO(titzer): 1 page spot bonus */ +
+      sizeof(NativeModule) +
+      (sizeof(WasmCode*) * module->functions.size() /* code table size */) +
+      (sizeof(WasmCode) * module->functions.size() /* code object size */) +
+      (kImportSize * module->num_imported_functions /* import size */);
+
+  for (auto& function : module->functions) {
+    estimate += kCodeSizeMultiplier * function.code.length();
   }
-  return ret;
+  return estimate;
 }
 
 std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule(
     const WasmModule& module, ModuleEnv& env) {
-  size_t code_size = GetAllocationChunk(module);
+  size_t memory_estimate = EstimateNativeModuleSize(&module);
   return NewNativeModule(
-      code_size, static_cast<uint32_t>(module.functions.size()),
+      memory_estimate, static_cast<uint32_t>(module.functions.size()),
       module.num_imported_functions, kModuleCanAllocateMoreMemory, env);
 }
 
 std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule(
-    size_t size_estimate, uint32_t num_functions,
+    size_t memory_estimate, uint32_t num_functions,
     uint32_t num_imported_functions, bool can_request_more, ModuleEnv& env) {
   VirtualMemory mem;
-  TryAllocate(size_estimate, &mem);
+  // If the code must be contiguous, reserve enough address space up front.
+  size_t vmem_size = kRequiresCodeRange ? kMaxWasmCodeMemory : memory_estimate;
+  TryAllocate(vmem_size, &mem);
   if (mem.IsReserved()) {
     Address start = mem.address();
     size_t size = mem.size();
@@ -1041,8 +1044,6 @@ void WasmCodeManager::FreeNativeModule(NativeModule* native_module) {
   // which we currently indicate by having the isolate_ as null
   if (isolate_ == nullptr) return;
   remaining_uncommitted_code_space_.fetch_add(code_size);
-  isolate_->AdjustAmountOfExternalAllocatedMemory(
-      -static_cast<int64_t>(code_size));
 }
 
 // TODO(wasm): We can make this more efficient if needed. For
...
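As a sanity check on the formula above, here is a hypothetical back-of-the-envelope computation for a small module; kPointerSize, the page size, and the sizeof(NativeModule)/sizeof(WasmCode) values are assumed stand-ins for a 64-bit target, not the real structure sizes.

// Hypothetical worked example of the EstimateNativeModuleSize formula above.
// All constants below are assumptions for illustration, not V8's real values.
#include <cstddef>
#include <cstdio>

int main() {
  constexpr size_t kPointerSize = 8;         // assume a 64-bit target
  constexpr size_t kCodeSizeMultiplier = 4;
  constexpr size_t kImportSize = 32 * kPointerSize;
  constexpr size_t kPageSize = 4096;         // stand-in for AllocatePageSize()
  constexpr size_t kNativeModuleSize = 512;  // stand-in for sizeof(NativeModule)
  constexpr size_t kWasmCodeSize = 96;       // stand-in for sizeof(WasmCode)

  const size_t num_functions = 10;           // hypothetical module shape
  const size_t num_imports = 2;
  const size_t body_size = 100;              // wire bytes per function body

  size_t estimate = kPageSize + kNativeModuleSize +
                    kPointerSize * num_functions +    // code table
                    kWasmCodeSize * num_functions +   // code objects
                    kImportSize * num_imports +       // imports
                    kCodeSizeMultiplier * body_size * num_functions;
  std::printf("estimated native module size: %zu bytes\n", estimate);
  return 0;
}

With these hypothetical numbers the estimate comes out to roughly 10 KB, dominated by the 4x multiplier applied to the wire-bytes size of the function bodies.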
@@ -439,6 +439,7 @@ class V8_EXPORT_PRIVATE WasmCodeManager final {
   void SetModuleCodeSizeHistogram(Histogram* histogram) {
     module_code_size_mb_ = histogram;
   }
+  static size_t EstimateNativeModuleSize(const WasmModule* module);
 
  private:
   friend class NativeModule;
@@ -452,7 +453,6 @@ class V8_EXPORT_PRIVATE WasmCodeManager final {
   void FreeNativeModule(NativeModule*);
   void Free(VirtualMemory* mem);
   void AssignRanges(Address start, Address end, NativeModule*);
-  size_t GetAllocationChunk(const WasmModule& module);
   bool WouldGCHelp() const;
 
   std::map<Address, std::pair<Address, NativeModule*>> lookup_map_;
...
@@ -341,6 +341,24 @@ Handle<FixedArray> DecodeLocalNames(Isolate* isolate,
   }
   return locals_names;
 }
+
+namespace {
+template <typename T>
+inline size_t VectorSize(const std::vector<T>& vector) {
+  return sizeof(T) * vector.size();
+}
+}  // namespace
+
+size_t EstimateWasmModuleSize(const WasmModule* module) {
+  size_t estimate =
+      sizeof(WasmModule) + VectorSize(module->signatures) +
+      VectorSize(module->signature_ids) + VectorSize(module->functions) +
+      VectorSize(module->data_segments) + VectorSize(module->function_tables) +
+      VectorSize(module->import_table) + VectorSize(module->export_table) +
+      VectorSize(module->exceptions) + VectorSize(module->table_inits);
+  // TODO(wasm): include names table and wire bytes in size estimate
+  return estimate;
+}
 
 }  // namespace wasm
 }  // namespace internal
 }  // namespace v8
@@ -180,6 +180,8 @@ struct V8_EXPORT_PRIVATE WasmModule {
   void AddNameForTesting(int function_index, WireBytesRef name);
 };
 
+size_t EstimateWasmModuleSize(const WasmModule* module);
+
 // Interface to the storage (wire bytes) of a wasm module.
 // It is illegal for anyone receiving a ModuleWireBytes to store pointers based
 // on module_bytes, as this storage is only guaranteed to be alive as long as
...
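Note that VectorSize, added above, only counts each vector's inline element storage (sizeof(T) * size()); heap memory owned by the elements, the names table, and the wire bytes are deliberately left out, as the TODO notes. A tiny standalone illustration of the helper follows; the vectors here are arbitrary examples, not actual WasmModule fields.

// Standalone illustration of the VectorSize helper; example vectors only.
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

template <typename T>
inline size_t VectorSize(const std::vector<T>& vector) {
  return sizeof(T) * vector.size();
}

int main() {
  std::vector<uint32_t> signature_ids(8);  // 8 * 4 bytes   = 32
  std::vector<double> constants(100);      // 100 * 8 bytes = 800
  std::printf("%zu %zu\n", VectorSize(signature_ids), VectorSize(constants));
  return 0;
}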
@@ -127,6 +127,16 @@ class WasmInstanceNativeAllocations {
 #undef SET
 };
 
+size_t EstimateNativeAllocationsSize(const WasmModule* module) {
+  size_t estimate = sizeof(WasmInstanceNativeAllocations) +
+                    (1 * kPointerSize * module->num_imported_mutable_globals) +
+                    (2 * kPointerSize * module->num_imported_functions);
+  for (auto& table : module->function_tables) {
+    estimate += 3 * kPointerSize * table.initial_size;
+  }
+  return estimate;
+}
+
 WasmInstanceNativeAllocations* GetNativeAllocations(
     WasmInstanceObject* instance) {
   return reinterpret_cast<Managed<WasmInstanceNativeAllocations>*>(
@@ -811,11 +821,10 @@ Handle<WasmInstanceObject> WasmInstanceObject::New(
       reinterpret_cast<WasmInstanceObject*>(*instance_object), isolate);
 
   // Initialize the imported function arrays.
-  auto num_imported_functions =
-      module_object->shared()->module()->num_imported_functions;
-  auto num_imported_mutable_globals =
-      module_object->shared()->module()->num_imported_mutable_globals;
-  size_t native_allocations_size = 0;  // TODO(titzer): estimate properly.
+  auto module = module_object->shared()->module();
+  auto num_imported_functions = module->num_imported_functions;
+  auto num_imported_mutable_globals = module->num_imported_mutable_globals;
+  size_t native_allocations_size = EstimateNativeAllocationsSize(module);
   auto native_allocations = Managed<WasmInstanceNativeAllocations>::Allocate(
       isolate, native_allocations_size, instance, num_imported_functions,
       num_imported_mutable_globals);
@@ -1388,12 +1397,13 @@ Handle<WasmCompiledModule> WasmCompiledModule::New(Isolate* isolate,
       isolate->factory()->NewStruct(WASM_COMPILED_MODULE_TYPE, TENURED));
   compiled_module->set_weak_owning_instance(isolate->heap()->empty_weak_cell());
   {
+    size_t memory_estimate =
+        isolate->wasm_engine()->code_manager()->EstimateNativeModuleSize(
+            module);
     auto native_module =
         isolate->wasm_engine()->code_manager()->NewNativeModule(*module, env);
-    size_t native_module_size =
-        0;  // TODO(titzer): estimate native module size.
     Handle<Foreign> native_module_wrapper =
-        Managed<wasm::NativeModule>::FromUniquePtr(isolate, native_module_size,
+        Managed<wasm::NativeModule>::FromUniquePtr(isolate, memory_estimate,
                                                    std::move(native_module));
     compiled_module->set_native_module(*native_module_wrapper);
   }
...
@@ -591,8 +591,8 @@ MaybeHandle<WasmModuleObject> DeserializeNativeModule(
           .ToHandleChecked();
   DCHECK(module_bytes->IsSeqOneByteString());
   // The {managed_module} will take ownership of the {WasmModule} object,
-  // and it will be destroyed when the GC reclaims the wrapper object.
-  size_t module_size = 0;  // TODO(titzer): estimate size properly.
+  // and it will be destroyed when the GC reclaims it.
+  size_t module_size = EstimateWasmModuleSize(decode_result.val.get());
   Handle<Managed<WasmModule>> managed_module =
       Managed<WasmModule>::FromUniquePtr(isolate, module_size,
                                          std::move(decode_result.val));
...
@@ -255,7 +255,7 @@ TEST_P(WasmCodeManagerTest, GrowingVsFixedModule) {
   WasmCodeManager manager(v8_isolate(), 3 * page());
   NativeModulePtr nm = AllocModule(&manager, 1 * page(), GetParam());
   if (GetParam() == Fixed) {
-    ASSERT_DEATH_IF_SUPPORTED(AddCode(nm.get(), 0, 1 * page() + kCodeAlignment),
+    ASSERT_DEATH_IF_SUPPORTED(AddCode(nm.get(), 0, kMaxWasmCodeMemory + 1),
                               "OOM in NativeModule::AddOwnedCode");
   } else {
     CHECK_NOT_NULL(AddCode(nm.get(), 0, 1 * page() + kCodeAlignment));
...