Commit a1ff298d authored by Clemens Hammacher, committed by Commit Bot

[wasm] Move Isolate management to WasmEngine

The WasmCodeManager held a list of all Isolates that use the
WasmEngine/WasmCodeManager (those two are 1:1).
Since we want to move all isolate-specific tasks (like code logging and
compilation callbacks) to the WasmEngine, this CL moves this management
from the WasmCodeManager to the WasmEngine. We now have a bidirectional
(n:n) mapping: from each NativeModule to the Isolates that use it, and from
each Isolate to all the NativeModules it uses.
The IsolateData struct will be extended in follow-up CLs to hold things
like the ForegroundTaskRunner. The Isolate* in the NativeModule /
CompilationState will eventually be removed.
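
To illustrate the new bookkeeping, here is a minimal, self-contained sketch of
the bidirectional registry this CL introduces. The types and the class name are
simplified stand-ins, not the actual V8 classes (the real code lives in
WasmEngine, with an IsolateInfo entry per Isolate and the
isolates_per_native_module_ map, all guarded by the engine mutex):

```cpp
#include <map>
#include <set>

// Simplified stand-ins for the real V8 types.
struct Isolate {};
struct NativeModule {};

// Sketch of the n:n bookkeeping kept by the WasmEngine after this CL.
class EngineRegistry {
 public:
  void AddIsolate(Isolate* isolate) {
    isolates_[isolate];  // Create an (initially empty) entry for this isolate.
  }

  void RemoveIsolate(Isolate* isolate) {
    // Unlink the isolate from every NativeModule it was using.
    for (NativeModule* native_module : isolates_[isolate]) {
      isolates_per_native_module_[native_module].erase(isolate);
    }
    isolates_.erase(isolate);
  }

  void RegisterNativeModule(Isolate* isolate, NativeModule* native_module) {
    // Maintain both directions of the mapping.
    isolates_[isolate].insert(native_module);
    isolates_per_native_module_[native_module].insert(isolate);
  }

  void FreeNativeModule(NativeModule* native_module) {
    // Unlink the module from every isolate that still references it.
    for (Isolate* isolate : isolates_per_native_module_[native_module]) {
      isolates_[isolate].erase(native_module);
    }
    isolates_per_native_module_.erase(native_module);
  }

 private:
  // Isolate -> all NativeModules it uses.
  std::map<Isolate*, std::set<NativeModule*>> isolates_;
  // NativeModule -> all Isolates that use it.
  std::map<NativeModule*, std::set<Isolate*>> isolates_per_native_module_;
};
```

For example, FreeNativeModule() first unlinks the module from every Isolate
that still references it and only then drops the reverse entry, mirroring
WasmEngine::FreeNativeModule in the diff below.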

R=mstarzinger@chromium.org

Bug: v8:8689
Change-Id: Ic2c003c3949f73ce3264dd9dac96884a5c0b9896
Reviewed-on: https://chromium-review.googlesource.com/c/1433793
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#59092}
parent 58078730
......@@ -2592,7 +2592,6 @@ void Isolate::SetWasmEngine(std::shared_ptr<wasm::WasmEngine> engine) {
DCHECK_NULL(wasm_engine_); // Only call once before {Init}.
wasm_engine_ = std::move(engine);
wasm_engine_->AddIsolate(this);
wasm::WasmCodeManager::InstallSamplingGCCallback(this);
}
// NOLINTNEXTLINE
......
......@@ -907,7 +907,7 @@ std::unique_ptr<NativeModule> CompileToNativeModule(
wasm::WasmCodeManager::EstimateNativeModuleCodeSize(module.get());
// Create a new {NativeModule} first.
auto native_module = isolate->wasm_engine()->code_manager()->NewNativeModule(
auto native_module = isolate->wasm_engine()->NewNativeModule(
isolate, enabled, code_size_estimate,
wasm::NativeModule::kCanAllocateMoreMemory, std::move(module));
native_module->SetWireBytes(std::move(wire_bytes_copy));
......@@ -1046,7 +1046,7 @@ void AsyncCompileJob::CreateNativeModule(
size_t code_size_estimate =
wasm::WasmCodeManager::EstimateNativeModuleCodeSize(module.get());
native_module_ = isolate_->wasm_engine()->code_manager()->NewNativeModule(
native_module_ = isolate_->wasm_engine()->NewNativeModule(
isolate_, enabled_features_, code_size_estimate,
wasm::NativeModule::kCanAllocateMoreMemory, std::move(module));
native_module_->SetWireBytes({std::move(bytes_copy_), wire_bytes_.length()});
......
......@@ -380,9 +380,9 @@ WasmCode::~WasmCode() {
}
}
NativeModule::NativeModule(Isolate* isolate, const WasmFeatures& enabled,
bool can_request_more, VirtualMemory code_space,
WasmCodeManager* code_manager,
NativeModule::NativeModule(WasmEngine* engine, Isolate* isolate,
const WasmFeatures& enabled, bool can_request_more,
VirtualMemory code_space,
std::shared_ptr<const WasmModule> module)
: enabled_features_(enabled),
module_(std::move(module)),
......@@ -390,7 +390,7 @@ NativeModule::NativeModule(Isolate* isolate, const WasmFeatures& enabled,
import_wrapper_cache_(std::unique_ptr<WasmImportWrapperCache>(
new WasmImportWrapperCache(this))),
free_code_space_(code_space.region()),
code_manager_(code_manager),
engine_(engine),
can_request_more_memory_(can_request_more),
use_trap_handler_(trap_handler::IsTrapHandlerEnabled() ? kUseTrapHandler
: kNoTrapHandler) {
......@@ -733,14 +733,15 @@ Vector<byte> NativeModule::AllocateForCode(size_t size) {
Address hint = owned_code_space_.empty() ? kNullAddress
: owned_code_space_.back().end();
VirtualMemory new_mem =
code_manager_->TryAllocate(size, reinterpret_cast<void*>(hint));
VirtualMemory new_mem = engine_->code_manager()->TryAllocate(
size, reinterpret_cast<void*>(hint));
if (!new_mem.IsReserved()) {
V8::FatalProcessOutOfMemory(nullptr,
"NativeModule::AllocateForCode reservation");
UNREACHABLE();
}
code_manager_->AssignRanges(new_mem.address(), new_mem.end(), this);
engine_->code_manager()->AssignRanges(new_mem.address(), new_mem.end(),
this);
free_code_space_.Merge(new_mem.region());
owned_code_space_.emplace_back(std::move(new_mem));
......@@ -773,7 +774,7 @@ Vector<byte> NativeModule::AllocateForCode(size_t size) {
Address start = std::max(commit_start, vmem.address());
Address end = std::min(commit_end, vmem.end());
size_t commit_size = static_cast<size_t>(end - start);
if (!code_manager_->Commit(start, commit_size)) {
if (!engine_->code_manager()->Commit(start, commit_size)) {
V8::FatalProcessOutOfMemory(nullptr,
"NativeModule::AllocateForCode commit");
UNREACHABLE();
......@@ -785,7 +786,8 @@ Vector<byte> NativeModule::AllocateForCode(size_t size) {
if (commit_start >= commit_end) break;
}
#else
if (!code_manager_->Commit(commit_start, commit_end - commit_start)) {
if (!engine_->code_manager()->Commit(commit_start,
commit_end - commit_start)) {
V8::FatalProcessOutOfMemory(nullptr,
"NativeModule::AllocateForCode commit");
UNREACHABLE();
......@@ -880,7 +882,7 @@ NativeModule::~NativeModule() {
// Cancel all background compilation before resetting any field of the
// NativeModule or freeing anything.
compilation_state_->CancelAndWait();
code_manager_->FreeNativeModule(this);
engine_->FreeNativeModule(this);
}
WasmCodeManager::WasmCodeManager(WasmMemoryTracker* memory_tracker,
......@@ -931,13 +933,6 @@ void WasmCodeManager::AssignRanges(Address start, Address end,
lookup_map_.insert(std::make_pair(start, std::make_pair(end, native_module)));
}
void WasmCodeManager::AssignRangesAndAddModule(Address start, Address end,
NativeModule* native_module) {
base::MutexGuard lock(&native_modules_mutex_);
lookup_map_.insert(std::make_pair(start, std::make_pair(end, native_module)));
native_modules_.emplace(native_module);
}
VirtualMemory WasmCodeManager::TryAllocate(size_t size, void* hint) {
v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
DCHECK_GT(size, 0);
......@@ -966,36 +961,11 @@ VirtualMemory WasmCodeManager::TryAllocate(size_t size, void* hint) {
return mem;
}
void WasmCodeManager::SampleModuleSizes(Isolate* isolate) const {
base::MutexGuard lock(&native_modules_mutex_);
for (NativeModule* native_module : native_modules_) {
int code_size =
static_cast<int>(native_module->committed_code_space_.load() / MB);
isolate->counters()->wasm_module_code_size_mb()->AddSample(code_size);
}
}
void WasmCodeManager::SetMaxCommittedMemoryForTesting(size_t limit) {
remaining_uncommitted_code_space_.store(limit);
critical_uncommitted_code_space_.store(limit / 2);
}
namespace {
void ModuleSamplingCallback(v8::Isolate* v8_isolate, v8::GCType type,
v8::GCCallbackFlags flags, void* data) {
Isolate* isolate = reinterpret_cast<Isolate*>(v8_isolate);
isolate->wasm_engine()->code_manager()->SampleModuleSizes(isolate);
}
} // namespace
// static
void WasmCodeManager::InstallSamplingGCCallback(Isolate* isolate) {
isolate->heap()->AddGCEpilogueCallback(ModuleSamplingCallback,
v8::kGCTypeMarkSweepCompact, nullptr);
}
// static
size_t WasmCodeManager::EstimateNativeModuleCodeSize(const WasmModule* module) {
constexpr size_t kCodeSizeMultiplier = 4;
......@@ -1031,8 +1001,9 @@ size_t WasmCodeManager::EstimateNativeModuleNonCodeSize(
}
std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule(
Isolate* isolate, const WasmFeatures& enabled, size_t code_size_estimate,
bool can_request_more, std::shared_ptr<const WasmModule> module) {
WasmEngine* engine, Isolate* isolate, const WasmFeatures& enabled,
size_t code_size_estimate, bool can_request_more,
std::shared_ptr<const WasmModule> module) {
DCHECK_EQ(this, isolate->wasm_engine()->code_manager());
if (remaining_uncommitted_code_space_.load() <
critical_uncommitted_code_space_.load()) {
......@@ -1065,12 +1036,13 @@ std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule(
Address start = code_space.address();
size_t size = code_space.size();
Address end = code_space.end();
std::unique_ptr<NativeModule> ret(new NativeModule(
isolate, enabled, can_request_more, std::move(code_space),
isolate->wasm_engine()->code_manager(), std::move(module)));
std::unique_ptr<NativeModule> ret(
new NativeModule(engine, isolate, enabled, can_request_more,
std::move(code_space), std::move(module)));
TRACE_HEAP("New NativeModule %p: Mem: %" PRIuPTR ",+%zu\n", ret.get(), start,
size);
AssignRangesAndAddModule(start, end, ret.get());
base::MutexGuard lock(&native_modules_mutex_);
lookup_map_.insert(std::make_pair(start, std::make_pair(end, ret.get())));
return ret;
}
......@@ -1125,8 +1097,6 @@ bool NativeModule::SetExecutable(bool executable) {
void WasmCodeManager::FreeNativeModule(NativeModule* native_module) {
base::MutexGuard lock(&native_modules_mutex_);
DCHECK_EQ(1, native_modules_.count(native_module));
native_modules_.erase(native_module);
TRACE_HEAP("Freeing NativeModule %p\n", native_module);
for (auto& code_space : native_module->owned_code_space_) {
DCHECK(code_space.IsReserved());
......
......@@ -30,6 +30,7 @@ namespace wasm {
class NativeModule;
class WasmCodeManager;
class WasmEngine;
class WasmMemoryTracker;
class WasmImportWrapperCache;
struct WasmModule;
......@@ -361,9 +362,9 @@ class V8_EXPORT_PRIVATE NativeModule final {
friend class WasmCodeManager;
friend class NativeModuleModificationScope;
NativeModule(Isolate* isolate, const WasmFeatures& enabled_features,
bool can_request_more, VirtualMemory code_space,
WasmCodeManager* code_manager,
NativeModule(WasmEngine* engine, Isolate* isolate,
const WasmFeatures& enabled_features, bool can_request_more,
VirtualMemory code_space,
std::shared_ptr<const WasmModule> module);
WasmCode* AddAnonymousCode(Handle<Code>, WasmCode::Kind kind,
......@@ -468,7 +469,7 @@ class V8_EXPORT_PRIVATE NativeModule final {
// End of fields protected by {allocation_mutex_}.
//////////////////////////////////////////////////////////////////////////////
WasmCodeManager* const code_manager_;
WasmEngine* const engine_;
std::atomic<size_t> committed_code_space_{0};
int modification_scope_depth_ = 0;
bool can_request_more_memory_;
......@@ -483,36 +484,24 @@ class V8_EXPORT_PRIVATE WasmCodeManager final {
public:
explicit WasmCodeManager(WasmMemoryTracker* memory_tracker,
size_t max_committed);
// Create a new NativeModule. The caller is responsible for its
// lifetime. The native module will be given some memory for code,
// which will be page size aligned. The size of the initial memory
// is determined with a heuristic based on the total size of wasm
// code. The native module may later request more memory.
// TODO(titzer): isolate is only required here for CompilationState.
std::unique_ptr<NativeModule> NewNativeModule(
Isolate* isolate, const WasmFeatures& enabled_features,
size_t code_size_estimate, bool can_request_more,
std::shared_ptr<const WasmModule> module);
NativeModule* LookupNativeModule(Address pc) const;
WasmCode* LookupCode(Address pc) const;
size_t remaining_uncommitted_code_space() const;
// Add a sample of all module sizes.
void SampleModuleSizes(Isolate* isolate) const;
void SetMaxCommittedMemoryForTesting(size_t limit);
// TODO(v8:7424): For now we sample module sizes in a GC callback. This will
// bias samples towards apps with high memory pressure. We should switch to
// using sampling based on regular intervals independent of the GC.
static void InstallSamplingGCCallback(Isolate* isolate);
static size_t EstimateNativeModuleCodeSize(const WasmModule* module);
static size_t EstimateNativeModuleNonCodeSize(const WasmModule* module);
private:
friend class NativeModule;
friend class WasmEngine;
std::unique_ptr<NativeModule> NewNativeModule(
WasmEngine* engine, Isolate* isolate,
const WasmFeatures& enabled_features, size_t code_size_estimate,
bool can_request_more, std::shared_ptr<const WasmModule> module);
V8_WARN_UNUSED_RESULT VirtualMemory TryAllocate(size_t size,
void* hint = nullptr);
......@@ -522,8 +511,8 @@ class V8_EXPORT_PRIVATE WasmCodeManager final {
// There's no separate Uncommit.
void FreeNativeModule(NativeModule*);
void AssignRanges(Address start, Address end, NativeModule*);
void AssignRangesAndAddModule(Address start, Address end, NativeModule*);
WasmMemoryTracker* const memory_tracker_;
std::atomic<size_t> remaining_uncommitted_code_space_;
......@@ -538,7 +527,6 @@ class V8_EXPORT_PRIVATE WasmCodeManager final {
// Protected by {native_modules_mutex_}:
std::map<Address, std::pair<Address, NativeModule*>> lookup_map_;
std::unordered_set<NativeModule*> native_modules_;
// End of fields protected by {native_modules_mutex_}.
//////////////////////////////////////////////////////////////////////////////
......
......@@ -6,6 +6,7 @@
#include "src/code-tracer.h"
#include "src/compilation-statistics.h"
#include "src/counters.h"
#include "src/objects-inl.h"
#include "src/objects/heap-number.h"
#include "src/objects/js-promise.h"
......@@ -21,6 +22,12 @@ namespace v8 {
namespace internal {
namespace wasm {
struct WasmEngine::IsolateInfo {
// All native modules that are being used by this Isolate (currently only
// grows, never shrinks).
std::set<NativeModule*> native_modules;
};
WasmEngine::WasmEngine()
: code_manager_(&memory_tracker_, FLAG_wasm_max_code_space * MB) {}
......@@ -31,6 +38,8 @@ WasmEngine::~WasmEngine() {
DCHECK(jobs_.empty());
// All Isolates have been deregistered.
DCHECK(isolates_.empty());
// All NativeModules have been freed.
DCHECK(isolates_per_native_module_.empty());
}
bool WasmEngine::SyncValidate(Isolate* isolate, const WasmFeatures& enabled,
......@@ -246,16 +255,24 @@ std::shared_ptr<NativeModule> WasmEngine::ExportNativeModule(
}
Handle<WasmModuleObject> WasmEngine::ImportNativeModule(
Isolate* isolate, std::shared_ptr<NativeModule> shared_module) {
ModuleWireBytes wire_bytes(shared_module->wire_bytes());
const WasmModule* module = shared_module->module();
Isolate* isolate, std::shared_ptr<NativeModule> shared_native_module) {
NativeModule* native_module = shared_native_module.get();
ModuleWireBytes wire_bytes(native_module->wire_bytes());
const WasmModule* module = native_module->module();
Handle<Script> script =
CreateWasmScript(isolate, wire_bytes, module->source_map_url);
size_t code_size = shared_module->committed_code_space();
size_t code_size = native_module->committed_code_space();
Handle<WasmModuleObject> module_object = WasmModuleObject::New(
isolate, std::move(shared_module), script, code_size);
CompileJsToWasmWrappers(isolate, module_object->native_module()->module(),
isolate, std::move(shared_native_module), script, code_size);
CompileJsToWasmWrappers(isolate, native_module->module(),
handle(module_object->export_wrappers(), isolate));
{
base::MutexGuard lock(&mutex_);
DCHECK_EQ(1, isolates_.count(isolate));
isolates_[isolate]->native_modules.insert(native_module);
DCHECK_EQ(1, isolates_per_native_module_.count(native_module));
isolates_per_native_module_[native_module].insert(isolate);
}
return module_object;
}
......@@ -315,13 +332,19 @@ bool WasmEngine::HasRunningCompileJob(Isolate* isolate) {
}
void WasmEngine::DeleteCompileJobsOnIsolate(Isolate* isolate) {
base::MutexGuard guard(&mutex_);
DCHECK_EQ(1, isolates_.count(isolate));
for (auto it = jobs_.begin(); it != jobs_.end();) {
if (it->first->isolate() == isolate) {
// Under the mutex get all jobs to delete. Then delete them without holding
// the mutex, such that deletion can reenter the WasmEngine.
std::vector<std::unique_ptr<AsyncCompileJob>> jobs_to_delete;
{
base::MutexGuard guard(&mutex_);
DCHECK_EQ(1, isolates_.count(isolate));
for (auto it = jobs_.begin(); it != jobs_.end();) {
if (it->first->isolate() != isolate) {
++it;
continue;
}
jobs_to_delete.push_back(std::move(it->second));
it = jobs_.erase(it);
} else {
++it;
}
}
}
......@@ -329,13 +352,66 @@ void WasmEngine::DeleteCompileJobsOnIsolate(Isolate* isolate) {
void WasmEngine::AddIsolate(Isolate* isolate) {
base::MutexGuard guard(&mutex_);
DCHECK_EQ(0, isolates_.count(isolate));
isolates_.insert(isolate);
isolates_.emplace(isolate, base::make_unique<IsolateInfo>());
// Install sampling GC callback.
// TODO(v8:7424): For now we sample module sizes in a GC callback. This will
// bias samples towards apps with high memory pressure. We should switch to
// using sampling based on regular intervals independent of the GC.
auto callback = [](v8::Isolate* v8_isolate, v8::GCType type,
v8::GCCallbackFlags flags, void* data) {
Isolate* isolate = reinterpret_cast<Isolate*>(v8_isolate);
WasmEngine* engine = isolate->wasm_engine();
base::MutexGuard lock(&engine->mutex_);
DCHECK_EQ(1, engine->isolates_.count(isolate));
for (NativeModule* native_module :
engine->isolates_[isolate]->native_modules) {
int code_size =
static_cast<int>(native_module->committed_code_space() / MB);
isolate->counters()->wasm_module_code_size_mb()->AddSample(code_size);
}
};
isolate->heap()->AddGCEpilogueCallback(callback, v8::kGCTypeMarkSweepCompact,
nullptr);
}
void WasmEngine::RemoveIsolate(Isolate* isolate) {
base::MutexGuard guard(&mutex_);
auto it = isolates_.find(isolate);
DCHECK_NE(isolates_.end(), it);
for (NativeModule* native_module : it->second->native_modules) {
DCHECK_EQ(1, isolates_per_native_module_[native_module].count(isolate));
isolates_per_native_module_[native_module].erase(isolate);
}
isolates_.erase(it);
}
std::unique_ptr<NativeModule> WasmEngine::NewNativeModule(
Isolate* isolate, const WasmFeatures& enabled, size_t code_size_estimate,
bool can_request_more, std::shared_ptr<const WasmModule> module) {
std::unique_ptr<NativeModule> native_module =
code_manager_.NewNativeModule(this, isolate, enabled, code_size_estimate,
can_request_more, std::move(module));
base::MutexGuard lock(&mutex_);
isolates_per_native_module_[native_module.get()].insert(isolate);
DCHECK_EQ(1, isolates_.count(isolate));
isolates_.erase(isolate);
isolates_[isolate]->native_modules.insert(native_module.get());
return native_module;
}
void WasmEngine::FreeNativeModule(NativeModule* native_module) {
{
base::MutexGuard guard(&mutex_);
auto it = isolates_per_native_module_.find(native_module);
DCHECK_NE(isolates_per_native_module_.end(), it);
for (Isolate* isolate : it->second) {
DCHECK_EQ(1, isolates_.count(isolate));
DCHECK_EQ(1, isolates_[isolate]->native_modules.count(native_module));
isolates_[isolate]->native_modules.erase(native_module);
}
isolates_per_native_module_.erase(it);
}
code_manager_.FreeNativeModule(native_module);
}
namespace {
......
......@@ -147,6 +147,25 @@ class V8_EXPORT_PRIVATE WasmEngine {
void AddIsolate(Isolate* isolate);
void RemoveIsolate(Isolate* isolate);
template <typename T, typename... Args>
std::unique_ptr<T> NewBackgroundCompileTask(Args&&... args) {
return base::make_unique<T>(&background_compile_task_manager_,
std::forward<Args>(args)...);
}
// Create a new NativeModule. The caller is responsible for its
// lifetime. The native module will be given some memory for code,
// which will be page size aligned. The size of the initial memory
// is determined with a heuristic based on the total size of wasm
// code. The native module may later request more memory.
// TODO(titzer): isolate is only required here for CompilationState.
std::unique_ptr<NativeModule> NewNativeModule(
Isolate* isolate, const WasmFeatures& enabled_features,
size_t code_size_estimate, bool can_request_more,
std::shared_ptr<const WasmModule> module);
void FreeNativeModule(NativeModule*);
// Call on process start and exit.
static void InitializeOncePerProcess();
static void GlobalTearDown();
......@@ -155,13 +174,9 @@ class V8_EXPORT_PRIVATE WasmEngine {
// engines this might be a pointer to a new instance or to a shared one.
static std::shared_ptr<WasmEngine> GetWasmEngine();
template <typename T, typename... Args>
std::unique_ptr<T> NewBackgroundCompileTask(Args&&... args) {
return base::make_unique<T>(&background_compile_task_manager_,
std::forward<Args>(args)...);
}
private:
struct IsolateInfo;
AsyncCompileJob* CreateAsyncCompileJob(
Isolate* isolate, const WasmFeatures& enabled,
std::unique_ptr<byte[]> bytes_copy, size_t length,
......@@ -190,8 +205,13 @@ class V8_EXPORT_PRIVATE WasmEngine {
std::unique_ptr<CompilationStatistics> compilation_stats_;
std::unique_ptr<CodeTracer> code_tracer_;
// Set of isolates which use this WasmEngine. Used for cross-isolate GCs.
std::unordered_set<Isolate*> isolates_;
// Set of isolates which use this WasmEngine.
std::unordered_map<Isolate*, std::unique_ptr<IsolateInfo>> isolates_;
// Maps each NativeModule to the set of Isolates that have access to that
// NativeModule. The isolate sets currently only grow, they never shrink.
std::unordered_map<NativeModule*, std::unordered_set<Isolate*>>
isolates_per_native_module_;
// End of fields protected by {mutex_}.
//////////////////////////////////////////////////////////////////////////////
......
......@@ -210,7 +210,7 @@ Handle<WasmModuleObject> WasmModuleObject::New(
// Create a new {NativeModule} first.
size_t code_size_estimate =
wasm::WasmCodeManager::EstimateNativeModuleCodeSize(shared_module.get());
auto native_module = isolate->wasm_engine()->code_manager()->NewNativeModule(
auto native_module = isolate->wasm_engine()->NewNativeModule(
isolate, enabled, code_size_estimate,
wasm::NativeModule::kCanAllocateMoreMemory, std::move(shared_module));
native_module->SetWireBytes(std::move(wire_bytes));
......
......@@ -125,7 +125,7 @@ std::unique_ptr<wasm::NativeModule> AllocateNativeModule(Isolate* isolate,
// We have to add the code object to a NativeModule, because the
// WasmCallDescriptor assumes that code is on the native heap and not
// within a code object.
return isolate->wasm_engine()->code_manager()->NewNativeModule(
return isolate->wasm_engine()->NewNativeModule(
isolate, wasm::kAllWasmFeatures, code_size, false, std::move(module));
}
......
......@@ -18,11 +18,10 @@ namespace wasm {
namespace test_wasm_import_wrapper_cache {
std::unique_ptr<NativeModule> NewModule(Isolate* isolate) {
WasmCodeManager* manager = isolate->wasm_engine()->code_manager();
std::shared_ptr<WasmModule> module(new WasmModule);
bool can_request_more = false;
size_t size = 16384;
auto native_module = manager->NewNativeModule(
auto native_module = isolate->wasm_engine()->NewNativeModule(
isolate, kAllWasmFeatures, size, can_request_more, std::move(module));
native_module->SetRuntimeStubs(isolate);
return native_module;
......
......@@ -142,7 +142,7 @@ std::unique_ptr<wasm::NativeModule> AllocateNativeModule(i::Isolate* isolate,
// We have to add the code object to a NativeModule, because the
// WasmCallDescriptor assumes that code is on the native heap and not
// within a code object.
return isolate->wasm_engine()->code_manager()->NewNativeModule(
return isolate->wasm_engine()->NewNativeModule(
isolate, i::wasm::kAllWasmFeatures, code_size, false, std::move(module));
}
......
......@@ -165,8 +165,8 @@ class WasmCodeManagerTest : public TestWithContext,
std::shared_ptr<WasmModule> module(new WasmModule);
module->num_declared_functions = kNumFunctions;
bool can_request_more = style == Growable;
return manager()->NewNativeModule(i_isolate(), kAllWasmFeatures, size,
can_request_more, std::move(module));
return engine()->NewNativeModule(i_isolate(), kAllWasmFeatures, size,
can_request_more, std::move(module));
}
WasmCode* AddCode(NativeModule* native_module, uint32_t index, size_t size) {
......@@ -181,9 +181,9 @@ class WasmCodeManagerTest : public TestWithContext,
size_t page() const { return AllocatePageSize(); }
WasmCodeManager* manager() {
return i_isolate()->wasm_engine()->code_manager();
}
WasmEngine* engine() { return i_isolate()->wasm_engine(); }
WasmCodeManager* manager() { return engine()->code_manager(); }
void SetMaxCommittedMemory(size_t limit) {
manager()->SetMaxCommittedMemoryForTesting(limit);
......