Commit ad57eec5 authored by Clemens Hammacher, committed by Commit Bot

[wasm] Store WasmModule directly in the NativeModule

Instead of storing both the {NativeModule} and the {WasmModule} in a
{Managed} object, just store the {WasmModule} in the {NativeModule}
directly. This fixes crashes that happen if the {Managed<WasmModule>}
dies before the {Managed<NativeModule>}.

R=mstarzinger@chromium.org

Bug: chromium:854794, v8:7879, v8:7889
Change-Id: I6b11729943fe7a03d225138782655ee5dafd26a6
Reviewed-on: https://chromium-review.googlesource.com/1118171
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#54090}
parent 00f3ab17
...@@ -1579,8 +1579,6 @@ void WasmModuleObject::WasmModuleObjectVerify(Isolate* isolate) { ...@@ -1579,8 +1579,6 @@ void WasmModuleObject::WasmModuleObjectVerify(Isolate* isolate) {
CHECK(IsWasmModuleObject()); CHECK(IsWasmModuleObject());
VerifyObjectField(kNativeModuleOffset); VerifyObjectField(kNativeModuleOffset);
VerifyObjectField(kExportWrappersOffset); VerifyObjectField(kExportWrappersOffset);
VerifyObjectField(kManagedModuleOffset);
CHECK(managed_module()->IsForeign());
VerifyObjectField(kScriptOffset); VerifyObjectField(kScriptOffset);
VerifyObjectField(kAsmJsOffsetTableOffset); VerifyObjectField(kAsmJsOffsetTableOffset);
VerifyObjectField(kBreakPointInfosOffset); VerifyObjectField(kBreakPointInfosOffset);
......
...@@ -64,7 +64,7 @@ enum class CompileMode : uint8_t { kRegular, kTiering }; ...@@ -64,7 +64,7 @@ enum class CompileMode : uint8_t { kRegular, kTiering };
// compilation of functions. // compilation of functions.
class CompilationState { class CompilationState {
public: public:
CompilationState(internal::Isolate* isolate, ModuleEnv& env); CompilationState(internal::Isolate*, const ModuleEnv&);
~CompilationState(); ~CompilationState();
// Needs to be set before {AddCompilationUnits} is run, which triggers // Needs to be set before {AddCompilationUnits} is run, which triggers
...@@ -134,6 +134,7 @@ class CompilationState { ...@@ -134,6 +134,7 @@ class CompilationState {
} }
Isolate* const isolate_; Isolate* const isolate_;
// TODO(clemensh): Remove ModuleEnv, generate it when needed.
ModuleEnv module_env_; ModuleEnv module_env_;
const size_t max_memory_; const size_t max_memory_;
const CompileMode compile_mode_; const CompileMode compile_mode_;
...@@ -2846,7 +2847,7 @@ void CompilationStateDeleter::operator()( ...@@ -2846,7 +2847,7 @@ void CompilationStateDeleter::operator()(
} }
std::unique_ptr<CompilationState, CompilationStateDeleter> NewCompilationState( std::unique_ptr<CompilationState, CompilationStateDeleter> NewCompilationState(
Isolate* isolate, ModuleEnv& env) { Isolate* isolate, const ModuleEnv& env) {
return std::unique_ptr<CompilationState, CompilationStateDeleter>( return std::unique_ptr<CompilationState, CompilationStateDeleter>(
new CompilationState(isolate, env)); new CompilationState(isolate, env));
} }
...@@ -2855,13 +2856,12 @@ ModuleEnv* GetModuleEnv(CompilationState* compilation_state) { ...@@ -2855,13 +2856,12 @@ ModuleEnv* GetModuleEnv(CompilationState* compilation_state) {
return compilation_state->module_env(); return compilation_state->module_env();
} }
CompilationState::CompilationState(internal::Isolate* isolate, ModuleEnv& env) CompilationState::CompilationState(internal::Isolate* isolate,
const ModuleEnv& env)
: isolate_(isolate), : isolate_(isolate),
module_env_(env), module_env_(env),
max_memory_(GetMaxUsableMemorySize(isolate) / 2), max_memory_(GetMaxUsableMemorySize(isolate) / 2),
// TODO(clemensh): Fix fuzzers such that {env.module} is always non-null. compile_mode_(FLAG_wasm_tier_up && env.module->origin == kWasmOrigin
compile_mode_(FLAG_wasm_tier_up &&
(!env.module || env.module->origin == kWasmOrigin)
? CompileMode::kTiering ? CompileMode::kTiering
: CompileMode::kRegular), : CompileMode::kRegular),
wire_bytes_(ModuleWireBytes(nullptr, nullptr)), wire_bytes_(ModuleWireBytes(nullptr, nullptr)),
......
...@@ -43,7 +43,7 @@ struct CompilationStateDeleter { ...@@ -43,7 +43,7 @@ struct CompilationStateDeleter {
// Wrapper to create a CompilationState exists in order to avoid having // Wrapper to create a CompilationState exists in order to avoid having
// the the CompilationState in the header file. // the the CompilationState in the header file.
std::unique_ptr<CompilationState, CompilationStateDeleter> NewCompilationState( std::unique_ptr<CompilationState, CompilationStateDeleter> NewCompilationState(
Isolate* isolate, ModuleEnv& env); Isolate* isolate, const ModuleEnv& env);
ModuleEnv* GetModuleEnv(CompilationState* compilation_state); ModuleEnv* GetModuleEnv(CompilationState* compilation_state);
......
...@@ -42,12 +42,6 @@ struct WasmCodeUniquePtrComparator { ...@@ -42,12 +42,6 @@ struct WasmCodeUniquePtrComparator {
} }
}; };
#if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_S390X || V8_TARGET_ARCH_ARM64
constexpr bool kModuleCanAllocateMoreMemory = false;
#else
constexpr bool kModuleCanAllocateMoreMemory = true;
#endif
} // namespace } // namespace
void DisjointAllocationPool::Merge(AddressRange range) { void DisjointAllocationPool::Merge(AddressRange range) {
...@@ -294,24 +288,25 @@ WasmCode::~WasmCode() { ...@@ -294,24 +288,25 @@ WasmCode::~WasmCode() {
} }
} }
NativeModule::NativeModule(Isolate* isolate, uint32_t num_functions, NativeModule::NativeModule(Isolate* isolate, bool can_request_more,
uint32_t num_imported_functions, VirtualMemory* code_space,
bool can_request_more, VirtualMemory* code_space, WasmCodeManager* code_manager,
WasmCodeManager* code_manager, ModuleEnv& env) std::shared_ptr<const WasmModule> module,
: num_functions_(num_functions), const ModuleEnv& env)
num_imported_functions_(num_imported_functions), : module_(std::move(module)),
compilation_state_(NewCompilationState(isolate, env)), compilation_state_(NewCompilationState(isolate, env)),
free_code_space_({code_space->address(), code_space->end()}), free_code_space_({code_space->address(), code_space->end()}),
wasm_code_manager_(code_manager), wasm_code_manager_(code_manager),
can_request_more_memory_(can_request_more), can_request_more_memory_(can_request_more),
use_trap_handler_(env.use_trap_handler) { use_trap_handler_(env.use_trap_handler) {
DCHECK_EQ(module_.get(), env.module);
DCHECK_NOT_NULL(module_);
VirtualMemory my_mem; VirtualMemory my_mem;
owned_code_space_.push_back(my_mem); owned_code_space_.push_back(my_mem);
owned_code_space_.back().TakeControl(code_space); owned_code_space_.back().TakeControl(code_space);
owned_code_.reserve(num_functions); owned_code_.reserve(num_functions());
DCHECK_LE(num_imported_functions, num_functions); uint32_t num_wasm_functions = module_->num_declared_functions;
uint32_t num_wasm_functions = num_functions - num_imported_functions;
if (num_wasm_functions > 0) { if (num_wasm_functions > 0) {
code_table_.reset(new WasmCode*[num_wasm_functions]); code_table_.reset(new WasmCode*[num_wasm_functions]);
memset(code_table_.get(), 0, num_wasm_functions * sizeof(WasmCode*)); memset(code_table_.get(), 0, num_wasm_functions * sizeof(WasmCode*));
...@@ -321,20 +316,15 @@ NativeModule::NativeModule(Isolate* isolate, uint32_t num_functions, ...@@ -321,20 +316,15 @@ NativeModule::NativeModule(Isolate* isolate, uint32_t num_functions,
} }
void NativeModule::ReserveCodeTableForTesting(uint32_t max_functions) { void NativeModule::ReserveCodeTableForTesting(uint32_t max_functions) {
DCHECK_LE(num_functions_, max_functions); DCHECK_LE(num_functions(), max_functions);
uint32_t num_wasm = num_functions_ - num_imported_functions_; WasmCode** new_table = new WasmCode*[max_functions];
uint32_t max_wasm = max_functions - num_imported_functions_; memset(new_table, 0, max_functions * sizeof(*new_table));
WasmCode** new_table = new WasmCode*[max_wasm]; memcpy(new_table, code_table_.get(),
memset(new_table, 0, max_wasm * sizeof(*new_table)); module_->num_declared_functions * sizeof(*new_table));
memcpy(new_table, code_table_.get(), num_wasm * sizeof(*new_table));
code_table_.reset(new_table); code_table_.reset(new_table);
// Re-allocate jump table. // Re-allocate jump table.
jump_table_ = CreateEmptyJumpTable(max_wasm); jump_table_ = CreateEmptyJumpTable(max_functions);
}
void NativeModule::SetNumFunctionsForTesting(uint32_t num_functions) {
num_functions_ = num_functions;
} }
void NativeModule::LogWasmCodes(Isolate* isolate) { void NativeModule::LogWasmCodes(Isolate* isolate) {
...@@ -394,7 +384,7 @@ WasmCode* NativeModule::AddCodeCopy(Handle<Code> code, WasmCode::Kind kind, ...@@ -394,7 +384,7 @@ WasmCode* NativeModule::AddCodeCopy(Handle<Code> code, WasmCode::Kind kind,
// this NativeModule is a memory leak until the whole NativeModule dies. // this NativeModule is a memory leak until the whole NativeModule dies.
WasmCode* ret = AddAnonymousCode(code, kind); WasmCode* ret = AddAnonymousCode(code, kind);
ret->index_ = Just(index); ret->index_ = Just(index);
if (index >= num_imported_functions_) set_code(index, ret); if (index >= module_->num_imported_functions) set_code(index, ret);
return ret; return ret;
} }
...@@ -406,7 +396,7 @@ WasmCode* NativeModule::AddInterpreterEntry(Handle<Code> code, uint32_t index) { ...@@ -406,7 +396,7 @@ WasmCode* NativeModule::AddInterpreterEntry(Handle<Code> code, uint32_t index) {
} }
void NativeModule::SetLazyBuiltin(Handle<Code> code) { void NativeModule::SetLazyBuiltin(Handle<Code> code) {
uint32_t num_wasm_functions = num_functions_ - num_imported_functions_; uint32_t num_wasm_functions = module_->num_declared_functions;
if (num_wasm_functions == 0) return; if (num_wasm_functions == 0) return;
WasmCode* lazy_builtin = AddAnonymousCode(code, WasmCode::kLazyStub); WasmCode* lazy_builtin = AddAnonymousCode(code, WasmCode::kLazyStub);
// Fill the jump table with jumps to the lazy compile stub. // Fill the jump table with jumps to the lazy compile stub.
...@@ -417,7 +407,7 @@ void NativeModule::SetLazyBuiltin(Handle<Code> code) { ...@@ -417,7 +407,7 @@ void NativeModule::SetLazyBuiltin(Handle<Code> code) {
for (uint32_t i = 0; i < num_wasm_functions; ++i) { for (uint32_t i = 0; i < num_wasm_functions; ++i) {
// Check that the offset in the jump table increases as expected. // Check that the offset in the jump table increases as expected.
DCHECK_EQ(i * JumpTableAssembler::kJumpTableSlotSize, jtasm.pc_offset()); DCHECK_EQ(i * JumpTableAssembler::kJumpTableSlotSize, jtasm.pc_offset());
jtasm.EmitLazyCompileJumpSlot(i + num_imported_functions_, jtasm.EmitLazyCompileJumpSlot(i + module_->num_imported_functions,
lazy_compile_target); lazy_compile_target);
jtasm.NopBytes((i + 1) * JumpTableAssembler::kJumpTableSlotSize - jtasm.NopBytes((i + 1) * JumpTableAssembler::kJumpTableSlotSize -
jtasm.pc_offset()); jtasm.pc_offset());
...@@ -600,8 +590,8 @@ WasmCode* NativeModule::CreateEmptyJumpTable(uint32_t num_wasm_functions) { ...@@ -600,8 +590,8 @@ WasmCode* NativeModule::CreateEmptyJumpTable(uint32_t num_wasm_functions) {
void NativeModule::PatchJumpTable(uint32_t func_index, Address target, void NativeModule::PatchJumpTable(uint32_t func_index, Address target,
WasmCode::FlushICache flush_icache) { WasmCode::FlushICache flush_icache) {
DCHECK_LE(num_imported_functions_, func_index); DCHECK_LE(module_->num_imported_functions, func_index);
uint32_t slot_idx = func_index - num_imported_functions_; uint32_t slot_idx = func_index - module_->num_imported_functions;
Address jump_table_slot = jump_table_->instruction_start() + Address jump_table_slot = jump_table_->instruction_start() +
slot_idx * JumpTableAssembler::kJumpTableSlotSize; slot_idx * JumpTableAssembler::kJumpTableSlotSize;
JumpTableAssembler::PatchJumpTableSlot(jump_table_slot, target, flush_icache); JumpTableAssembler::PatchJumpTableSlot(jump_table_slot, target, flush_icache);
...@@ -695,7 +685,7 @@ Address NativeModule::GetCallTargetForFunction(uint32_t func_index) const { ...@@ -695,7 +685,7 @@ Address NativeModule::GetCallTargetForFunction(uint32_t func_index) const {
// Return the jump table slot for that function index. // Return the jump table slot for that function index.
DCHECK_NOT_NULL(jump_table_); DCHECK_NOT_NULL(jump_table_);
uint32_t slot_idx = func_index - num_imported_functions_; uint32_t slot_idx = func_index - module_->num_imported_functions;
DCHECK_LT(slot_idx, jump_table_->instructions().size() / DCHECK_LT(slot_idx, jump_table_->instructions().size() /
JumpTableAssembler::kJumpTableSlotSize); JumpTableAssembler::kJumpTableSlotSize);
return jump_table_->instruction_start() + return jump_table_->instruction_start() +
...@@ -707,8 +697,8 @@ uint32_t NativeModule::GetFunctionIndexFromJumpTableSlot(Address slot_address) { ...@@ -707,8 +697,8 @@ uint32_t NativeModule::GetFunctionIndexFromJumpTableSlot(Address slot_address) {
uint32_t offset = uint32_t offset =
static_cast<uint32_t>(slot_address - jump_table_->instruction_start()); static_cast<uint32_t>(slot_address - jump_table_->instruction_start());
uint32_t slot_idx = offset / JumpTableAssembler::kJumpTableSlotSize; uint32_t slot_idx = offset / JumpTableAssembler::kJumpTableSlotSize;
DCHECK_LT(slot_idx, num_functions_ - num_imported_functions_); DCHECK_LT(slot_idx, module_->num_declared_functions);
return num_imported_functions_ + slot_idx; return module_->num_imported_functions + slot_idx;
} }
void NativeModule::DisableTrapHandler() { void NativeModule::DisableTrapHandler() {
...@@ -718,7 +708,7 @@ void NativeModule::DisableTrapHandler() { ...@@ -718,7 +708,7 @@ void NativeModule::DisableTrapHandler() {
// Clear the code table (just to increase the chances to hit an error if we // Clear the code table (just to increase the chances to hit an error if we
// forget to re-add all code). // forget to re-add all code).
uint32_t num_wasm_functions = num_functions_ - num_imported_functions_; uint32_t num_wasm_functions = module_->num_declared_functions;
memset(code_table_.get(), 0, num_wasm_functions * sizeof(WasmCode*)); memset(code_table_.get(), 0, num_wasm_functions * sizeof(WasmCode*));
// TODO(clemensh): Actually free the owned code, such that the memory can be // TODO(clemensh): Actually free the owned code, such that the memory can be
...@@ -790,8 +780,7 @@ size_t WasmCodeManager::EstimateNativeModuleSize(const WasmModule* module) { ...@@ -790,8 +780,7 @@ size_t WasmCodeManager::EstimateNativeModuleSize(const WasmModule* module) {
constexpr size_t kCodeSizeMultiplier = 4; constexpr size_t kCodeSizeMultiplier = 4;
constexpr size_t kImportSize = 32 * kPointerSize; constexpr size_t kImportSize = 32 * kPointerSize;
uint32_t num_functions = static_cast<uint32_t>(module->functions.size()); uint32_t num_wasm_functions = module->num_declared_functions;
uint32_t num_wasm_functions = num_functions - module->num_imported_functions;
size_t estimate = size_t estimate =
AllocatePageSize() /* TODO(titzer): 1 page spot bonus */ + AllocatePageSize() /* TODO(titzer): 1 page spot bonus */ +
...@@ -809,21 +798,9 @@ size_t WasmCodeManager::EstimateNativeModuleSize(const WasmModule* module) { ...@@ -809,21 +798,9 @@ size_t WasmCodeManager::EstimateNativeModuleSize(const WasmModule* module) {
return estimate; return estimate;
} }
std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule(Isolate* isolate,
ModuleEnv& env) {
const WasmModule* module = env.module;
size_t memory_estimate = EstimateNativeModuleSize(module);
uint32_t num_wasm_functions =
module->num_imported_functions + module->num_declared_functions;
DCHECK_EQ(module->functions.size(), num_wasm_functions);
return NewNativeModule(isolate, memory_estimate, num_wasm_functions,
module->num_imported_functions,
kModuleCanAllocateMoreMemory, env);
}
std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule( std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule(
Isolate* isolate, size_t memory_estimate, uint32_t num_functions, Isolate* isolate, size_t memory_estimate, bool can_request_more,
uint32_t num_imported_functions, bool can_request_more, ModuleEnv& env) { std::shared_ptr<const WasmModule> module, const ModuleEnv& env) {
// TODO(titzer): we force a critical memory pressure notification // TODO(titzer): we force a critical memory pressure notification
// when the code space is almost exhausted, but only upon the next module // when the code space is almost exhausted, but only upon the next module
// creation. This is only for one isolate, and it should really do this for // creation. This is only for one isolate, and it should really do this for
...@@ -846,9 +823,8 @@ std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule( ...@@ -846,9 +823,8 @@ std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule(
Address start = mem.address(); Address start = mem.address();
size_t size = mem.size(); size_t size = mem.size();
Address end = mem.end(); Address end = mem.end();
std::unique_ptr<NativeModule> ret( std::unique_ptr<NativeModule> ret(new NativeModule(
new NativeModule(isolate, num_functions, num_imported_functions, isolate, can_request_more, &mem, this, std::move(module), env));
can_request_more, &mem, this, env));
TRACE_HEAP("New NativeModule %p: Mem: %" PRIuPTR ",+%zu\n", this, start, TRACE_HEAP("New NativeModule %p: Mem: %" PRIuPTR ",+%zu\n", this, start,
size); size);
AssignRanges(start, end, ret.get()); AssignRanges(start, end, ret.get());
......
...@@ -219,6 +219,12 @@ const char* GetWasmCodeKindAsString(WasmCode::Kind); ...@@ -219,6 +219,12 @@ const char* GetWasmCodeKindAsString(WasmCode::Kind);
class V8_EXPORT_PRIVATE NativeModule final { class V8_EXPORT_PRIVATE NativeModule final {
public: public:
#if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_S390X || V8_TARGET_ARCH_ARM64
static constexpr bool kCanAllocateMoreMemory = false;
#else
static constexpr bool kCanAllocateMoreMemory = true;
#endif
WasmCode* AddCode(uint32_t index, const CodeDesc& desc, uint32_t stack_slots, WasmCode* AddCode(uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
size_t safepoint_table_offset, size_t handler_table_offset, size_t safepoint_table_offset, size_t handler_table_offset,
OwnedVector<trap_handler::ProtectedInstructionData> OwnedVector<trap_handler::ProtectedInstructionData>
...@@ -257,16 +263,12 @@ class V8_EXPORT_PRIVATE NativeModule final { ...@@ -257,16 +263,12 @@ class V8_EXPORT_PRIVATE NativeModule final {
void SetRuntimeStubs(Isolate* isolate); void SetRuntimeStubs(Isolate* isolate);
WasmCode* code(uint32_t index) const { WasmCode* code(uint32_t index) const {
DCHECK_LT(index, num_functions_); DCHECK_LT(index, num_functions());
DCHECK_LE(num_imported_functions_, index); DCHECK_LE(module_->num_imported_functions, index);
return code_table_[index - num_imported_functions_]; return code_table_[index - module_->num_imported_functions];
} }
bool has_code(uint32_t index) const { bool has_code(uint32_t index) const { return code(index) != nullptr; }
DCHECK_LT(index, num_functions_);
DCHECK_LE(num_imported_functions_, index);
return code_table_[index - num_imported_functions_] != nullptr;
}
WasmCode* runtime_stub(WasmCode::RuntimeStubId index) const { WasmCode* runtime_stub(WasmCode::RuntimeStubId index) const {
DCHECK_LT(index, WasmCode::kRuntimeStubCount); DCHECK_LT(index, WasmCode::kRuntimeStubCount);
...@@ -298,16 +300,19 @@ class V8_EXPORT_PRIVATE NativeModule final { ...@@ -298,16 +300,19 @@ class V8_EXPORT_PRIVATE NativeModule final {
// For cctests, where we build both WasmModule and the runtime objects // For cctests, where we build both WasmModule and the runtime objects
// on the fly, and bypass the instance builder pipeline. // on the fly, and bypass the instance builder pipeline.
void ReserveCodeTableForTesting(uint32_t max_functions); void ReserveCodeTableForTesting(uint32_t max_functions);
void SetNumFunctionsForTesting(uint32_t num_functions);
void LogWasmCodes(Isolate* isolate); void LogWasmCodes(Isolate* isolate);
CompilationState* compilation_state() { return compilation_state_.get(); } CompilationState* compilation_state() { return compilation_state_.get(); }
uint32_t num_functions() const { return num_functions_; } uint32_t num_functions() const {
uint32_t num_imported_functions() const { return num_imported_functions_; } return module_->num_declared_functions + module_->num_imported_functions;
}
uint32_t num_imported_functions() const {
return module_->num_imported_functions;
}
Vector<WasmCode*> code_table() const { Vector<WasmCode*> code_table() const {
return {code_table_.get(), num_functions_ - num_imported_functions_}; return {code_table_.get(), module_->num_declared_functions};
} }
bool use_trap_handler() const { return use_trap_handler_; } bool use_trap_handler() const { return use_trap_handler_; }
void set_lazy_compile_frozen(bool frozen) { lazy_compile_frozen_ = frozen; } void set_lazy_compile_frozen(bool frozen) { lazy_compile_frozen_ = frozen; }
...@@ -330,10 +335,9 @@ class V8_EXPORT_PRIVATE NativeModule final { ...@@ -330,10 +335,9 @@ class V8_EXPORT_PRIVATE NativeModule final {
friend class WasmCodeManager; friend class WasmCodeManager;
friend class NativeModuleModificationScope; friend class NativeModuleModificationScope;
NativeModule(Isolate* isolate, uint32_t num_functions, NativeModule(Isolate* isolate, bool can_request_more,
uint32_t num_imported_functions, bool can_request_more,
VirtualMemory* code_space, WasmCodeManager* code_manager, VirtualMemory* code_space, WasmCodeManager* code_manager,
ModuleEnv& env); std::shared_ptr<const WasmModule> module, const ModuleEnv& env);
WasmCode* AddAnonymousCode(Handle<Code>, WasmCode::Kind kind); WasmCode* AddAnonymousCode(Handle<Code>, WasmCode::Kind kind);
Address AllocateForCode(size_t size); Address AllocateForCode(size_t size);
...@@ -357,18 +361,20 @@ class V8_EXPORT_PRIVATE NativeModule final { ...@@ -357,18 +361,20 @@ class V8_EXPORT_PRIVATE NativeModule final {
WasmCode::FlushICache); WasmCode::FlushICache);
void set_code(uint32_t index, WasmCode* code) { void set_code(uint32_t index, WasmCode* code) {
DCHECK_LT(index, num_functions_); DCHECK_LT(index, num_functions());
DCHECK_LE(num_imported_functions_, index); DCHECK_LE(module_->num_imported_functions, index);
DCHECK_EQ(code->index(), index); DCHECK_EQ(code->index(), index);
code_table_[index - num_imported_functions_] = code; code_table_[index - module_->num_imported_functions] = code;
} }
// TODO(clemensh): Make this a unique_ptr (requires refactoring
// AsyncCompileJob).
std::shared_ptr<const WasmModule> module_;
// Holds all allocated code objects, is maintained to be in ascending order // Holds all allocated code objects, is maintained to be in ascending order
// according to the codes instruction start address to allow lookups. // according to the codes instruction start address to allow lookups.
std::vector<std::unique_ptr<WasmCode>> owned_code_; std::vector<std::unique_ptr<WasmCode>> owned_code_;
uint32_t num_functions_;
uint32_t num_imported_functions_;
std::unique_ptr<WasmCode* []> code_table_; std::unique_ptr<WasmCode* []> code_table_;
size_t wire_bytes_len_; size_t wire_bytes_len_;
...@@ -406,12 +412,9 @@ class V8_EXPORT_PRIVATE WasmCodeManager final { ...@@ -406,12 +412,9 @@ class V8_EXPORT_PRIVATE WasmCodeManager final {
// is determined with a heuristic based on the total size of wasm // is determined with a heuristic based on the total size of wasm
// code. The native module may later request more memory. // code. The native module may later request more memory.
// TODO(titzer): isolate is only required here for CompilationState. // TODO(titzer): isolate is only required here for CompilationState.
std::unique_ptr<NativeModule> NewNativeModule(Isolate* isolate,
ModuleEnv& env);
// TODO(titzer): isolate is only required here for CompilationState.
std::unique_ptr<NativeModule> NewNativeModule( std::unique_ptr<NativeModule> NewNativeModule(
Isolate* isolate, size_t memory_estimate, uint32_t num_functions, Isolate* isolate, size_t memory_estimate, bool can_request_more,
uint32_t num_imported_functions, bool can_request_more, ModuleEnv& env); std::shared_ptr<const WasmModule> module, const ModuleEnv& env);
NativeModule* LookupNativeModule(Address pc) const; NativeModule* LookupNativeModule(Address pc) const;
WasmCode* LookupCode(Address pc) const; WasmCode* LookupCode(Address pc) const;
......
...@@ -52,8 +52,6 @@ CAST_ACCESSOR(WasmTableObject) ...@@ -52,8 +52,6 @@ CAST_ACCESSOR(WasmTableObject)
ACCESSORS(WasmModuleObject, managed_native_module, Managed<wasm::NativeModule>, ACCESSORS(WasmModuleObject, managed_native_module, Managed<wasm::NativeModule>,
kNativeModuleOffset) kNativeModuleOffset)
ACCESSORS(WasmModuleObject, export_wrappers, FixedArray, kExportWrappersOffset) ACCESSORS(WasmModuleObject, export_wrappers, FixedArray, kExportWrappersOffset)
ACCESSORS(WasmModuleObject, managed_module, Managed<const wasm::WasmModule>,
kManagedModuleOffset)
ACCESSORS(WasmModuleObject, script, Script, kScriptOffset) ACCESSORS(WasmModuleObject, script, Script, kScriptOffset)
ACCESSORS(WasmModuleObject, weak_instance_list, WeakArrayList, ACCESSORS(WasmModuleObject, weak_instance_list, WeakArrayList,
kWeakInstanceListOffset) kWeakInstanceListOffset)
...@@ -61,12 +59,13 @@ OPTIONAL_ACCESSORS(WasmModuleObject, asm_js_offset_table, ByteArray, ...@@ -61,12 +59,13 @@ OPTIONAL_ACCESSORS(WasmModuleObject, asm_js_offset_table, ByteArray,
kAsmJsOffsetTableOffset) kAsmJsOffsetTableOffset)
OPTIONAL_ACCESSORS(WasmModuleObject, breakpoint_infos, FixedArray, OPTIONAL_ACCESSORS(WasmModuleObject, breakpoint_infos, FixedArray,
kBreakPointInfosOffset) kBreakPointInfosOffset)
const wasm::WasmModule* WasmModuleObject::module() const { wasm::NativeModule* WasmModuleObject::native_module() const {
return managed_module()->raw();
}
wasm::NativeModule* WasmModuleObject::native_module() {
return managed_native_module()->raw(); return managed_native_module()->raw();
} }
const wasm::WasmModule* WasmModuleObject::module() const {
// TODO(clemensh): Remove this helper (inline in callers).
return native_module()->module();
}
void WasmModuleObject::reset_breakpoint_infos() { void WasmModuleObject::reset_breakpoint_infos() {
WRITE_FIELD(this, kBreakPointInfosOffset, GetHeap()->undefined_value()); WRITE_FIELD(this, kBreakPointInfosOffset, GetHeap()->undefined_value());
} }
......
...@@ -179,22 +179,13 @@ Handle<WasmModuleObject> WasmModuleObject::New( ...@@ -179,22 +179,13 @@ Handle<WasmModuleObject> WasmModuleObject::New(
std::shared_ptr<const wasm::WasmModule> shared_module, wasm::ModuleEnv& env, std::shared_ptr<const wasm::WasmModule> shared_module, wasm::ModuleEnv& env,
std::unique_ptr<const uint8_t[]> wire_bytes, size_t wire_bytes_len, std::unique_ptr<const uint8_t[]> wire_bytes, size_t wire_bytes_len,
Handle<Script> script, Handle<ByteArray> asm_js_offset_table) { Handle<Script> script, Handle<ByteArray> asm_js_offset_table) {
const WasmModule* module = shared_module.get(); DCHECK_EQ(shared_module.get(), env.module);
DCHECK_EQ(module, env.module);
size_t module_size = EstimateWasmModuleSize(module);
// The {managed_module} will take shared ownership of the {WasmModule} object,
// and release it when the GC reclaim the managed.
Handle<Managed<const WasmModule>> managed_module =
Managed<const WasmModule>::FromSharedPtr(isolate, module_size,
std::move(shared_module));
// Now create the {WasmModuleObject}. // Now create the {WasmModuleObject}.
Handle<JSFunction> module_cons( Handle<JSFunction> module_cons(
isolate->native_context()->wasm_module_constructor(), isolate); isolate->native_context()->wasm_module_constructor(), isolate);
auto module_object = Handle<WasmModuleObject>::cast( auto module_object = Handle<WasmModuleObject>::cast(
isolate->factory()->NewJSObject(module_cons)); isolate->factory()->NewJSObject(module_cons));
module_object->set_export_wrappers(*export_wrappers); module_object->set_export_wrappers(*export_wrappers);
module_object->set_managed_module(*managed_module);
if (script->type() == Script::TYPE_WASM) { if (script->type() == Script::TYPE_WASM) {
script->set_wasm_module_object(*module_object); script->set_wasm_module_object(*module_object);
} }
...@@ -206,10 +197,15 @@ Handle<WasmModuleObject> WasmModuleObject::New( ...@@ -206,10 +197,15 @@ Handle<WasmModuleObject> WasmModuleObject::New(
} }
// Create the {NativeModule}, and let the {WasmModuleObject} reference it. // Create the {NativeModule}, and let the {WasmModuleObject} reference it.
size_t native_memory_estimate =
isolate->wasm_engine()->code_manager()->EstimateNativeModuleSize(
env.module);
size_t memory_estimate = size_t memory_estimate =
isolate->wasm_engine()->code_manager()->EstimateNativeModuleSize(module); EstimateWasmModuleSize(env.module) + native_memory_estimate;
auto native_module = auto native_module = isolate->wasm_engine()->code_manager()->NewNativeModule(
isolate->wasm_engine()->code_manager()->NewNativeModule(isolate, env); isolate, native_memory_estimate,
wasm::NativeModule::kCanAllocateMoreMemory, std::move(shared_module),
env);
native_module->set_wire_bytes(std::move(wire_bytes), wire_bytes_len); native_module->set_wire_bytes(std::move(wire_bytes), wire_bytes_len);
native_module->SetRuntimeStubs(isolate); native_module->SetRuntimeStubs(isolate);
Handle<Managed<wasm::NativeModule>> managed_native_module = Handle<Managed<wasm::NativeModule>> managed_native_module =
......
...@@ -105,14 +105,13 @@ class WasmModuleObject : public JSObject { ...@@ -105,14 +105,13 @@ class WasmModuleObject : public JSObject {
DECL_CAST(WasmModuleObject) DECL_CAST(WasmModuleObject)
DECL_ACCESSORS(managed_native_module, Managed<wasm::NativeModule>) DECL_ACCESSORS(managed_native_module, Managed<wasm::NativeModule>)
inline wasm::NativeModule* native_module();
DECL_ACCESSORS(export_wrappers, FixedArray) DECL_ACCESSORS(export_wrappers, FixedArray)
DECL_ACCESSORS(managed_module, Managed<const wasm::WasmModule>)
inline const wasm::WasmModule* module() const;
DECL_ACCESSORS(script, Script) DECL_ACCESSORS(script, Script)
DECL_ACCESSORS(weak_instance_list, WeakArrayList) DECL_ACCESSORS(weak_instance_list, WeakArrayList)
DECL_OPTIONAL_ACCESSORS(asm_js_offset_table, ByteArray) DECL_OPTIONAL_ACCESSORS(asm_js_offset_table, ByteArray)
DECL_OPTIONAL_ACCESSORS(breakpoint_infos, FixedArray) DECL_OPTIONAL_ACCESSORS(breakpoint_infos, FixedArray)
inline wasm::NativeModule* native_module() const;
inline const wasm::WasmModule* module() const;
inline void reset_breakpoint_infos(); inline void reset_breakpoint_infos();
// Dispatched behavior. // Dispatched behavior.
...@@ -123,7 +122,6 @@ class WasmModuleObject : public JSObject { ...@@ -123,7 +122,6 @@ class WasmModuleObject : public JSObject {
#define WASM_MODULE_OBJECT_FIELDS(V) \ #define WASM_MODULE_OBJECT_FIELDS(V) \
V(kNativeModuleOffset, kPointerSize) \ V(kNativeModuleOffset, kPointerSize) \
V(kExportWrappersOffset, kPointerSize) \ V(kExportWrappersOffset, kPointerSize) \
V(kManagedModuleOffset, kPointerSize) \
V(kScriptOffset, kPointerSize) \ V(kScriptOffset, kPointerSize) \
V(kWeakInstanceListOffset, kPointerSize) \ V(kWeakInstanceListOffset, kPointerSize) \
V(kAsmJsOffsetTableOffset, kPointerSize) \ V(kAsmJsOffsetTableOffset, kPointerSize) \
......
...@@ -44,9 +44,7 @@ CallDescriptor* CreateCallDescriptor(Zone* zone, int return_count, ...@@ -44,9 +44,7 @@ CallDescriptor* CreateCallDescriptor(Zone* zone, int return_count,
return compiler::GetWasmCallDescriptor(zone, builder.Build()); return compiler::GetWasmCallDescriptor(zone, builder.Build());
} }
} // namespace Node* MakeConstant(RawMachineAssembler& m, MachineType type, int value) {
Node* Constant(RawMachineAssembler& m, MachineType type, int value) {
switch (type.representation()) { switch (type.representation()) {
case MachineRepresentation::kWord32: case MachineRepresentation::kWord32:
return m.Int32Constant(static_cast<int32_t>(value)); return m.Int32Constant(static_cast<int32_t>(value));
...@@ -123,16 +121,16 @@ Node* ToInt32(RawMachineAssembler& m, MachineType type, Node* a) { ...@@ -123,16 +121,16 @@ Node* ToInt32(RawMachineAssembler& m, MachineType type, Node* a) {
std::unique_ptr<wasm::NativeModule> AllocateNativeModule(Isolate* isolate, std::unique_ptr<wasm::NativeModule> AllocateNativeModule(Isolate* isolate,
size_t code_size) { size_t code_size) {
std::shared_ptr<wasm::WasmModule> module(new wasm::WasmModule());
module->num_declared_functions = 1;
wasm::ModuleEnv env( wasm::ModuleEnv env(
nullptr, wasm::UseTrapHandler::kNoTrapHandler, module.get(), wasm::UseTrapHandler::kNoTrapHandler,
wasm::RuntimeExceptionSupport::kNoRuntimeExceptionSupport); wasm::RuntimeExceptionSupport::kNoRuntimeExceptionSupport);
// We have to add the code object to a NativeModule, because the // We have to add the code object to a NativeModule, because the
// WasmCallDescriptor assumes that code is on the native heap and not // WasmCallDescriptor assumes that code is on the native heap and not
// within a code object. // within a code object.
std::unique_ptr<wasm::NativeModule> module = return isolate->wasm_engine()->code_manager()->NewNativeModule(
isolate->wasm_engine()->code_manager()->NewNativeModule( isolate, code_size, false, std::move(module), env);
isolate, code_size, 1, 0, false, env);
return module;
} }
void TestReturnMultipleValues(MachineType type) { void TestReturnMultipleValues(MachineType type) {
...@@ -194,11 +192,12 @@ void TestReturnMultipleValues(MachineType type) { ...@@ -194,11 +192,12 @@ void TestReturnMultipleValues(MachineType type) {
// WasmContext dummy // WasmContext dummy
mt.PointerConstant(nullptr), mt.PointerConstant(nullptr),
// Inputs // Inputs
Constant(mt, type, a), Constant(mt, type, b)}; MakeConstant(mt, type, a),
MakeConstant(mt, type, b)};
Node* ret_multi = mt.AddNode(mt.common()->Call(desc), Node* ret_multi = mt.AddNode(mt.common()->Call(desc),
arraysize(call_inputs), call_inputs); arraysize(call_inputs), call_inputs);
Node* ret = Constant(mt, type, 0); Node* ret = MakeConstant(mt, type, 0);
bool sign = false; bool sign = false;
for (int i = 0; i < count; ++i) { for (int i = 0; i < count; ++i) {
Node* x = (count == 1) Node* x = (count == 1)
...@@ -219,6 +218,8 @@ void TestReturnMultipleValues(MachineType type) { ...@@ -219,6 +218,8 @@ void TestReturnMultipleValues(MachineType type) {
} }
} }
} // namespace
#define TEST_MULTI(Type, type) \ #define TEST_MULTI(Type, type) \
TEST(ReturnMultiple##Type) { TestReturnMultipleValues(type); } TEST(ReturnMultiple##Type) { TestReturnMultipleValues(type); }
...@@ -251,7 +252,7 @@ void ReturnLastValue(MachineType type) { ...@@ -251,7 +252,7 @@ void ReturnLastValue(MachineType type) {
std::unique_ptr<Node* []> returns(new Node*[return_count]); std::unique_ptr<Node* []> returns(new Node*[return_count]);
for (int i = 0; i < return_count; ++i) { for (int i = 0; i < return_count; ++i) {
returns[i] = Constant(m, type, i); returns[i] = MakeConstant(m, type, i);
} }
m.Return(return_count, returns.get()); m.Return(return_count, returns.get());
...@@ -313,7 +314,7 @@ void ReturnSumOfReturns(MachineType type) { ...@@ -313,7 +314,7 @@ void ReturnSumOfReturns(MachineType type) {
std::unique_ptr<Node* []> returns(new Node*[return_count]); std::unique_ptr<Node* []> returns(new Node*[return_count]);
for (int i = 0; i < return_count; ++i) { for (int i = 0; i < return_count; ++i) {
returns[i] = Constant(m, type, i); returns[i] = MakeConstant(m, type, i);
} }
m.Return(return_count, returns.get()); m.Return(return_count, returns.get());
......
...@@ -45,9 +45,6 @@ TestingModuleBuilder::TestingModuleBuilder( ...@@ -45,9 +45,6 @@ TestingModuleBuilder::TestingModuleBuilder(
maybe_import_index, test_module_->origin, maybe_import_index, test_module_->origin,
trap_handler::IsTrapHandlerEnabled() ? kUseTrapHandler trap_handler::IsTrapHandlerEnabled() ? kUseTrapHandler
: kNoTrapHandler); : kNoTrapHandler);
if (native_module_->num_functions() <= maybe_import_index) {
native_module_->SetNumFunctionsForTesting(maybe_import_index + 1);
}
auto wasm_to_js_wrapper = native_module_->AddCodeCopy( auto wasm_to_js_wrapper = native_module_->AddCodeCopy(
code.ToHandleChecked(), wasm::WasmCode::kWasmToJsWrapper, code.ToHandleChecked(), wasm::WasmCode::kWasmToJsWrapper,
maybe_import_index); maybe_import_index);
...@@ -99,9 +96,6 @@ uint32_t TestingModuleBuilder::AddFunction(FunctionSig* sig, const char* name, ...@@ -99,9 +96,6 @@ uint32_t TestingModuleBuilder::AddFunction(FunctionSig* sig, const char* name,
test_module_->functions.reserve(kMaxFunctions); test_module_->functions.reserve(kMaxFunctions);
} }
uint32_t index = static_cast<uint32_t>(test_module_->functions.size()); uint32_t index = static_cast<uint32_t>(test_module_->functions.size());
if (native_module_ && native_module_->num_functions() <= index) {
native_module_->SetNumFunctionsForTesting(index + 1);
}
test_module_->functions.push_back({sig, index, 0, {0, 0}, false, false}); test_module_->functions.push_back({sig, index, 0, {0, 0}, false, false});
if (type == kImport) { if (type == kImport) {
DCHECK_EQ(0, test_module_->num_declared_functions); DCHECK_EQ(0, test_module_->num_declared_functions);
......
...@@ -150,17 +150,17 @@ CallDescriptor* CreateRandomCallDescriptor(Zone* zone, size_t return_count, ...@@ -150,17 +150,17 @@ CallDescriptor* CreateRandomCallDescriptor(Zone* zone, size_t return_count,
std::unique_ptr<wasm::NativeModule> AllocateNativeModule(i::Isolate* isolate, std::unique_ptr<wasm::NativeModule> AllocateNativeModule(i::Isolate* isolate,
size_t code_size) { size_t code_size) {
std::shared_ptr<wasm::WasmModule> module(new wasm::WasmModule);
module->num_declared_functions = 1;
wasm::ModuleEnv env( wasm::ModuleEnv env(
nullptr, wasm::UseTrapHandler::kNoTrapHandler, module.get(), wasm::UseTrapHandler::kNoTrapHandler,
wasm::RuntimeExceptionSupport::kNoRuntimeExceptionSupport); wasm::RuntimeExceptionSupport::kNoRuntimeExceptionSupport);
// We have to add the code object to a NativeModule, because the // We have to add the code object to a NativeModule, because the
// WasmCallDescriptor assumes that code is on the native heap and not // WasmCallDescriptor assumes that code is on the native heap and not
// within a code object. // within a code object.
std::unique_ptr<wasm::NativeModule> module = return isolate->wasm_engine()->code_manager()->NewNativeModule(
isolate->wasm_engine()->code_manager()->NewNativeModule( isolate, code_size, false, std::move(module), env);
isolate, code_size, 1, 0, false, env);
return module;
} }
extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) { extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
......
...@@ -163,11 +163,13 @@ class WasmCodeManagerTest : public TestWithContext, ...@@ -163,11 +163,13 @@ class WasmCodeManagerTest : public TestWithContext,
NativeModulePtr AllocModule(WasmCodeManager* manager, size_t size, NativeModulePtr AllocModule(WasmCodeManager* manager, size_t size,
ModuleStyle style) { ModuleStyle style) {
std::shared_ptr<WasmModule> module(new WasmModule);
module->num_declared_functions = kNumFunctions;
bool can_request_more = style == Growable; bool can_request_more = style == Growable;
wasm::ModuleEnv env(nullptr, UseTrapHandler::kNoTrapHandler, wasm::ModuleEnv env(module.get(), UseTrapHandler::kNoTrapHandler,
RuntimeExceptionSupport::kNoRuntimeExceptionSupport); RuntimeExceptionSupport::kNoRuntimeExceptionSupport);
return manager->NewNativeModule(i_isolate(), size, kNumFunctions, 0, return manager->NewNativeModule(i_isolate(), size, can_request_more,
can_request_more, env); std::move(module), env);
} }
WasmCode* AddCode(NativeModule* native_module, uint32_t index, size_t size) { WasmCode* AddCode(NativeModule* native_module, uint32_t index, size_t size) {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment