Commit ad57eec5 authored by Clemens Hammacher, committed by Commit Bot

[wasm] Store WasmModule directly in the NativeModule

Instead of storing both the {NativeModule} and the {WasmModule} in
{Managed} objects, store the {WasmModule} directly in the {NativeModule}.
This fixes crashes that happen if the {Managed<WasmModule>} dies before
the {Managed<NativeModule>}.

R=mstarzinger@chromium.org

Bug: chromium:854794, v8:7879, v8:7889
Change-Id: I6b11729943fe7a03d225138782655ee5dafd26a6
Reviewed-on: https://chromium-review.googlesource.com/1118171
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#54090}
parent 00f3ab17
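
For illustration, here is a minimal sketch of the ownership change described above, using simplified stand-in types rather than V8's real declarations (the field names mirror the CL; everything else is assumed): the {NativeModule} now shares ownership of the {WasmModule} through a {std::shared_ptr}, and function counts are derived from the module instead of being stored as separate fields.

```cpp
// Hedged sketch only: simplified stand-ins, not V8's actual classes.
#include <cstdint>
#include <memory>

struct WasmModule {
  uint32_t num_imported_functions = 0;
  uint32_t num_declared_functions = 0;
};

class NativeModule {
 public:
  // Shared ownership keeps the WasmModule alive at least as long as this
  // NativeModule -- the lifetime guarantee that the old side-by-side
  // Managed<WasmModule> slot failed to provide.
  explicit NativeModule(std::shared_ptr<const WasmModule> module)
      : module_(std::move(module)) {}

  const WasmModule* module() const { return module_.get(); }

  // Counts are derived from the module rather than duplicated as fields.
  uint32_t num_functions() const {
    return module_->num_imported_functions + module_->num_declared_functions;
  }

 private:
  std::shared_ptr<const WasmModule> module_;
};
```

With the previous layout, the GC could finalize the {Managed<WasmModule>} before the {Managed<NativeModule>}, leaving live compiled code pointing at a freed module; with shared ownership the destruction order no longer matters.
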
@@ -1579,8 +1579,6 @@ void WasmModuleObject::WasmModuleObjectVerify(Isolate* isolate) {
CHECK(IsWasmModuleObject());
VerifyObjectField(kNativeModuleOffset);
VerifyObjectField(kExportWrappersOffset);
VerifyObjectField(kManagedModuleOffset);
CHECK(managed_module()->IsForeign());
VerifyObjectField(kScriptOffset);
VerifyObjectField(kAsmJsOffsetTableOffset);
VerifyObjectField(kBreakPointInfosOffset);
@@ -64,7 +64,7 @@ enum class CompileMode : uint8_t { kRegular, kTiering };
// compilation of functions.
class CompilationState {
public:
CompilationState(internal::Isolate* isolate, ModuleEnv& env);
CompilationState(internal::Isolate*, const ModuleEnv&);
~CompilationState();
// Needs to be set before {AddCompilationUnits} is run, which triggers
@@ -134,6 +134,7 @@ class CompilationState {
}
Isolate* const isolate_;
// TODO(clemensh): Remove ModuleEnv, generate it when needed.
ModuleEnv module_env_;
const size_t max_memory_;
const CompileMode compile_mode_;
@@ -2846,7 +2847,7 @@ void CompilationStateDeleter::operator()(
}
std::unique_ptr<CompilationState, CompilationStateDeleter> NewCompilationState(
Isolate* isolate, ModuleEnv& env) {
Isolate* isolate, const ModuleEnv& env) {
return std::unique_ptr<CompilationState, CompilationStateDeleter>(
new CompilationState(isolate, env));
}
@@ -2855,13 +2856,12 @@ ModuleEnv* GetModuleEnv(CompilationState* compilation_state) {
return compilation_state->module_env();
}
CompilationState::CompilationState(internal::Isolate* isolate, ModuleEnv& env)
CompilationState::CompilationState(internal::Isolate* isolate,
const ModuleEnv& env)
: isolate_(isolate),
module_env_(env),
max_memory_(GetMaxUsableMemorySize(isolate) / 2),
// TODO(clemensh): Fix fuzzers such that {env.module} is always non-null.
compile_mode_(FLAG_wasm_tier_up &&
(!env.module || env.module->origin == kWasmOrigin)
compile_mode_(FLAG_wasm_tier_up && env.module->origin == kWasmOrigin
? CompileMode::kTiering
: CompileMode::kRegular),
wire_bytes_(ModuleWireBytes(nullptr, nullptr)),
@@ -43,7 +43,7 @@ struct CompilationStateDeleter {
// Wrapper to create a CompilationState exists in order to avoid having
// the CompilationState in the header file.
std::unique_ptr<CompilationState, CompilationStateDeleter> NewCompilationState(
Isolate* isolate, ModuleEnv& env);
Isolate* isolate, const ModuleEnv& env);
ModuleEnv* GetModuleEnv(CompilationState* compilation_state);
@@ -42,12 +42,6 @@ struct WasmCodeUniquePtrComparator {
}
};
#if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_S390X || V8_TARGET_ARCH_ARM64
constexpr bool kModuleCanAllocateMoreMemory = false;
#else
constexpr bool kModuleCanAllocateMoreMemory = true;
#endif
} // namespace
void DisjointAllocationPool::Merge(AddressRange range) {
@@ -294,24 +288,25 @@ WasmCode::~WasmCode() {
}
}
NativeModule::NativeModule(Isolate* isolate, uint32_t num_functions,
uint32_t num_imported_functions,
bool can_request_more, VirtualMemory* code_space,
WasmCodeManager* code_manager, ModuleEnv& env)
: num_functions_(num_functions),
num_imported_functions_(num_imported_functions),
NativeModule::NativeModule(Isolate* isolate, bool can_request_more,
VirtualMemory* code_space,
WasmCodeManager* code_manager,
std::shared_ptr<const WasmModule> module,
const ModuleEnv& env)
: module_(std::move(module)),
compilation_state_(NewCompilationState(isolate, env)),
free_code_space_({code_space->address(), code_space->end()}),
wasm_code_manager_(code_manager),
can_request_more_memory_(can_request_more),
use_trap_handler_(env.use_trap_handler) {
DCHECK_EQ(module_.get(), env.module);
DCHECK_NOT_NULL(module_);
VirtualMemory my_mem;
owned_code_space_.push_back(my_mem);
owned_code_space_.back().TakeControl(code_space);
owned_code_.reserve(num_functions);
owned_code_.reserve(num_functions());
DCHECK_LE(num_imported_functions, num_functions);
uint32_t num_wasm_functions = num_functions - num_imported_functions;
uint32_t num_wasm_functions = module_->num_declared_functions;
if (num_wasm_functions > 0) {
code_table_.reset(new WasmCode*[num_wasm_functions]);
memset(code_table_.get(), 0, num_wasm_functions * sizeof(WasmCode*));
@@ -321,20 +316,15 @@ NativeModule::NativeModule(Isolate* isolate, uint32_t num_functions,
}
void NativeModule::ReserveCodeTableForTesting(uint32_t max_functions) {
DCHECK_LE(num_functions_, max_functions);
uint32_t num_wasm = num_functions_ - num_imported_functions_;
uint32_t max_wasm = max_functions - num_imported_functions_;
WasmCode** new_table = new WasmCode*[max_wasm];
memset(new_table, 0, max_wasm * sizeof(*new_table));
memcpy(new_table, code_table_.get(), num_wasm * sizeof(*new_table));
DCHECK_LE(num_functions(), max_functions);
WasmCode** new_table = new WasmCode*[max_functions];
memset(new_table, 0, max_functions * sizeof(*new_table));
memcpy(new_table, code_table_.get(),
module_->num_declared_functions * sizeof(*new_table));
code_table_.reset(new_table);
// Re-allocate jump table.
jump_table_ = CreateEmptyJumpTable(max_wasm);
}
void NativeModule::SetNumFunctionsForTesting(uint32_t num_functions) {
num_functions_ = num_functions;
jump_table_ = CreateEmptyJumpTable(max_functions);
}
void NativeModule::LogWasmCodes(Isolate* isolate) {
@@ -394,7 +384,7 @@ WasmCode* NativeModule::AddCodeCopy(Handle<Code> code, WasmCode::Kind kind,
// this NativeModule is a memory leak until the whole NativeModule dies.
WasmCode* ret = AddAnonymousCode(code, kind);
ret->index_ = Just(index);
if (index >= num_imported_functions_) set_code(index, ret);
if (index >= module_->num_imported_functions) set_code(index, ret);
return ret;
}
@@ -406,7 +396,7 @@ WasmCode* NativeModule::AddInterpreterEntry(Handle<Code> code, uint32_t index) {
}
void NativeModule::SetLazyBuiltin(Handle<Code> code) {
uint32_t num_wasm_functions = num_functions_ - num_imported_functions_;
uint32_t num_wasm_functions = module_->num_declared_functions;
if (num_wasm_functions == 0) return;
WasmCode* lazy_builtin = AddAnonymousCode(code, WasmCode::kLazyStub);
// Fill the jump table with jumps to the lazy compile stub.
@@ -417,7 +407,7 @@ void NativeModule::SetLazyBuiltin(Handle<Code> code) {
for (uint32_t i = 0; i < num_wasm_functions; ++i) {
// Check that the offset in the jump table increases as expected.
DCHECK_EQ(i * JumpTableAssembler::kJumpTableSlotSize, jtasm.pc_offset());
jtasm.EmitLazyCompileJumpSlot(i + num_imported_functions_,
jtasm.EmitLazyCompileJumpSlot(i + module_->num_imported_functions,
lazy_compile_target);
jtasm.NopBytes((i + 1) * JumpTableAssembler::kJumpTableSlotSize -
jtasm.pc_offset());
@@ -600,8 +590,8 @@ WasmCode* NativeModule::CreateEmptyJumpTable(uint32_t num_wasm_functions) {
void NativeModule::PatchJumpTable(uint32_t func_index, Address target,
WasmCode::FlushICache flush_icache) {
DCHECK_LE(num_imported_functions_, func_index);
uint32_t slot_idx = func_index - num_imported_functions_;
DCHECK_LE(module_->num_imported_functions, func_index);
uint32_t slot_idx = func_index - module_->num_imported_functions;
Address jump_table_slot = jump_table_->instruction_start() +
slot_idx * JumpTableAssembler::kJumpTableSlotSize;
JumpTableAssembler::PatchJumpTableSlot(jump_table_slot, target, flush_icache);
@@ -695,7 +685,7 @@ Address NativeModule::GetCallTargetForFunction(uint32_t func_index) const {
// Return the jump table slot for that function index.
DCHECK_NOT_NULL(jump_table_);
uint32_t slot_idx = func_index - num_imported_functions_;
uint32_t slot_idx = func_index - module_->num_imported_functions;
DCHECK_LT(slot_idx, jump_table_->instructions().size() /
JumpTableAssembler::kJumpTableSlotSize);
return jump_table_->instruction_start() +
@@ -707,8 +697,8 @@ uint32_t NativeModule::GetFunctionIndexFromJumpTableSlot(Address slot_address) {
uint32_t offset =
static_cast<uint32_t>(slot_address - jump_table_->instruction_start());
uint32_t slot_idx = offset / JumpTableAssembler::kJumpTableSlotSize;
DCHECK_LT(slot_idx, num_functions_ - num_imported_functions_);
return num_imported_functions_ + slot_idx;
DCHECK_LT(slot_idx, module_->num_declared_functions);
return module_->num_imported_functions + slot_idx;
}
void NativeModule::DisableTrapHandler() {
@@ -718,7 +708,7 @@ void NativeModule::DisableTrapHandler() {
// Clear the code table (just to increase the chances to hit an error if we
// forget to re-add all code).
uint32_t num_wasm_functions = num_functions_ - num_imported_functions_;
uint32_t num_wasm_functions = module_->num_declared_functions;
memset(code_table_.get(), 0, num_wasm_functions * sizeof(WasmCode*));
// TODO(clemensh): Actually free the owned code, such that the memory can be
@@ -790,8 +780,7 @@ size_t WasmCodeManager::EstimateNativeModuleSize(const WasmModule* module) {
constexpr size_t kCodeSizeMultiplier = 4;
constexpr size_t kImportSize = 32 * kPointerSize;
uint32_t num_functions = static_cast<uint32_t>(module->functions.size());
uint32_t num_wasm_functions = num_functions - module->num_imported_functions;
uint32_t num_wasm_functions = module->num_declared_functions;
size_t estimate =
AllocatePageSize() /* TODO(titzer): 1 page spot bonus */ +
@@ -809,21 +798,9 @@ size_t WasmCodeManager::EstimateNativeModuleSize(const WasmModule* module) {
return estimate;
}
std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule(Isolate* isolate,
ModuleEnv& env) {
const WasmModule* module = env.module;
size_t memory_estimate = EstimateNativeModuleSize(module);
uint32_t num_wasm_functions =
module->num_imported_functions + module->num_declared_functions;
DCHECK_EQ(module->functions.size(), num_wasm_functions);
return NewNativeModule(isolate, memory_estimate, num_wasm_functions,
module->num_imported_functions,
kModuleCanAllocateMoreMemory, env);
}
std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule(
Isolate* isolate, size_t memory_estimate, uint32_t num_functions,
uint32_t num_imported_functions, bool can_request_more, ModuleEnv& env) {
Isolate* isolate, size_t memory_estimate, bool can_request_more,
std::shared_ptr<const WasmModule> module, const ModuleEnv& env) {
// TODO(titzer): we force a critical memory pressure notification
// when the code space is almost exhausted, but only upon the next module
// creation. This is only for one isolate, and it should really do this for
@@ -846,9 +823,8 @@ std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule(
Address start = mem.address();
size_t size = mem.size();
Address end = mem.end();
std::unique_ptr<NativeModule> ret(
new NativeModule(isolate, num_functions, num_imported_functions,
can_request_more, &mem, this, env));
std::unique_ptr<NativeModule> ret(new NativeModule(
isolate, can_request_more, &mem, this, std::move(module), env));
TRACE_HEAP("New NativeModule %p: Mem: %" PRIuPTR ",+%zu\n", this, start,
size);
AssignRanges(start, end, ret.get());
@@ -219,6 +219,12 @@ const char* GetWasmCodeKindAsString(WasmCode::Kind);
class V8_EXPORT_PRIVATE NativeModule final {
public:
#if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_S390X || V8_TARGET_ARCH_ARM64
static constexpr bool kCanAllocateMoreMemory = false;
#else
static constexpr bool kCanAllocateMoreMemory = true;
#endif
WasmCode* AddCode(uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
size_t safepoint_table_offset, size_t handler_table_offset,
OwnedVector<trap_handler::ProtectedInstructionData>
@@ -257,16 +263,12 @@ class V8_EXPORT_PRIVATE NativeModule final {
void SetRuntimeStubs(Isolate* isolate);
WasmCode* code(uint32_t index) const {
DCHECK_LT(index, num_functions_);
DCHECK_LE(num_imported_functions_, index);
return code_table_[index - num_imported_functions_];
DCHECK_LT(index, num_functions());
DCHECK_LE(module_->num_imported_functions, index);
return code_table_[index - module_->num_imported_functions];
}
bool has_code(uint32_t index) const {
DCHECK_LT(index, num_functions_);
DCHECK_LE(num_imported_functions_, index);
return code_table_[index - num_imported_functions_] != nullptr;
}
bool has_code(uint32_t index) const { return code(index) != nullptr; }
WasmCode* runtime_stub(WasmCode::RuntimeStubId index) const {
DCHECK_LT(index, WasmCode::kRuntimeStubCount);
@@ -298,16 +300,19 @@ class V8_EXPORT_PRIVATE NativeModule final {
// For cctests, where we build both WasmModule and the runtime objects
// on the fly, and bypass the instance builder pipeline.
void ReserveCodeTableForTesting(uint32_t max_functions);
void SetNumFunctionsForTesting(uint32_t num_functions);
void LogWasmCodes(Isolate* isolate);
CompilationState* compilation_state() { return compilation_state_.get(); }
uint32_t num_functions() const { return num_functions_; }
uint32_t num_imported_functions() const { return num_imported_functions_; }
uint32_t num_functions() const {
return module_->num_declared_functions + module_->num_imported_functions;
}
uint32_t num_imported_functions() const {
return module_->num_imported_functions;
}
Vector<WasmCode*> code_table() const {
return {code_table_.get(), num_functions_ - num_imported_functions_};
return {code_table_.get(), module_->num_declared_functions};
}
bool use_trap_handler() const { return use_trap_handler_; }
void set_lazy_compile_frozen(bool frozen) { lazy_compile_frozen_ = frozen; }
@@ -330,10 +335,9 @@ class V8_EXPORT_PRIVATE NativeModule final {
friend class WasmCodeManager;
friend class NativeModuleModificationScope;
NativeModule(Isolate* isolate, uint32_t num_functions,
uint32_t num_imported_functions, bool can_request_more,
NativeModule(Isolate* isolate, bool can_request_more,
VirtualMemory* code_space, WasmCodeManager* code_manager,
ModuleEnv& env);
std::shared_ptr<const WasmModule> module, const ModuleEnv& env);
WasmCode* AddAnonymousCode(Handle<Code>, WasmCode::Kind kind);
Address AllocateForCode(size_t size);
@@ -357,18 +361,20 @@ class V8_EXPORT_PRIVATE NativeModule final {
WasmCode::FlushICache);
void set_code(uint32_t index, WasmCode* code) {
DCHECK_LT(index, num_functions_);
DCHECK_LE(num_imported_functions_, index);
DCHECK_LT(index, num_functions());
DCHECK_LE(module_->num_imported_functions, index);
DCHECK_EQ(code->index(), index);
code_table_[index - num_imported_functions_] = code;
code_table_[index - module_->num_imported_functions] = code;
}
// TODO(clemensh): Make this a unique_ptr (requires refactoring
// AsyncCompileJob).
std::shared_ptr<const WasmModule> module_;
// Holds all allocated code objects, is maintained to be in ascending order
// according to the code's instruction start address to allow lookups.
std::vector<std::unique_ptr<WasmCode>> owned_code_;
uint32_t num_functions_;
uint32_t num_imported_functions_;
std::unique_ptr<WasmCode* []> code_table_;
size_t wire_bytes_len_;
@@ -406,12 +412,9 @@ class V8_EXPORT_PRIVATE WasmCodeManager final {
// is determined with a heuristic based on the total size of wasm
// code. The native module may later request more memory.
// TODO(titzer): isolate is only required here for CompilationState.
std::unique_ptr<NativeModule> NewNativeModule(Isolate* isolate,
ModuleEnv& env);
// TODO(titzer): isolate is only required here for CompilationState.
std::unique_ptr<NativeModule> NewNativeModule(
Isolate* isolate, size_t memory_estimate, uint32_t num_functions,
uint32_t num_imported_functions, bool can_request_more, ModuleEnv& env);
Isolate* isolate, size_t memory_estimate, bool can_request_more,
std::shared_ptr<const WasmModule> module, const ModuleEnv& env);
NativeModule* LookupNativeModule(Address pc) const;
WasmCode* LookupCode(Address pc) const;
@@ -52,8 +52,6 @@ CAST_ACCESSOR(WasmTableObject)
ACCESSORS(WasmModuleObject, managed_native_module, Managed<wasm::NativeModule>,
kNativeModuleOffset)
ACCESSORS(WasmModuleObject, export_wrappers, FixedArray, kExportWrappersOffset)
ACCESSORS(WasmModuleObject, managed_module, Managed<const wasm::WasmModule>,
kManagedModuleOffset)
ACCESSORS(WasmModuleObject, script, Script, kScriptOffset)
ACCESSORS(WasmModuleObject, weak_instance_list, WeakArrayList,
kWeakInstanceListOffset)
@@ -61,12 +59,13 @@ OPTIONAL_ACCESSORS(WasmModuleObject, asm_js_offset_table, ByteArray,
kAsmJsOffsetTableOffset)
OPTIONAL_ACCESSORS(WasmModuleObject, breakpoint_infos, FixedArray,
kBreakPointInfosOffset)
const wasm::WasmModule* WasmModuleObject::module() const {
return managed_module()->raw();
}
wasm::NativeModule* WasmModuleObject::native_module() {
wasm::NativeModule* WasmModuleObject::native_module() const {
return managed_native_module()->raw();
}
const wasm::WasmModule* WasmModuleObject::module() const {
// TODO(clemensh): Remove this helper (inline in callers).
return native_module()->module();
}
void WasmModuleObject::reset_breakpoint_infos() {
WRITE_FIELD(this, kBreakPointInfosOffset, GetHeap()->undefined_value());
}
@@ -179,22 +179,13 @@ Handle<WasmModuleObject> WasmModuleObject::New(
std::shared_ptr<const wasm::WasmModule> shared_module, wasm::ModuleEnv& env,
std::unique_ptr<const uint8_t[]> wire_bytes, size_t wire_bytes_len,
Handle<Script> script, Handle<ByteArray> asm_js_offset_table) {
const WasmModule* module = shared_module.get();
DCHECK_EQ(module, env.module);
size_t module_size = EstimateWasmModuleSize(module);
// The {managed_module} will take shared ownership of the {WasmModule} object,
// and release it when the GC reclaims the managed object.
Handle<Managed<const WasmModule>> managed_module =
Managed<const WasmModule>::FromSharedPtr(isolate, module_size,
std::move(shared_module));
DCHECK_EQ(shared_module.get(), env.module);
// Now create the {WasmModuleObject}.
Handle<JSFunction> module_cons(
isolate->native_context()->wasm_module_constructor(), isolate);
auto module_object = Handle<WasmModuleObject>::cast(
isolate->factory()->NewJSObject(module_cons));
module_object->set_export_wrappers(*export_wrappers);
module_object->set_managed_module(*managed_module);
if (script->type() == Script::TYPE_WASM) {
script->set_wasm_module_object(*module_object);
}
@@ -206,10 +197,15 @@
}
// Create the {NativeModule}, and let the {WasmModuleObject} reference it.
size_t native_memory_estimate =
isolate->wasm_engine()->code_manager()->EstimateNativeModuleSize(
env.module);
size_t memory_estimate =
isolate->wasm_engine()->code_manager()->EstimateNativeModuleSize(module);
auto native_module =
isolate->wasm_engine()->code_manager()->NewNativeModule(isolate, env);
EstimateWasmModuleSize(env.module) + native_memory_estimate;
auto native_module = isolate->wasm_engine()->code_manager()->NewNativeModule(
isolate, native_memory_estimate,
wasm::NativeModule::kCanAllocateMoreMemory, std::move(shared_module),
env);
native_module->set_wire_bytes(std::move(wire_bytes), wire_bytes_len);
native_module->SetRuntimeStubs(isolate);
Handle<Managed<wasm::NativeModule>> managed_native_module =
@@ -105,14 +105,13 @@ class WasmModuleObject : public JSObject {
DECL_CAST(WasmModuleObject)
DECL_ACCESSORS(managed_native_module, Managed<wasm::NativeModule>)
inline wasm::NativeModule* native_module();
DECL_ACCESSORS(export_wrappers, FixedArray)
DECL_ACCESSORS(managed_module, Managed<const wasm::WasmModule>)
inline const wasm::WasmModule* module() const;
DECL_ACCESSORS(script, Script)
DECL_ACCESSORS(weak_instance_list, WeakArrayList)
DECL_OPTIONAL_ACCESSORS(asm_js_offset_table, ByteArray)
DECL_OPTIONAL_ACCESSORS(breakpoint_infos, FixedArray)
inline wasm::NativeModule* native_module() const;
inline const wasm::WasmModule* module() const;
inline void reset_breakpoint_infos();
// Dispatched behavior.
@@ -123,7 +122,6 @@ class WasmModuleObject : public JSObject {
#define WASM_MODULE_OBJECT_FIELDS(V) \
V(kNativeModuleOffset, kPointerSize) \
V(kExportWrappersOffset, kPointerSize) \
V(kManagedModuleOffset, kPointerSize) \
V(kScriptOffset, kPointerSize) \
V(kWeakInstanceListOffset, kPointerSize) \
V(kAsmJsOffsetTableOffset, kPointerSize) \
@@ -44,9 +44,7 @@ CallDescriptor* CreateCallDescriptor(Zone* zone, int return_count,
return compiler::GetWasmCallDescriptor(zone, builder.Build());
}
} // namespace
Node* Constant(RawMachineAssembler& m, MachineType type, int value) {
Node* MakeConstant(RawMachineAssembler& m, MachineType type, int value) {
switch (type.representation()) {
case MachineRepresentation::kWord32:
return m.Int32Constant(static_cast<int32_t>(value));
@@ -123,16 +121,16 @@ Node* ToInt32(RawMachineAssembler& m, MachineType type, Node* a) {
std::unique_ptr<wasm::NativeModule> AllocateNativeModule(Isolate* isolate,
size_t code_size) {
std::shared_ptr<wasm::WasmModule> module(new wasm::WasmModule());
module->num_declared_functions = 1;
wasm::ModuleEnv env(
nullptr, wasm::UseTrapHandler::kNoTrapHandler,
module.get(), wasm::UseTrapHandler::kNoTrapHandler,
wasm::RuntimeExceptionSupport::kNoRuntimeExceptionSupport);
// We have to add the code object to a NativeModule, because the
// WasmCallDescriptor assumes that code is on the native heap and not
// within a code object.
std::unique_ptr<wasm::NativeModule> module =
isolate->wasm_engine()->code_manager()->NewNativeModule(
isolate, code_size, 1, 0, false, env);
return module;
return isolate->wasm_engine()->code_manager()->NewNativeModule(
isolate, code_size, false, std::move(module), env);
}
void TestReturnMultipleValues(MachineType type) {
@@ -194,11 +192,12 @@ void TestReturnMultipleValues(MachineType type) {
// WasmContext dummy
mt.PointerConstant(nullptr),
// Inputs
Constant(mt, type, a), Constant(mt, type, b)};
MakeConstant(mt, type, a),
MakeConstant(mt, type, b)};
Node* ret_multi = mt.AddNode(mt.common()->Call(desc),
arraysize(call_inputs), call_inputs);
Node* ret = Constant(mt, type, 0);
Node* ret = MakeConstant(mt, type, 0);
bool sign = false;
for (int i = 0; i < count; ++i) {
Node* x = (count == 1)
@@ -219,6 +218,8 @@ void TestReturnMultipleValues(MachineType type) {
}
}
} // namespace
#define TEST_MULTI(Type, type) \
TEST(ReturnMultiple##Type) { TestReturnMultipleValues(type); }
@@ -251,7 +252,7 @@ void ReturnLastValue(MachineType type) {
std::unique_ptr<Node* []> returns(new Node*[return_count]);
for (int i = 0; i < return_count; ++i) {
returns[i] = Constant(m, type, i);
returns[i] = MakeConstant(m, type, i);
}
m.Return(return_count, returns.get());
@@ -313,7 +314,7 @@ void ReturnSumOfReturns(MachineType type) {
std::unique_ptr<Node* []> returns(new Node*[return_count]);
for (int i = 0; i < return_count; ++i) {
returns[i] = Constant(m, type, i);
returns[i] = MakeConstant(m, type, i);
}
m.Return(return_count, returns.get());
@@ -45,9 +45,6 @@ TestingModuleBuilder::TestingModuleBuilder(
maybe_import_index, test_module_->origin,
trap_handler::IsTrapHandlerEnabled() ? kUseTrapHandler
: kNoTrapHandler);
if (native_module_->num_functions() <= maybe_import_index) {
native_module_->SetNumFunctionsForTesting(maybe_import_index + 1);
}
auto wasm_to_js_wrapper = native_module_->AddCodeCopy(
code.ToHandleChecked(), wasm::WasmCode::kWasmToJsWrapper,
maybe_import_index);
@@ -99,9 +96,6 @@ uint32_t TestingModuleBuilder::AddFunction(FunctionSig* sig, const char* name,
test_module_->functions.reserve(kMaxFunctions);
}
uint32_t index = static_cast<uint32_t>(test_module_->functions.size());
if (native_module_ && native_module_->num_functions() <= index) {
native_module_->SetNumFunctionsForTesting(index + 1);
}
test_module_->functions.push_back({sig, index, 0, {0, 0}, false, false});
if (type == kImport) {
DCHECK_EQ(0, test_module_->num_declared_functions);
@@ -150,17 +150,17 @@ CallDescriptor* CreateRandomCallDescriptor(Zone* zone, size_t return_count,
std::unique_ptr<wasm::NativeModule> AllocateNativeModule(i::Isolate* isolate,
size_t code_size) {
std::shared_ptr<wasm::WasmModule> module(new wasm::WasmModule);
module->num_declared_functions = 1;
wasm::ModuleEnv env(
nullptr, wasm::UseTrapHandler::kNoTrapHandler,
module.get(), wasm::UseTrapHandler::kNoTrapHandler,
wasm::RuntimeExceptionSupport::kNoRuntimeExceptionSupport);
// We have to add the code object to a NativeModule, because the
// WasmCallDescriptor assumes that code is on the native heap and not
// within a code object.
std::unique_ptr<wasm::NativeModule> module =
isolate->wasm_engine()->code_manager()->NewNativeModule(
isolate, code_size, 1, 0, false, env);
return module;
return isolate->wasm_engine()->code_manager()->NewNativeModule(
isolate, code_size, false, std::move(module), env);
}
extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
@@ -163,11 +163,13 @@ class WasmCodeManagerTest : public TestWithContext,
NativeModulePtr AllocModule(WasmCodeManager* manager, size_t size,
ModuleStyle style) {
std::shared_ptr<WasmModule> module(new WasmModule);
module->num_declared_functions = kNumFunctions;
bool can_request_more = style == Growable;
wasm::ModuleEnv env(nullptr, UseTrapHandler::kNoTrapHandler,
wasm::ModuleEnv env(module.get(), UseTrapHandler::kNoTrapHandler,
RuntimeExceptionSupport::kNoRuntimeExceptionSupport);
return manager->NewNativeModule(i_isolate(), size, kNumFunctions, 0,
can_request_more, env);
return manager->NewNativeModule(i_isolate(), size, can_request_more,
std::move(module), env);
}
WasmCode* AddCode(NativeModule* native_module, uint32_t index, size_t size) {