Commit 0c1b4c25 authored by Ben L. Titzer, committed by Commit Bot

[wasm] Move indirect function tables into the WasmContext

This CL changes the WASM implementation to access indirect function
tables through the WasmContext, whereas previously indirect function
tables and their sizes were constants that were inlined into compiled
code, requiring code patching. This is a necessary step for sharing
code between instances and, eventually, isolates.

R=clemensh@chromium.org,mstarzinger@chromium.org

Bug: v8:7424
Change-Id: Ida4138ed92729730dfbc0a81a84d8484b233d808
Reviewed-on: https://chromium-review.googlesource.com/895683
Commit-Queue: Ben Titzer <titzer@chromium.org>
Reviewed-by: Clemens Hammacher <clemensh@chromium.org>
Cr-Commit-Position: refs/heads/master@{#51427}
parent fb0144f6
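For orientation, here is a minimal sketch of the data structures this CL dispatches through, reconstructed from the fields and offsetof() uses in the diff below. Field order, exact types, and the omitted members are assumptions; only the member names and their roles come from the change.

```cpp
#include <cstdint>

struct WasmContext;  // forward declaration; the real struct has more fields (memory, globals, ...)

// Each indirect-call slot carries the canonical signature id, the callee's
// context, and the callee's entry point.
struct IndirectFunctionTableEntry {
  int32_t sig_id;        // canonical signature id; kInvalidSigIndex when unusable
  WasmContext* context;  // WasmContext of the instance that owns the target code
  uintptr_t target;      // instruction start of the callee (V8 calls this type Address)
};

struct WasmContext {
  // ... linear-memory and globals fields elided ...
  uint32_t table_size;                // number of indirect-call entries
  IndirectFunctionTableEntry* table;  // C array allocated with calloc() in this CL
};
```

An indirect call then becomes: bounds-check the index against table_size, compare sig_id against the caller's canonical signature id, and call through (context, target) — the sequence both the TurboFan and Liftoff diffs below emit.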
...@@ -2529,8 +2529,12 @@ Node* WasmGraphBuilder::BuildCCall(MachineSignature* sig, Node* function, ...@@ -2529,8 +2529,12 @@ Node* WasmGraphBuilder::BuildCCall(MachineSignature* sig, Node* function,
Node* WasmGraphBuilder::BuildWasmCall(wasm::FunctionSig* sig, Node** args, Node* WasmGraphBuilder::BuildWasmCall(wasm::FunctionSig* sig, Node** args,
Node*** rets, Node*** rets,
wasm::WasmCodePosition position) { wasm::WasmCodePosition position,
Node* wasm_context) {
if (wasm_context == nullptr) {
DCHECK_NOT_NULL(wasm_context_); DCHECK_NOT_NULL(wasm_context_);
wasm_context = wasm_context_.get();
}
SetNeedsStackCheck(); SetNeedsStackCheck();
const size_t params = sig->parameter_count(); const size_t params = sig->parameter_count();
const size_t extra = 3; // wasm_context, effect, and control. const size_t extra = 3; // wasm_context, effect, and control.
...@@ -2541,7 +2545,7 @@ Node* WasmGraphBuilder::BuildWasmCall(wasm::FunctionSig* sig, Node** args, ...@@ -2541,7 +2545,7 @@ Node* WasmGraphBuilder::BuildWasmCall(wasm::FunctionSig* sig, Node** args,
// Make room for the wasm_context parameter at index 1, just after code. // Make room for the wasm_context parameter at index 1, just after code.
memmove(&args[2], &args[1], params * sizeof(Node*)); memmove(&args[2], &args[1], params * sizeof(Node*));
args[1] = wasm_context_.get(); args[1] = wasm_context;
// Add effect and control inputs. // Add effect and control inputs.
args[params + 2] = *effect_; args[params + 2] = *effect_;
...@@ -2602,13 +2606,15 @@ Node* WasmGraphBuilder::CallIndirect(uint32_t sig_index, Node** args, ...@@ -2602,13 +2606,15 @@ Node* WasmGraphBuilder::CallIndirect(uint32_t sig_index, Node** args,
uint32_t table_index = 0; uint32_t table_index = 0;
wasm::FunctionSig* sig = env_->module->signatures[sig_index]; wasm::FunctionSig* sig = env_->module->signatures[sig_index];
EnsureFunctionTableNodes(); Node* table = nullptr;
Node* table_size = nullptr;
GetFunctionTableNodes(table_index, &table, &table_size);
MachineOperatorBuilder* machine = jsgraph()->machine(); MachineOperatorBuilder* machine = jsgraph()->machine();
Node* key = args[0]; Node* key = args[0];
// Bounds check against the table size. // Bounds check against the table size.
Node* size = function_tables_[table_index].size; Node* in_bounds =
Node* in_bounds = graph()->NewNode(machine->Uint32LessThan(), key, size); graph()->NewNode(machine->Uint32LessThan(), key, table_size);
TrapIfFalse(wasm::kTrapFuncInvalid, in_bounds, position); TrapIfFalse(wasm::kTrapFuncInvalid, in_bounds, position);
// Mask the key to prevent SSCA. // Mask the key to prevent SSCA.
...@@ -2617,36 +2623,72 @@ Node* WasmGraphBuilder::CallIndirect(uint32_t sig_index, Node** args, ...@@ -2617,36 +2623,72 @@ Node* WasmGraphBuilder::CallIndirect(uint32_t sig_index, Node** args,
Node* neg_key = Node* neg_key =
graph()->NewNode(machine->Word32Xor(), key, Int32Constant(-1)); graph()->NewNode(machine->Word32Xor(), key, Int32Constant(-1));
Node* masked_diff = graph()->NewNode( Node* masked_diff = graph()->NewNode(
machine->Word32And(), graph()->NewNode(machine->Int32Sub(), key, size), machine->Word32And(),
neg_key); graph()->NewNode(machine->Int32Sub(), key, table_size), neg_key);
Node* mask = Node* mask =
graph()->NewNode(machine->Word32Sar(), masked_diff, Int32Constant(31)); graph()->NewNode(machine->Word32Sar(), masked_diff, Int32Constant(31));
key = graph()->NewNode(machine->Word32And(), key, mask); key = graph()->NewNode(machine->Word32And(), key, mask);
} }
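The masking above is a branchless clamp: if speculation runs past the bounds check, an out-of-range key collapses to 0 instead of indexing outside the table. A standalone sketch of the same arithmetic, for illustration only:

```cpp
#include <cstdint>

// Mirrors the node sequence above: masked_diff = (key - size) & ~key,
// mask = masked_diff >> 31 (arithmetic shift), key &= mask.
uint32_t ClampTableKey(uint32_t key, uint32_t table_size) {
  uint32_t neg_key = key ^ 0xFFFFFFFFu;                 // Word32Xor(key, -1) == ~key
  uint32_t masked_diff = (key - table_size) & neg_key;  // negative iff key < table_size (for keys below 2^31)
  uint32_t mask = static_cast<uint32_t>(
      static_cast<int32_t>(masked_diff) >> 31);         // all ones or all zeros
  return key & mask;                                    // in bounds: unchanged; otherwise forced to 0
}
```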
Node* table_address = function_tables_[table_index].table_addr;
Node* table = graph()->NewNode(
jsgraph()->machine()->Load(MachineType::AnyTagged()), table_address,
jsgraph()->IntPtrConstant(0), *effect_, *control_);
// Load signature from the table and check. // Load signature from the table and check.
// The table is a FixedArray; signatures are encoded as SMIs. // The table is a FixedArray; signatures are encoded as SMIs.
// [sig1, code1, sig2, code2, sig3, code3, ...] // [sig1, code1, sig2, code2, sig3, code3, ...]
static_assert(compiler::kFunctionTableEntrySize == 2, "consistency"); static_assert(compiler::kFunctionTableEntrySize == 2, "consistency");
static_assert(compiler::kFunctionTableSignatureOffset == 0, "consistency"); static_assert(compiler::kFunctionTableSignatureOffset == 0, "consistency");
static_assert(compiler::kFunctionTableCodeOffset == 1, "consistency"); static_assert(compiler::kFunctionTableCodeOffset == 1, "consistency");
int32_t canonical_sig_num = env_->module->signature_ids[sig_index];
if (WASM_CONTEXT_TABLES) {
// The table entries are {IndirectFunctionTableEntry} structs.
Node* scaled_key =
graph()->NewNode(machine->Int32Mul(), key,
Int32Constant(sizeof(IndirectFunctionTableEntry)));
const Operator* add = nullptr;
if (machine->Is64()) {
scaled_key = graph()->NewNode(machine->ChangeInt32ToInt64(), scaled_key);
add = machine->Int64Add();
} else {
add = machine->Int32Add();
}
Node* entry_address = graph()->NewNode(add, table, scaled_key);
Node* loaded_sig = graph()->NewNode(
machine->Load(MachineType::Int32()), entry_address,
Int32Constant(offsetof(IndirectFunctionTableEntry, sig_id)), *effect_,
*control_);
Node* sig_match = graph()->NewNode(machine->WordEqual(), loaded_sig,
Int32Constant(canonical_sig_num));
TrapIfFalse(wasm::kTrapFuncSigMismatch, sig_match, position);
Node* target = graph()->NewNode(
machine->Load(MachineType::Pointer()), entry_address,
Int32Constant(offsetof(IndirectFunctionTableEntry, target)), *effect_,
*control_);
Node* loaded_context = graph()->NewNode(
machine->Load(MachineType::Pointer()), entry_address,
Int32Constant(offsetof(IndirectFunctionTableEntry, context)), *effect_,
*control_);
args[0] = target;
return BuildWasmCall(sig, args, rets, position, loaded_context);
}
// The table entries are elements of a fixed array.
ElementAccess access = AccessBuilder::ForFixedArrayElement(); ElementAccess access = AccessBuilder::ForFixedArrayElement();
const int fixed_offset = access.header_size - access.tag(); const int fixed_offset = access.header_size - access.tag();
Node* key_offset = graph()->NewNode(machine->Word32Shl(), key, Node* key_offset = graph()->NewNode(machine->Word32Shl(), key,
Int32Constant(kPointerSizeLog2 + 1)); Int32Constant(kPointerSizeLog2 + 1));
Node* load_sig = Node* loaded_sig =
graph()->NewNode(machine->Load(MachineType::AnyTagged()), table, graph()->NewNode(machine->Load(MachineType::AnyTagged()), table,
graph()->NewNode(machine->Int32Add(), key_offset, graph()->NewNode(machine->Int32Add(), key_offset,
Int32Constant(fixed_offset)), Int32Constant(fixed_offset)),
*effect_, *control_); *effect_, *control_);
int32_t canonical_sig_num = env_->module->signature_ids[sig_index];
CHECK_GE(canonical_sig_num, 0); CHECK_GE(canonical_sig_num, 0);
Node* sig_match = graph()->NewNode(machine->WordEqual(), load_sig, Node* sig_match = graph()->NewNode(machine->WordEqual(), loaded_sig,
jsgraph()->SmiConstant(canonical_sig_num)); jsgraph()->SmiConstant(canonical_sig_num));
TrapIfFalse(wasm::kTrapFuncSigMismatch, sig_match, position); TrapIfFalse(wasm::kTrapFuncSigMismatch, sig_match, position);
// Load code object from the table. It is held by a Foreign. // Load code object from the table. It is held by a Foreign.
...@@ -2655,15 +2697,7 @@ Node* WasmGraphBuilder::CallIndirect(uint32_t sig_index, Node** args, ...@@ -2655,15 +2697,7 @@ Node* WasmGraphBuilder::CallIndirect(uint32_t sig_index, Node** args,
graph()->NewNode(machine->Int32Add(), key_offset, graph()->NewNode(machine->Int32Add(), key_offset,
Uint32Constant(fixed_offset + kPointerSize)), Uint32Constant(fixed_offset + kPointerSize)),
*effect_, *control_); *effect_, *control_);
if (FLAG_wasm_jit_to_native) {
Node* address = graph()->NewNode(
machine->Load(MachineType::Pointer()), entry,
Int32Constant(Foreign::kForeignAddressOffset - kHeapObjectTag),
*effect_, *control_);
args[0] = address;
} else {
args[0] = entry; args[0] = entry;
}
return BuildWasmCall(sig, args, rets, position); return BuildWasmCall(sig, args, rets, position);
} }
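In the fallback path (WASM_CONTEXT_TABLES off), each table slot is a [signature, code] pair in a FixedArray, so entry i starts at the array header plus i * 2 * kPointerSize. A sketch of that offset arithmetic; the header and tag constants are illustrative values for a 64-bit build, not authoritative V8 numbers:

```cpp
#include <cstdint>

constexpr int kPointerSize = 8;
constexpr int kPointerSizeLog2 = 3;
constexpr int kFixedArrayHeaderSize = 16;  // map + length (assumption)
constexpr int kHeapObjectTag = 1;

struct EntrySlots { int sig_offset; int code_offset; };

EntrySlots FixedArrayEntrySlots(uint32_t key) {
  // key_offset = key << (kPointerSizeLog2 + 1), i.e. key * 2 * kPointerSize.
  int key_offset = static_cast<int>(key << (kPointerSizeLog2 + 1));
  int fixed_offset = kFixedArrayHeaderSize - kHeapObjectTag;  // untag the array pointer
  return {fixed_offset + key_offset,                  // signature slot (a Smi)
          fixed_offset + key_offset + kPointerSize};  // code slot (Code or Foreign)
}
```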
...@@ -3615,8 +3649,25 @@ Node* WasmGraphBuilder::CurrentMemoryPages() { ...@@ -3615,8 +3649,25 @@ Node* WasmGraphBuilder::CurrentMemoryPages() {
jsgraph()->Int32Constant(WhichPowerOf2(wasm::kWasmPageSize))); jsgraph()->Int32Constant(WhichPowerOf2(wasm::kWasmPageSize)));
} }
void WasmGraphBuilder::EnsureFunctionTableNodes() { void WasmGraphBuilder::GetFunctionTableNodes(uint32_t table_index, Node** table,
if (function_tables_.size() > 0) return; Node** table_size) {
if (WASM_CONTEXT_TABLES) {
// The table address and size are stored in the WasmContext.
// Don't bother caching them, since they are only used in indirect calls,
// which would cause them to be spilled on the stack anyway.
*table = graph()->NewNode(
jsgraph()->machine()->Load(MachineType::UintPtr()), wasm_context_.get(),
jsgraph()->Int32Constant(
static_cast<int32_t>(offsetof(WasmContext, table))),
*effect_, *control_);
*table_size = graph()->NewNode(
jsgraph()->machine()->Load(MachineType::Uint32()), wasm_context_.get(),
jsgraph()->Int32Constant(
static_cast<int32_t>(offsetof(WasmContext, table_size))),
*effect_, *control_);
} else {
// The function table nodes are relocatable constants.
if (function_tables_.size() == 0) {
size_t tables_size = env_->function_tables.size(); size_t tables_size = env_->function_tables.size();
for (size_t i = 0; i < tables_size; ++i) { for (size_t i = 0; i < tables_size; ++i) {
wasm::GlobalHandleAddress function_handle_address = wasm::GlobalHandleAddress function_handle_address =
...@@ -3630,6 +3681,13 @@ void WasmGraphBuilder::EnsureFunctionTableNodes() { ...@@ -3630,6 +3681,13 @@ void WasmGraphBuilder::EnsureFunctionTableNodes() {
RelocInfo::WASM_FUNCTION_TABLE_SIZE_REFERENCE); RelocInfo::WASM_FUNCTION_TABLE_SIZE_REFERENCE);
function_tables_.push_back({table_addr, size}); function_tables_.push_back({table_addr, size});
} }
}
*table_size = function_tables_[table_index].size;
*table =
graph()->NewNode(jsgraph()->machine()->Load(MachineType::AnyTagged()),
function_tables_[table_index].table_addr,
jsgraph()->IntPtrConstant(0), *effect_, *control_);
}
} }
Node* WasmGraphBuilder::BuildModifyThreadInWasmFlag(bool new_value) { Node* WasmGraphBuilder::BuildModifyThreadInWasmFlag(bool new_value) {
......
...@@ -372,7 +372,8 @@ class WasmGraphBuilder { ...@@ -372,7 +372,8 @@ class WasmGraphBuilder {
Node* ToJS(Node* node, wasm::ValueType type); Node* ToJS(Node* node, wasm::ValueType type);
Node* FromJS(Node* node, Node* js_context, wasm::ValueType type); Node* FromJS(Node* node, Node* js_context, wasm::ValueType type);
Node* Invert(Node* node); Node* Invert(Node* node);
void EnsureFunctionTableNodes(); void GetFunctionTableNodes(uint32_t table_index, Node** table,
Node** table_size);
//----------------------------------------------------------------------- //-----------------------------------------------------------------------
// Operations that concern the linear memory. // Operations that concern the linear memory.
...@@ -506,7 +507,8 @@ class WasmGraphBuilder { ...@@ -506,7 +507,8 @@ class WasmGraphBuilder {
template <typename... Args> template <typename... Args>
Node* BuildCCall(MachineSignature* sig, Node* function, Args... args); Node* BuildCCall(MachineSignature* sig, Node* function, Args... args);
Node* BuildWasmCall(wasm::FunctionSig* sig, Node** args, Node*** rets, Node* BuildWasmCall(wasm::FunctionSig* sig, Node** args, Node*** rets,
wasm::WasmCodePosition position); wasm::WasmCodePosition position,
Node* wasm_context = nullptr);
Node* BuildF32CopySign(Node* left, Node* right); Node* BuildF32CopySign(Node* left, Node* right);
Node* BuildF64CopySign(Node* left, Node* right); Node* BuildF64CopySign(Node* left, Node* right);
......
...@@ -218,7 +218,7 @@ void LiftoffAssembler::CallRuntime(Zone* zone, Runtime::FunctionId fid) { ...@@ -218,7 +218,7 @@ void LiftoffAssembler::CallRuntime(Zone* zone, Runtime::FunctionId fid) {
} }
void LiftoffAssembler::CallIndirect(wasm::FunctionSig* sig, void LiftoffAssembler::CallIndirect(wasm::FunctionSig* sig,
compiler::CallDescriptor* call_descriptor, compiler::CallDescriptor* call_desc,
Register target) { Register target) {
BAILOUT("CallIndirect"); BAILOUT("CallIndirect");
} }
......
...@@ -218,7 +218,7 @@ void LiftoffAssembler::CallRuntime(Zone* zone, Runtime::FunctionId fid) { ...@@ -218,7 +218,7 @@ void LiftoffAssembler::CallRuntime(Zone* zone, Runtime::FunctionId fid) {
} }
void LiftoffAssembler::CallIndirect(wasm::FunctionSig* sig, void LiftoffAssembler::CallIndirect(wasm::FunctionSig* sig,
compiler::CallDescriptor* call_descriptor, compiler::CallDescriptor* call_desc,
Register target) { Register target) {
BAILOUT("CallIndirect"); BAILOUT("CallIndirect");
} }
......
...@@ -467,7 +467,8 @@ void LiftoffAssembler::SpillAllRegisters() { ...@@ -467,7 +467,8 @@ void LiftoffAssembler::SpillAllRegisters() {
void LiftoffAssembler::PrepareCall(wasm::FunctionSig* sig, void LiftoffAssembler::PrepareCall(wasm::FunctionSig* sig,
compiler::CallDescriptor* call_descriptor, compiler::CallDescriptor* call_descriptor,
uint32_t* max_used_spill_slot, uint32_t* max_used_spill_slot,
Register* target) { Register* target,
LiftoffRegister* explicit_context) {
uint32_t num_params = static_cast<uint32_t>(sig->parameter_count()); uint32_t num_params = static_cast<uint32_t>(sig->parameter_count());
// Input 0 is the call target. // Input 0 is the call target.
constexpr size_t kInputShift = 1; constexpr size_t kInputShift = 1;
...@@ -483,13 +484,23 @@ void LiftoffAssembler::PrepareCall(wasm::FunctionSig* sig, ...@@ -483,13 +484,23 @@ void LiftoffAssembler::PrepareCall(wasm::FunctionSig* sig,
} }
StackTransferRecipe stack_transfers(this); StackTransferRecipe stack_transfers(this);
LiftoffRegList param_regs;
// Move the explicit context (if any) into the correct context register.
compiler::LinkageLocation context_loc =
call_descriptor->GetInputLocation(kInputShift);
DCHECK(context_loc.IsRegister() && !context_loc.IsAnyRegister());
LiftoffRegister context_reg(Register::from_code(context_loc.AsRegister()));
param_regs.set(context_reg);
if (explicit_context && *explicit_context != context_reg) {
stack_transfers.MoveRegister(context_reg, *explicit_context, kWasmIntPtr);
}
// Now move all parameter values into the right slot for the call. // Now move all parameter values into the right slot for the call.
// Don't pop values yet, such that the stack height is still correct when // Don't pop values yet, such that the stack height is still correct when
// executing the {stack_transfers}. // executing the {stack_transfers}.
// Process parameters backwards, such that pushes of caller frame slots are // Process parameters backwards, such that pushes of caller frame slots are
// in the correct order. // in the correct order.
LiftoffRegList param_regs;
uint32_t param_base = cache_state_.stack_height() - num_params; uint32_t param_base = cache_state_.stack_height() - num_params;
uint32_t call_desc_input_idx = uint32_t call_desc_input_idx =
static_cast<uint32_t>(call_descriptor->InputCount()); static_cast<uint32_t>(call_descriptor->InputCount());
...@@ -527,12 +538,6 @@ void LiftoffAssembler::PrepareCall(wasm::FunctionSig* sig, ...@@ -527,12 +538,6 @@ void LiftoffAssembler::PrepareCall(wasm::FunctionSig* sig,
// {call_desc_input_idx} should point after the context parameter now. // {call_desc_input_idx} should point after the context parameter now.
DCHECK_EQ(call_desc_input_idx, kInputShift + 1); DCHECK_EQ(call_desc_input_idx, kInputShift + 1);
compiler::LinkageLocation context_loc =
call_descriptor->GetInputLocation(kInputShift);
DCHECK(context_loc.IsRegister() && !context_loc.IsAnyRegister());
Register context_reg = Register::from_code(context_loc.AsRegister());
param_regs.set(LiftoffRegister(context_reg));
// If the target register overlaps with a parameter register, then move the // If the target register overlaps with a parameter register, then move the
// target to another free register, or spill to the stack. // target to another free register, or spill to the stack.
if (target && param_regs.has(LiftoffRegister(*target))) { if (target && param_regs.has(LiftoffRegister(*target))) {
...@@ -563,8 +568,10 @@ void LiftoffAssembler::PrepareCall(wasm::FunctionSig* sig, ...@@ -563,8 +568,10 @@ void LiftoffAssembler::PrepareCall(wasm::FunctionSig* sig,
// Reset register use counters. // Reset register use counters.
cache_state_.reset_used_registers(); cache_state_.reset_used_registers();
// Fill the wasm context into the right register. // Reload the context from the stack.
FillContextInto(context_reg); if (!explicit_context) {
FillContextInto(context_reg.gp());
}
} }
void LiftoffAssembler::FinishCall(wasm::FunctionSig* sig, void LiftoffAssembler::FinishCall(wasm::FunctionSig* sig,
......
...@@ -310,7 +310,8 @@ class LiftoffAssembler : public TurboAssembler { ...@@ -310,7 +310,8 @@ class LiftoffAssembler : public TurboAssembler {
// TODO(clemensh): Remove {max_used_spill_slot} once we support arbitrary // TODO(clemensh): Remove {max_used_spill_slot} once we support arbitrary
// stack sizes. // stack sizes.
void PrepareCall(wasm::FunctionSig*, compiler::CallDescriptor*, void PrepareCall(wasm::FunctionSig*, compiler::CallDescriptor*,
uint32_t* max_used_spill_slot, Register* target = nullptr); uint32_t* max_used_spill_slot, Register* target = nullptr,
LiftoffRegister* explicit_context = nullptr);
// Process return values of the call. // Process return values of the call.
void FinishCall(wasm::FunctionSig*, compiler::CallDescriptor*); void FinishCall(wasm::FunctionSig*, compiler::CallDescriptor*);
......
...@@ -1141,12 +1141,14 @@ class LiftoffCompiler { ...@@ -1141,12 +1141,14 @@ class LiftoffCompiler {
void CallIndirect(Decoder* decoder, const Value& index_val, void CallIndirect(Decoder* decoder, const Value& index_val,
const CallIndirectOperand<validate>& operand, const CallIndirectOperand<validate>& operand,
const Value args[], Value returns[]) { const Value args[], Value returns[]) {
if (operand.sig->return_count() > 1) if (operand.sig->return_count() > 1) {
return unsupported(decoder, "multi-return"); return unsupported(decoder, "multi-return");
}
if (operand.sig->return_count() == 1 && if (operand.sig->return_count() == 1 &&
!CheckSupportedType(decoder, kTypes_ilfd, operand.sig->GetReturn(0), !CheckSupportedType(decoder, kTypes_ilfd, operand.sig->GetReturn(0),
"return")) "return")) {
return; return;
}
// Assume only one table for now. // Assume only one table for now.
uint32_t table_index = 0; uint32_t table_index = 0;
...@@ -1169,63 +1171,106 @@ class LiftoffCompiler { ...@@ -1169,63 +1171,106 @@ class LiftoffCompiler {
pinned.set(__ GetUnusedRegister(kGpReg, pinned)); pinned.set(__ GetUnusedRegister(kGpReg, pinned));
LiftoffRegister scratch = pinned.set(__ GetUnusedRegister(kGpReg, pinned)); LiftoffRegister scratch = pinned.set(__ GetUnusedRegister(kGpReg, pinned));
LiftoffRegister* explicit_context = nullptr;
// Bounds check against the table size. // Bounds check against the table size.
{ Label* invalid_func_label = AddOutOfLineTrap(
decoder->position(), Builtins::kThrowWasmTrapFuncInvalid);
static constexpr LoadType kPointerLoadType =
kPointerSize == 8 ? LoadType::kI64Load : LoadType::kI32Load;
static constexpr int kFixedArrayOffset =
FixedArray::kHeaderSize - kHeapObjectTag;
uint32_t canonical_sig_num = env_->module->signature_ids[operand.sig_index];
DCHECK_GE(canonical_sig_num, 0);
DCHECK_GE(kMaxInt, canonical_sig_num);
if (WASM_CONTEXT_TABLES) {
// Compare against table size stored in {wasm_context->table_size}.
__ LoadFromContext(tmp_const.gp(), offsetof(WasmContext, table_size),
sizeof(uint32_t));
__ emit_cond_jump(kUnsignedGreaterEqual, invalid_func_label, kWasmI32,
index.gp(), tmp_const.gp());
// Load the table from {wasm_context->table}
__ LoadFromContext(table.gp(), offsetof(WasmContext, table),
kPointerSize);
// Load the signature from {wasm_context->table[$index].sig_id}
// == wasm_context.table + $index * #sizeof(IndirectFunctionTableEntry)
// + #offsetof(sig_id)
__ LoadConstant(
tmp_const,
WasmValue(static_cast<uint32_t>(sizeof(IndirectFunctionTableEntry))));
__ emit_i32_mul(index.gp(), index.gp(), tmp_const.gp());
__ Load(scratch, table.gp(), index.gp(),
offsetof(IndirectFunctionTableEntry, sig_id), LoadType::kI32Load,
pinned);
__ LoadConstant(tmp_const, WasmValue(canonical_sig_num));
Label* sig_mismatch_label = AddOutOfLineTrap(
decoder->position(), Builtins::kThrowWasmTrapFuncSigMismatch);
__ emit_cond_jump(kUnequal, sig_mismatch_label,
LiftoffAssembler::kWasmIntPtr, scratch.gp(),
tmp_const.gp());
// Load the target address from {wasm_context->table[$index].target}
__ Load(scratch, table.gp(), index.gp(),
offsetof(IndirectFunctionTableEntry, target), kPointerLoadType,
pinned);
// Load the context from {wasm_context->table[$index].context}
// TODO(wasm): directly allocate the correct context register to avoid
// any potential moves.
__ Load(tmp_const, table.gp(), index.gp(),
offsetof(IndirectFunctionTableEntry, context), kPointerLoadType,
pinned);
explicit_context = &tmp_const;
} else {
// Compare against table size, which is a patchable constant.
uint32_t table_size = uint32_t table_size =
env_->module->function_tables[table_index].initial_size; env_->module->function_tables[table_index].initial_size;
Label* trap_label = AddOutOfLineTrap(decoder->position(),
Builtins::kThrowWasmTrapFuncInvalid);
__ LoadConstant(tmp_const, WasmValue(table_size), __ LoadConstant(tmp_const, WasmValue(table_size),
RelocInfo::WASM_FUNCTION_TABLE_SIZE_REFERENCE); RelocInfo::WASM_FUNCTION_TABLE_SIZE_REFERENCE);
__ emit_cond_jump(kUnsignedGreaterEqual, trap_label, kWasmI32, index.gp(),
tmp_const.gp()); __ emit_cond_jump(kUnsignedGreaterEqual, invalid_func_label, kWasmI32,
} index.gp(), tmp_const.gp());
wasm::GlobalHandleAddress function_table_handle_address = wasm::GlobalHandleAddress function_table_handle_address =
env_->function_tables[table_index]; env_->function_tables[table_index];
__ LoadConstant(table, WasmPtrValue(function_table_handle_address), __ LoadConstant(table, WasmPtrValue(function_table_handle_address),
RelocInfo::WASM_GLOBAL_HANDLE); RelocInfo::WASM_GLOBAL_HANDLE);
static constexpr LoadType kPointerLoadType =
kPointerSize == 8 ? LoadType::kI64Load : LoadType::kI32Load;
__ Load(table, table.gp(), no_reg, 0, kPointerLoadType, pinned); __ Load(table, table.gp(), no_reg, 0, kPointerLoadType, pinned);
// Load signature from the table and check. // Load signature from the table and check.
// The table is a FixedArray; signatures are encoded as SMIs. // The table is a FixedArray; signatures are encoded as SMIs.
// [sig1, code1, sig2, code2, sig3, code3, ...] // [sig1, code1, sig2, code2, sig3, code3, ...]
static_assert(compiler::kFunctionTableEntrySize == 2, "consistency"); static_assert(compiler::kFunctionTableEntrySize == 2, "consistency");
static_assert(compiler::kFunctionTableSignatureOffset == 0, "consistency"); static_assert(compiler::kFunctionTableSignatureOffset == 0,
"consistency");
static_assert(compiler::kFunctionTableCodeOffset == 1, "consistency"); static_assert(compiler::kFunctionTableCodeOffset == 1, "consistency");
constexpr int kFixedArrayOffset = FixedArray::kHeaderSize - kHeapObjectTag;
__ LoadConstant(tmp_const, WasmValue(kPointerSizeLog2 + 1)); __ LoadConstant(tmp_const, WasmValue(kPointerSizeLog2 + 1));
// Shift index such that it's the offset of the signature in the FixedArray. // Shift index such that it's the offset of the signature in the
// FixedArray.
__ emit_i32_shl(index.gp(), index.gp(), tmp_const.gp(), pinned); __ emit_i32_shl(index.gp(), index.gp(), tmp_const.gp(), pinned);
// Load the signature. // Load the signature.
__ Load(scratch, table.gp(), index.gp(), kFixedArrayOffset, __ Load(scratch, table.gp(), index.gp(), kFixedArrayOffset,
kPointerLoadType, pinned); kPointerLoadType, pinned);
uint32_t canonical_sig_num = env_->module->signature_ids[operand.sig_index];
DCHECK_GE(canonical_sig_num, 0);
DCHECK_GE(kMaxInt, canonical_sig_num);
__ LoadConstant(tmp_const, WasmPtrValue(Smi::FromInt(canonical_sig_num))); __ LoadConstant(tmp_const, WasmPtrValue(Smi::FromInt(canonical_sig_num)));
Label* trap_label = AddOutOfLineTrap( Label* sig_mismatch_label = AddOutOfLineTrap(
decoder->position(), Builtins::kThrowWasmTrapFuncSigMismatch); decoder->position(), Builtins::kThrowWasmTrapFuncSigMismatch);
__ emit_cond_jump(kUnequal, trap_label, LiftoffAssembler::kWasmIntPtr, __ emit_cond_jump(kUnequal, sig_mismatch_label,
scratch.gp(), tmp_const.gp()); LiftoffAssembler::kWasmIntPtr, scratch.gp(),
tmp_const.gp());
// Load code object. // Load code object.
__ Load(scratch, table.gp(), index.gp(), kFixedArrayOffset + kPointerSize, __ Load(scratch, table.gp(), index.gp(), kFixedArrayOffset + kPointerSize,
kPointerLoadType, pinned); kPointerLoadType, pinned);
if (FLAG_wasm_jit_to_native) {
// The table holds a Foreign pointing to the instruction start.
__ Load(scratch, scratch.gp(), no_reg,
Foreign::kForeignAddressOffset - kHeapObjectTag, kPointerLoadType,
pinned);
} else {
// Move the pointer from the Code object to the instruction start. // Move the pointer from the Code object to the instruction start.
__ LoadConstant(tmp_const, __ LoadConstant(tmp_const,
WasmPtrValue(Code::kHeaderSize - kHeapObjectTag)); WasmPtrValue(Code::kHeaderSize - kHeapObjectTag));
...@@ -1242,7 +1287,8 @@ class LiftoffCompiler { ...@@ -1242,7 +1287,8 @@ class LiftoffCompiler {
uint32_t max_used_spill_slot = 0; uint32_t max_used_spill_slot = 0;
Register target = scratch.gp(); Register target = scratch.gp();
__ PrepareCall(operand.sig, call_descriptor, &max_used_spill_slot, &target); __ PrepareCall(operand.sig, call_descriptor, &max_used_spill_slot, &target,
explicit_context);
__ CallIndirect(operand.sig, call_descriptor, target); __ CallIndirect(operand.sig, call_descriptor, target);
if (max_used_spill_slot > if (max_used_spill_slot >
__ num_locals() + LiftoffAssembler::kMaxValueStackHeight) { __ num_locals() + LiftoffAssembler::kMaxValueStackHeight) {
......
...@@ -306,6 +306,8 @@ class InstanceBuilder { ...@@ -306,6 +306,8 @@ class InstanceBuilder {
// Build an instance, in all of its glory. // Build an instance, in all of its glory.
MaybeHandle<WasmInstanceObject> Build(); MaybeHandle<WasmInstanceObject> Build();
// Run the start function, if any.
bool ExecuteStartFunction();
private: private:
// Represents the initialized state of a table. // Represents the initialized state of a table.
...@@ -333,6 +335,7 @@ class InstanceBuilder { ...@@ -333,6 +335,7 @@ class InstanceBuilder {
Handle<WasmCompiledModule> compiled_module_; Handle<WasmCompiledModule> compiled_module_;
std::vector<TableInstance> table_instances_; std::vector<TableInstance> table_instances_;
std::vector<Handle<JSFunction>> js_wrappers_; std::vector<Handle<JSFunction>> js_wrappers_;
Handle<WasmExportedFunction> start_function_;
JSToWasmWrapperCache js_to_wasm_cache_; JSToWasmWrapperCache js_to_wasm_cache_;
std::vector<SanitizedImport> sanitized_imports_; std::vector<SanitizedImport> sanitized_imports_;
...@@ -424,8 +427,9 @@ class InstanceBuilder { ...@@ -424,8 +427,9 @@ class InstanceBuilder {
class SetOfNativeModuleModificationScopes final { class SetOfNativeModuleModificationScopes final {
public: public:
void Add(NativeModule* module) { void Add(NativeModule* module) {
if (native_modules_.insert(module).second) {
module->SetExecutable(false); module->SetExecutable(false);
native_modules_.insert(module); }
} }
~SetOfNativeModuleModificationScopes() { ~SetOfNativeModuleModificationScopes() {
...@@ -438,6 +442,16 @@ class SetOfNativeModuleModificationScopes final { ...@@ -438,6 +442,16 @@ class SetOfNativeModuleModificationScopes final {
std::unordered_set<NativeModule*> native_modules_; std::unordered_set<NativeModule*> native_modules_;
}; };
void EnsureWasmContextTable(WasmContext* wasm_context, int table_size) {
if (wasm_context->table) return;
wasm_context->table_size = table_size;
wasm_context->table = reinterpret_cast<IndirectFunctionTableEntry*>(
calloc(table_size, sizeof(IndirectFunctionTableEntry)));
for (int i = 0; i < table_size; i++) {
wasm_context->table[i].sig_id = kInvalidSigIndex;
}
}
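The import and table-segment code later in this CL fills the entries that EnsureWasmContextTable() allocates. A hedged sketch of that linking step; the helper name is illustrative, the three stores mirror the diff:

```cpp
// Illustrative helper only; the real CL performs these stores inline in
// ProcessImports() and LoadTableSegments().
void LinkIndirectEntry(WasmContext* wasm_context, int index, int32_t sig_id,
                       WasmContext* callee_context, uintptr_t target) {
  IndirectFunctionTableEntry& entry = wasm_context->table[index];
  entry.sig_id = sig_id;           // canonical signature id (or kInvalidSigIndex)
  entry.context = callee_context;  // context of the instance that owns the code
  entry.target = target;           // instruction start of the callee
}
```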
} // namespace } // namespace
MaybeHandle<WasmInstanceObject> InstantiateToInstanceObject( MaybeHandle<WasmInstanceObject> InstantiateToInstanceObject(
...@@ -445,7 +459,11 @@ MaybeHandle<WasmInstanceObject> InstantiateToInstanceObject( ...@@ -445,7 +459,11 @@ MaybeHandle<WasmInstanceObject> InstantiateToInstanceObject(
Handle<WasmModuleObject> module_object, MaybeHandle<JSReceiver> imports, Handle<WasmModuleObject> module_object, MaybeHandle<JSReceiver> imports,
MaybeHandle<JSArrayBuffer> memory) { MaybeHandle<JSArrayBuffer> memory) {
InstanceBuilder builder(isolate, thrower, module_object, imports, memory); InstanceBuilder builder(isolate, thrower, module_object, imports, memory);
return builder.Build(); auto instance = builder.Build();
if (!instance.is_null() && builder.ExecuteStartFunction()) {
return instance;
}
return {};
} }
Handle<Code> CompileLazyOnGCHeap(Isolate* isolate) { Handle<Code> CompileLazyOnGCHeap(Isolate* isolate) {
...@@ -638,13 +656,20 @@ Address CompileLazy(Isolate* isolate) { ...@@ -638,13 +656,20 @@ Address CompileLazy(Isolate* isolate) {
// See EnsureExportedLazyDeoptData: exp_deopt_data[0...(len-1)] are pairs // See EnsureExportedLazyDeoptData: exp_deopt_data[0...(len-1)] are pairs
// of <export_table, index> followed by undefined values. Use this // of <export_table, index> followed by undefined values. Use this
// information here to patch all export tables. // information here to patch all export tables.
Address target = result->instructions().start();
Handle<Foreign> foreign_holder = Handle<Foreign> foreign_holder =
isolate->factory()->NewForeign(result->instructions().start(), TENURED); isolate->factory()->NewForeign(target, TENURED);
for (int idx = 0, end = exp_deopt_data->length(); idx < end; idx += 2) { for (int idx = 0, end = exp_deopt_data->length(); idx < end; idx += 2) {
if (exp_deopt_data->get(idx)->IsUndefined(isolate)) break; if (exp_deopt_data->get(idx)->IsUndefined(isolate)) break;
DisallowHeapAllocation no_gc; DisallowHeapAllocation no_gc;
int exp_index = Smi::ToInt(exp_deopt_data->get(idx + 1)); int exp_index = Smi::ToInt(exp_deopt_data->get(idx + 1));
FixedArray* exp_table = FixedArray::cast(exp_deopt_data->get(idx)); FixedArray* exp_table = FixedArray::cast(exp_deopt_data->get(idx));
if (WASM_CONTEXT_TABLES) {
// TODO(titzer): patching of function tables for lazy compilation
// only works for a single instance.
instance->wasm_context()->get()->table[exp_index].target = target;
} else {
int table_index = compiler::FunctionTableCodeOffset(exp_index); int table_index = compiler::FunctionTableCodeOffset(exp_index);
DCHECK_EQ(Foreign::cast(exp_table->get(table_index))->foreign_address(), DCHECK_EQ(Foreign::cast(exp_table->get(table_index))->foreign_address(),
lazy_stub_or_copy->instructions().start()); lazy_stub_or_copy->instructions().start());
...@@ -652,16 +677,16 @@ Address CompileLazy(Isolate* isolate) { ...@@ -652,16 +677,16 @@ Address CompileLazy(Isolate* isolate) {
exp_table->set(table_index, *foreign_holder); exp_table->set(table_index, *foreign_holder);
++patched; ++patched;
} }
// TODO(6792): No longer needed once WebAssembly code is off heap. }
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
// After processing, remove the list of exported entries, such that we don't // After processing, remove the list of exported entries, such that we don't
// do the patching redundantly. // do the patching redundantly.
compiled_module->lazy_compile_data()->set( compiled_module->lazy_compile_data()->set(
func_index, isolate->heap()->undefined_value()); func_index, isolate->heap()->undefined_value());
if (!WASM_CONTEXT_TABLES) {
DCHECK_LT(0, patched); DCHECK_LT(0, patched);
USE(patched); USE(patched);
} }
}
return result->instructions().start(); return result->instructions().start();
} }
...@@ -671,8 +696,7 @@ compiler::ModuleEnv CreateModuleEnvFromCompiledModule( ...@@ -671,8 +696,7 @@ compiler::ModuleEnv CreateModuleEnvFromCompiledModule(
DisallowHeapAllocation no_gc; DisallowHeapAllocation no_gc;
WasmModule* module = compiled_module->shared()->module(); WasmModule* module = compiled_module->shared()->module();
if (FLAG_wasm_jit_to_native) { if (FLAG_wasm_jit_to_native) {
NativeModule* native_module = compiled_module->GetNativeModule(); compiler::ModuleEnv result(module, std::vector<Address>{},
compiler::ModuleEnv result(module, native_module->function_tables(),
std::vector<Handle<Code>>{}, std::vector<Handle<Code>>{},
BUILTIN_CODE(isolate, WasmCompileLazy), BUILTIN_CODE(isolate, WasmCompileLazy),
compiled_module->use_trap_handler()); compiled_module->use_trap_handler());
...@@ -1649,7 +1673,7 @@ WasmCodeWrapper EnsureTableExportLazyDeoptData( ...@@ -1649,7 +1673,7 @@ WasmCodeWrapper EnsureTableExportLazyDeoptData(
Isolate* isolate, Handle<WasmInstanceObject> instance, Isolate* isolate, Handle<WasmInstanceObject> instance,
Handle<FixedArray> code_table, wasm::NativeModule* native_module, Handle<FixedArray> code_table, wasm::NativeModule* native_module,
uint32_t func_index, Handle<FixedArray> export_table, int export_index, uint32_t func_index, Handle<FixedArray> export_table, int export_index,
std::unordered_map<uint32_t, uint32_t>* table_export_count) { std::unordered_map<uint32_t, uint32_t>* num_table_exports) {
if (!FLAG_wasm_jit_to_native) { if (!FLAG_wasm_jit_to_native) {
Handle<Code> code = Handle<Code> code =
EnsureExportedLazyDeoptData(isolate, instance, code_table, EnsureExportedLazyDeoptData(isolate, instance, code_table,
...@@ -1669,10 +1693,10 @@ WasmCodeWrapper EnsureTableExportLazyDeoptData( ...@@ -1669,10 +1693,10 @@ WasmCodeWrapper EnsureTableExportLazyDeoptData(
// [#4: export table // [#4: export table
// #5: export table index] // #5: export table index]
// ... // ...
// table_export_count counts down and determines the index for the new // num_table_exports counts down and determines the index for the new
// export table entry. // export table entry.
auto table_export_entry = table_export_count->find(func_index); auto table_export_entry = num_table_exports->find(func_index);
DCHECK(table_export_entry != table_export_count->end()); DCHECK(table_export_entry != num_table_exports->end());
DCHECK_LT(0, table_export_entry->second); DCHECK_LT(0, table_export_entry->second);
uint32_t this_idx = 2 * table_export_entry->second; uint32_t this_idx = 2 * table_export_entry->second;
--table_export_entry->second; --table_export_entry->second;
...@@ -1705,10 +1729,10 @@ WasmCodeWrapper EnsureTableExportLazyDeoptData( ...@@ -1705,10 +1729,10 @@ WasmCodeWrapper EnsureTableExportLazyDeoptData(
// [#2: export table // [#2: export table
// #3: export table index] // #3: export table index]
// ... // ...
// table_export_count counts down and determines the index for the new // num_table_exports counts down and determines the index for the new
// export table entry. // export table entry.
auto table_export_entry = table_export_count->find(func_index); auto table_export_entry = num_table_exports->find(func_index);
DCHECK(table_export_entry != table_export_count->end()); DCHECK(table_export_entry != num_table_exports->end());
DCHECK_LT(0, table_export_entry->second); DCHECK_LT(0, table_export_entry->second);
--table_export_entry->second; --table_export_entry->second;
uint32_t this_idx = 2 * table_export_entry->second; uint32_t this_idx = 2 * table_export_entry->second;
...@@ -2044,12 +2068,8 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() { ...@@ -2044,12 +2068,8 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
if (thrower_->error()) return {}; if (thrower_->error()) return {};
// TODO(6792): No longer needed once WebAssembly code is off heap. // TODO(6792): No longer needed once WebAssembly code is off heap.
// Use base::Optional to be able to close the scope before executing the start CodeSpaceMemoryModificationScope modification_scope(isolate_->heap());
// function.
base::Optional<CodeSpaceMemoryModificationScope> modification_scope(
base::in_place_t(), isolate_->heap());
// From here on, we expect the build pipeline to run without exiting to JS. // From here on, we expect the build pipeline to run without exiting to JS.
// Exception is when we run the startup function.
DisallowJavascriptExecution no_js(isolate_); DisallowJavascriptExecution no_js(isolate_);
// Record build time into correct bucket, then build instance. // Record build time into correct bucket, then build instance.
TimedHistogramScope wasm_instantiate_module_time_scope( TimedHistogramScope wasm_instantiate_module_time_scope(
...@@ -2425,41 +2445,20 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() { ...@@ -2425,41 +2445,20 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
} }
//-------------------------------------------------------------------------- //--------------------------------------------------------------------------
// Execute the start function if one was specified. // Create a wrapper for the start function.
//-------------------------------------------------------------------------- //--------------------------------------------------------------------------
if (module_->start_function_index >= 0) { if (module_->start_function_index >= 0) {
HandleScope scope(isolate_);
int start_index = module_->start_function_index; int start_index = module_->start_function_index;
WasmCodeWrapper startup_code = EnsureExportedLazyDeoptData( WasmCodeWrapper start_code = EnsureExportedLazyDeoptData(
isolate_, instance, code_table, native_module, start_index); isolate_, instance, code_table, native_module, start_index);
FunctionSig* sig = module_->functions[start_index].sig; FunctionSig* sig = module_->functions[start_index].sig;
Handle<Code> wrapper_code = js_to_wasm_cache_.CloneOrCompileJSToWasmWrapper( Handle<Code> wrapper_code = js_to_wasm_cache_.CloneOrCompileJSToWasmWrapper(
isolate_, module_, startup_code, start_index, isolate_, module_, start_code, start_index,
compiled_module_->use_trap_handler()); compiled_module_->use_trap_handler());
Handle<WasmExportedFunction> startup_fct = WasmExportedFunction::New( start_function_ = WasmExportedFunction::New(
isolate_, instance, MaybeHandle<String>(), start_index, isolate_, instance, MaybeHandle<String>(), start_index,
static_cast<int>(sig->parameter_count()), wrapper_code); static_cast<int>(sig->parameter_count()), wrapper_code);
RecordStats(startup_code, counters()); RecordStats(start_code, counters());
// Call the JS function.
Handle<Object> undefined = factory->undefined_value();
// Close the modification scopes, so we can execute the start function.
modification_scope.reset();
native_module_modification_scope.reset();
{
// We're OK with JS execution here. The instance is fully setup.
AllowJavascriptExecution allow_js(isolate_);
MaybeHandle<Object> retval =
Execution::Call(isolate_, startup_fct, undefined, 0, nullptr);
if (retval.is_null()) {
DCHECK(isolate_->has_pending_exception());
// It's unfortunate that the new instance is already linked in the
// chain. However, we need to set up everything before executing the
// startup unction, such that stack trace information can be generated
// correctly already in the start function.
return {};
}
}
} }
DCHECK(!isolate_->has_pending_exception()); DCHECK(!isolate_->has_pending_exception());
...@@ -2473,6 +2472,22 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() { ...@@ -2473,6 +2472,22 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
return instance; return instance;
} }
bool InstanceBuilder::ExecuteStartFunction() {
if (start_function_.is_null()) return true; // No start function.
HandleScope scope(isolate_);
// Call the JS function.
Handle<Object> undefined = isolate_->factory()->undefined_value();
MaybeHandle<Object> retval =
Execution::Call(isolate_, start_function_, undefined, 0, nullptr);
if (retval.is_null()) {
DCHECK(isolate_->has_pending_exception());
return false;
}
return true;
}
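With the start function split out of Build(), instantiation is driven in two phases. A consolidated sketch of the call-site as it reads after this CL (the wrapper name is hypothetical; the body mirrors InstantiateToInstanceObject() above):

```cpp
MaybeHandle<WasmInstanceObject> InstantiateTwoPhase(
    Isolate* isolate, ErrorThrower* thrower,
    Handle<WasmModuleObject> module_object, MaybeHandle<JSReceiver> imports,
    MaybeHandle<JSArrayBuffer> memory) {
  InstanceBuilder builder(isolate, thrower, module_object, imports, memory);
  // Phase 1: build the instance while the code-space modification scopes are
  // open and JS execution is disallowed.
  MaybeHandle<WasmInstanceObject> instance = builder.Build();
  // Phase 2: run the start function once those scopes are closed again;
  // failure is reported via the pending exception.
  if (instance.is_null() || !builder.ExecuteStartFunction()) return {};
  return instance;
}
```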
// Look up an import value in the {ffi_} object. // Look up an import value in the {ffi_} object.
MaybeHandle<Object> InstanceBuilder::LookupImport(uint32_t index, MaybeHandle<Object> InstanceBuilder::LookupImport(uint32_t index,
Handle<String> module_name, Handle<String> module_name,
...@@ -2757,6 +2772,11 @@ int InstanceBuilder::ProcessImports(Handle<FixedArray> code_table, ...@@ -2757,6 +2772,11 @@ int InstanceBuilder::ProcessImports(Handle<FixedArray> code_table,
i += kFunctionTableEntrySize) { i += kFunctionTableEntrySize) {
table_instance.function_table->set(i, Smi::FromInt(kInvalidSigIndex)); table_instance.function_table->set(i, Smi::FromInt(kInvalidSigIndex));
} }
WasmContext* wasm_context = nullptr;
if (WASM_CONTEXT_TABLES) {
wasm_context = instance->wasm_context()->get();
EnsureWasmContextTable(wasm_context, imported_cur_size);
}
// Initialize the dispatch table with the (foreign) JS functions // Initialize the dispatch table with the (foreign) JS functions
// that are already in the table. // that are already in the table.
for (int i = 0; i < imported_cur_size; ++i) { for (int i = 0; i < imported_cur_size; ++i) {
...@@ -2774,7 +2794,7 @@ int InstanceBuilder::ProcessImports(Handle<FixedArray> code_table, ...@@ -2774,7 +2794,7 @@ int InstanceBuilder::ProcessImports(Handle<FixedArray> code_table,
// id, then the signature does not appear at all in this module, // id, then the signature does not appear at all in this module,
// so putting {-1} in the table will cause checks to always fail. // so putting {-1} in the table will cause checks to always fail.
auto target = Handle<WasmExportedFunction>::cast(val); auto target = Handle<WasmExportedFunction>::cast(val);
if (!FLAG_wasm_jit_to_native) { if (!WASM_CONTEXT_TABLES) {
FunctionSig* sig = nullptr; FunctionSig* sig = nullptr;
Handle<Code> code = Handle<Code> code =
MakeWasmToWasmWrapper(isolate_, target, nullptr, &sig, MakeWasmToWasmWrapper(isolate_, target, nullptr, &sig,
...@@ -2786,34 +2806,17 @@ int InstanceBuilder::ProcessImports(Handle<FixedArray> code_table, ...@@ -2786,34 +2806,17 @@ int InstanceBuilder::ProcessImports(Handle<FixedArray> code_table,
table_instance.function_table->set( table_instance.function_table->set(
compiler::FunctionTableCodeOffset(i), *code); compiler::FunctionTableCodeOffset(i), *code);
} else { } else {
const wasm::WasmCode* exported_code =
target->GetWasmCode().GetWasmCode();
wasm::NativeModule* exporting_module = exported_code->owner();
Handle<WasmInstanceObject> imported_instance = Handle<WasmInstanceObject> imported_instance =
handle(target->instance()); handle(target->instance());
imported_wasm_instances.Set(imported_instance, imported_instance); const wasm::WasmCode* exported_code =
target->GetWasmCode().GetWasmCode();
FunctionSig* sig = imported_instance->module() FunctionSig* sig = imported_instance->module()
->functions[exported_code->index()] ->functions[exported_code->index()]
.sig; .sig;
wasm::WasmCode* wrapper_code = auto& entry = wasm_context->table[i];
exporting_module->GetExportedWrapper(exported_code->index()); entry.context = imported_instance->wasm_context()->get();
if (wrapper_code == nullptr) { entry.sig_id = module_->signature_map.Find(sig);
WasmContext* other_context = entry.target = exported_code->instructions().start();
imported_instance->wasm_context()->get();
Handle<Code> wrapper = compiler::CompileWasmToWasmWrapper(
isolate_, target->GetWasmCode(), sig,
reinterpret_cast<Address>(other_context));
set_of_native_module_scopes.Add(exporting_module);
wrapper_code = exporting_module->AddExportedWrapper(
wrapper, exported_code->index());
}
int sig_index = module_->signature_map.Find(sig);
Handle<Foreign> foreign_holder = isolate_->factory()->NewForeign(
wrapper_code->instructions().start(), TENURED);
table_instance.function_table->set(
compiler::FunctionTableSigOffset(i), Smi::FromInt(sig_index));
table_instance.function_table->set(
compiler::FunctionTableCodeOffset(i), *foreign_holder);
} }
} }
...@@ -3177,12 +3180,6 @@ void InstanceBuilder::InitializeTables( ...@@ -3177,12 +3180,6 @@ void InstanceBuilder::InitializeTables(
Handle<WasmInstanceObject> instance, Handle<WasmInstanceObject> instance,
CodeSpecialization* code_specialization) { CodeSpecialization* code_specialization) {
size_t function_table_count = module_->function_tables.size(); size_t function_table_count = module_->function_tables.size();
std::vector<GlobalHandleAddress> new_function_tables(function_table_count);
wasm::NativeModule* native_module = compiled_module_->GetNativeModule();
std::vector<GlobalHandleAddress> empty;
std::vector<GlobalHandleAddress>& old_function_tables =
FLAG_wasm_jit_to_native ? native_module->function_tables() : empty;
Handle<FixedArray> old_function_tables_gc = Handle<FixedArray> old_function_tables_gc =
FLAG_wasm_jit_to_native FLAG_wasm_jit_to_native
...@@ -3204,9 +3201,7 @@ void InstanceBuilder::InitializeTables( ...@@ -3204,9 +3201,7 @@ void InstanceBuilder::InitializeTables(
instance->set_function_tables(*rooted_function_tables); instance->set_function_tables(*rooted_function_tables);
if (FLAG_wasm_jit_to_native) { if (!FLAG_wasm_jit_to_native) {
DCHECK_EQ(old_function_tables.size(), new_function_tables.size());
} else {
DCHECK_EQ(old_function_tables_gc->length(), DCHECK_EQ(old_function_tables_gc->length(),
new_function_tables_gc->length()); new_function_tables_gc->length());
} }
...@@ -3218,6 +3213,11 @@ void InstanceBuilder::InitializeTables( ...@@ -3218,6 +3213,11 @@ void InstanceBuilder::InitializeTables(
int num_table_entries = static_cast<int>(table.initial_size); int num_table_entries = static_cast<int>(table.initial_size);
int table_size = compiler::kFunctionTableEntrySize * num_table_entries; int table_size = compiler::kFunctionTableEntrySize * num_table_entries;
if (WASM_CONTEXT_TABLES) {
WasmContext* wasm_context = instance->wasm_context()->get();
EnsureWasmContextTable(wasm_context, num_table_entries);
}
if (table_instance.function_table.is_null()) { if (table_instance.function_table.is_null()) {
// Create a new dispatch table if necessary. // Create a new dispatch table if necessary.
table_instance.function_table = table_instance.function_table =
...@@ -3259,24 +3259,18 @@ void InstanceBuilder::InitializeTables( ...@@ -3259,24 +3259,18 @@ void InstanceBuilder::InitializeTables(
GlobalHandleAddress new_func_table_addr = global_func_table.address(); GlobalHandleAddress new_func_table_addr = global_func_table.address();
GlobalHandleAddress old_func_table_addr; GlobalHandleAddress old_func_table_addr;
if (!FLAG_wasm_jit_to_native) { if (!WASM_CONTEXT_TABLES) {
WasmCompiledModule::SetTableValue(isolate_, new_function_tables_gc, WasmCompiledModule::SetTableValue(isolate_, new_function_tables_gc,
int_index, new_func_table_addr); int_index, new_func_table_addr);
old_func_table_addr = old_func_table_addr =
WasmCompiledModule::GetTableValue(*old_function_tables_gc, int_index); WasmCompiledModule::GetTableValue(*old_function_tables_gc, int_index);
} else {
new_function_tables[int_index] = new_func_table_addr;
old_func_table_addr = old_function_tables[int_index];
}
code_specialization->RelocatePointer(old_func_table_addr, code_specialization->RelocatePointer(old_func_table_addr,
new_func_table_addr); new_func_table_addr);
} }
}
if (FLAG_wasm_jit_to_native) { if (!WASM_CONTEXT_TABLES) {
native_module->function_tables() = new_function_tables;
} else {
compiled_module_->set_function_tables(*new_function_tables_gc); compiled_module_->set_function_tables(*new_function_tables_gc);
} }
} }
...@@ -3331,10 +3325,12 @@ void InstanceBuilder::LoadTableSegments(Handle<FixedArray> code_table, ...@@ -3331,10 +3325,12 @@ void InstanceBuilder::LoadTableSegments(Handle<FixedArray> code_table,
uint32_t func_index = table_init.entries[i]; uint32_t func_index = table_init.entries[i];
WasmFunction* function = &module_->functions[func_index]; WasmFunction* function = &module_->functions[func_index];
int table_index = static_cast<int>(i + base); int table_index = static_cast<int>(i + base);
uint32_t sig_index = module_->signature_ids[function->sig_index];
// Update the local dispatch table first.
uint32_t sig_id = module_->signature_ids[function->sig_index];
table_instance.function_table->set( table_instance.function_table->set(
compiler::FunctionTableSigOffset(table_index), compiler::FunctionTableSigOffset(table_index),
Smi::FromInt(sig_index)); Smi::FromInt(sig_id));
WasmCodeWrapper wasm_code = EnsureTableExportLazyDeoptData( WasmCodeWrapper wasm_code = EnsureTableExportLazyDeoptData(
isolate_, instance, code_table, native_module, func_index, isolate_, instance, code_table, native_module, func_index,
table_instance.function_table, table_index, &num_table_exports); table_instance.function_table, table_index, &num_table_exports);
...@@ -3349,7 +3345,17 @@ void InstanceBuilder::LoadTableSegments(Handle<FixedArray> code_table, ...@@ -3349,7 +3345,17 @@ void InstanceBuilder::LoadTableSegments(Handle<FixedArray> code_table,
table_instance.function_table->set( table_instance.function_table->set(
compiler::FunctionTableCodeOffset(table_index), compiler::FunctionTableCodeOffset(table_index),
*value_to_update_with); *value_to_update_with);
if (WASM_CONTEXT_TABLES) {
WasmContext* wasm_context = instance->wasm_context()->get();
auto& entry = wasm_context->table[table_index];
entry.sig_id = sig_id;
entry.context = wasm_context;
entry.target = wasm_code.instructions().start();
}
if (!table_instance.table_object.is_null()) { if (!table_instance.table_object.is_null()) {
// Update the table object's other dispatch tables.
if (js_wrappers_[func_index].is_null()) { if (js_wrappers_[func_index].is_null()) {
// No JSFunction entry yet exists for this function. Create one. // No JSFunction entry yet exists for this function. Create one.
// TODO(titzer): We compile JS->wasm wrappers for functions are // TODO(titzer): We compile JS->wasm wrappers for functions are
...@@ -3378,31 +3384,10 @@ void InstanceBuilder::LoadTableSegments(Handle<FixedArray> code_table, ...@@ -3378,31 +3384,10 @@ void InstanceBuilder::LoadTableSegments(Handle<FixedArray> code_table,
} }
table_instance.js_wrappers->set(table_index, table_instance.js_wrappers->set(table_index,
*js_wrappers_[func_index]); *js_wrappers_[func_index]);
// When updating dispatch tables, we need to provide a wasm-to-wasm // UpdateDispatchTables() should update this instance as well.
// wrapper for wasm_code - unless wasm_code is already a wrapper. If WasmTableObject::UpdateDispatchTables(
// it's a wasm-to-js wrapper, we don't need to construct a isolate_, table_instance.table_object, table_index, function->sig,
// wasm-to-wasm wrapper because there's no context switching required. instance, wasm_code, func_index);
// The remaining case is that it's a wasm-to-wasm wrapper, in which
// case it's already doing "the right thing", and wrapping it again
// would be redundant.
if (func_index >= module_->num_imported_functions) {
value_to_update_with = GetOrCreateIndirectCallWrapper(
isolate_, instance, wasm_code, func_index, function->sig);
} else {
if (wasm_code.IsCodeObject()) {
DCHECK(wasm_code.GetCode()->kind() == Code::WASM_TO_JS_FUNCTION ||
wasm_code.GetCode()->kind() ==
Code::WASM_TO_WASM_FUNCTION);
} else {
DCHECK(wasm_code.GetWasmCode()->kind() ==
WasmCode::kWasmToJsWrapper ||
wasm_code.GetWasmCode()->kind() ==
WasmCode::kWasmToWasmWrapper);
}
}
WasmTableObject::UpdateDispatchTables(table_instance.table_object,
table_index, function->sig,
value_to_update_with);
} }
} }
} }
......
...@@ -950,7 +950,6 @@ std::unique_ptr<NativeModule> NativeModule::Clone() { ...@@ -950,7 +950,6 @@ std::unique_ptr<NativeModule> NativeModule::Clone() {
UNREACHABLE(); UNREACHABLE();
} }
} }
ret->specialization_data_ = specialization_data_;
return ret; return ret;
} }
...@@ -1021,14 +1020,14 @@ intptr_t WasmCodeManager::remaining_uncommitted() const { ...@@ -1021,14 +1020,14 @@ intptr_t WasmCodeManager::remaining_uncommitted() const {
NativeModuleModificationScope::NativeModuleModificationScope( NativeModuleModificationScope::NativeModuleModificationScope(
NativeModule* native_module) NativeModule* native_module)
: native_module_(native_module) { : native_module_(native_module) {
if (native_module_) { if (native_module_ && (native_module_->modification_scope_depth_++) == 0) {
bool success = native_module_->SetExecutable(false); bool success = native_module_->SetExecutable(false);
CHECK(success); CHECK(success);
} }
} }
NativeModuleModificationScope::~NativeModuleModificationScope() { NativeModuleModificationScope::~NativeModuleModificationScope() {
if (native_module_) { if (native_module_ && (native_module_->modification_scope_depth_--) == 1) {
bool success = native_module_->SetExecutable(true); bool success = native_module_->SetExecutable(true);
CHECK(success); CHECK(success);
} }
......
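NativeModuleModificationScope is now reentrant: only the outermost scope toggles executability, tracked by the new modification_scope_depth_ counter. A simplified standalone model of that behavior (the real class lives in wasm-code-manager and operates on NativeModule):

```cpp
// Simplified stand-in for NativeModule, just enough to show the refcounting.
struct FakeNativeModule {
  int modification_scope_depth_ = 0;
  bool is_executable_ = true;
  bool SetExecutable(bool value) { is_executable_ = value; return true; }
};

class ModificationScope {
 public:
  explicit ModificationScope(FakeNativeModule* m) : m_(m) {
    // Only the 0 -> 1 transition makes the code writable.
    if (m_ && (m_->modification_scope_depth_++) == 0) m_->SetExecutable(false);
  }
  ~ModificationScope() {
    // Only the 1 -> 0 transition makes the code executable again.
    if (m_ && (m_->modification_scope_depth_--) == 1) m_->SetExecutable(true);
  }
 private:
  FakeNativeModule* m_;
};
```

Nesting two scopes therefore flips executability exactly once in each direction, which lets the instance builder keep a scope open across the whole build without tripping over inner scopes.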
...@@ -245,19 +245,7 @@ class V8_EXPORT_PRIVATE NativeModule final { ...@@ -245,19 +245,7 @@ class V8_EXPORT_PRIVATE NativeModule final {
WasmCompiledModule* compiled_module() const; WasmCompiledModule* compiled_module() const;
void SetCompiledModule(Handle<WasmCompiledModule>); void SetCompiledModule(Handle<WasmCompiledModule>);
// Shorthand accessors to the specialization data content.
std::vector<wasm::GlobalHandleAddress>& function_tables() {
return specialization_data_.function_tables;
}
std::vector<wasm::GlobalHandleAddress>& empty_function_tables() {
return specialization_data_.empty_function_tables;
}
uint32_t num_imported_functions() const { return num_imported_functions_; } uint32_t num_imported_functions() const { return num_imported_functions_; }
size_t num_function_tables() const {
return specialization_data_.empty_function_tables.size();
}
size_t committed_memory() const { return committed_memory_; } size_t committed_memory() const { return committed_memory_; }
const size_t instance_id = 0; const size_t instance_id = 0;
...@@ -267,6 +255,7 @@ class V8_EXPORT_PRIVATE NativeModule final { ...@@ -267,6 +255,7 @@ class V8_EXPORT_PRIVATE NativeModule final {
friend class WasmCodeManager; friend class WasmCodeManager;
friend class NativeModuleSerializer; friend class NativeModuleSerializer;
friend class NativeModuleDeserializer; friend class NativeModuleDeserializer;
friend class NativeModuleModificationScope;
struct WasmCodeUniquePtrComparer { struct WasmCodeUniquePtrComparer {
bool operator()(const std::unique_ptr<WasmCode>& a, bool operator()(const std::unique_ptr<WasmCode>& a,
...@@ -325,14 +314,7 @@ class V8_EXPORT_PRIVATE NativeModule final { ...@@ -325,14 +314,7 @@ class V8_EXPORT_PRIVATE NativeModule final {
size_t committed_memory_ = 0; size_t committed_memory_ = 0;
bool can_request_more_memory_; bool can_request_more_memory_;
bool is_executable_ = false; bool is_executable_ = false;
int modification_scope_depth_ = 0;
// Specialization data that needs to be serialized and cloned.
// Keeping it groupped together because it makes cloning of all these
// elements a 1 line copy.
struct {
std::vector<wasm::GlobalHandleAddress> function_tables;
std::vector<wasm::GlobalHandleAddress> empty_function_tables;
} specialization_data_;
}; };
class V8_EXPORT_PRIVATE WasmCodeManager final { class V8_EXPORT_PRIVATE WasmCodeManager final {
......
...@@ -681,7 +681,9 @@ wasm::WasmInterpreter* WasmDebugInfo::SetupForTesting( ...@@ -681,7 +681,9 @@ wasm::WasmInterpreter* WasmDebugInfo::SetupForTesting(
auto interp_handle = auto interp_handle =
Managed<wasm::InterpreterHandle>::Allocate(isolate, isolate, *debug_info); Managed<wasm::InterpreterHandle>::Allocate(isolate, isolate, *debug_info);
debug_info->set(kInterpreterHandleIndex, *interp_handle); debug_info->set(kInterpreterHandleIndex, *interp_handle);
return interp_handle->get()->interpreter(); auto ret = interp_handle->get()->interpreter();
ret->SetCallIndirectTestMode();
return ret;
} }
bool WasmDebugInfo::IsWasmDebugInfo(Object* object) { bool WasmDebugInfo::IsWasmDebugInfo(Object* object) {
......
...@@ -968,6 +968,9 @@ class CodeMap { ...@@ -968,6 +968,9 @@ class CodeMap {
// This handle is set and reset by the SetInstanceObject() / // This handle is set and reset by the SetInstanceObject() /
// ClearInstanceObject() method, which is used by the HeapObjectsScope. // ClearInstanceObject() method, which is used by the HeapObjectsScope.
Handle<WasmInstanceObject> instance_; Handle<WasmInstanceObject> instance_;
// TODO(wasm): Remove this testing wart. It is needed because interpreter
// entry stubs are not generated in testing the interpreter in cctests.
bool call_indirect_through_module_ = false;
public: public:
CodeMap(Isolate* isolate, const WasmModule* module, CodeMap(Isolate* isolate, const WasmModule* module,
...@@ -986,6 +989,12 @@ class CodeMap { ...@@ -986,6 +989,12 @@ class CodeMap {
} }
} }
bool call_indirect_through_module() { return call_indirect_through_module_; }
void set_call_indirect_through_module(bool val) {
call_indirect_through_module_ = val;
}
void SetInstanceObject(Handle<WasmInstanceObject> instance) { void SetInstanceObject(Handle<WasmInstanceObject> instance) {
DCHECK(instance_.is_null()); DCHECK(instance_.is_null());
instance_ = instance; instance_ = instance;
...@@ -2553,7 +2562,8 @@ class ThreadImpl { ...@@ -2553,7 +2562,8 @@ class ThreadImpl {
} }
if (code->kind() == wasm::WasmCode::kWasmToJsWrapper) { if (code->kind() == wasm::WasmCode::kWasmToJsWrapper) {
return CallExternalJSFunction(isolate, WasmCodeWrapper(code), signature); return CallExternalJSFunction(isolate, WasmCodeWrapper(code), signature);
} else if (code->kind() == wasm::WasmCode::kWasmToWasmWrapper) { } else if (code->kind() == wasm::WasmCode::kWasmToWasmWrapper ||
code->kind() == wasm::WasmCode::kInterpreterStub) {
return CallExternalWasmFunction(isolate, WasmCodeWrapper(code), return CallExternalWasmFunction(isolate, WasmCodeWrapper(code),
signature); signature);
} }
...@@ -2582,23 +2592,8 @@ class ThreadImpl { ...@@ -2582,23 +2592,8 @@ class ThreadImpl {
ExternalCallResult CallIndirectFunction(uint32_t table_index, ExternalCallResult CallIndirectFunction(uint32_t table_index,
uint32_t entry_index, uint32_t entry_index,
uint32_t sig_index) { uint32_t sig_index) {
bool no_func_tables = !codemap()->has_instance(); if (codemap()->call_indirect_through_module()) {
if (FLAG_wasm_jit_to_native) { // Rely on the information stored in the WasmModule.
no_func_tables = no_func_tables || codemap()
->instance()
->compiled_module()
->GetNativeModule()
->function_tables()
.empty();
} else {
no_func_tables =
no_func_tables ||
!codemap()->instance()->compiled_module()->has_function_tables();
}
if (no_func_tables) {
// No instance. Rely on the information stored in the WasmModule.
// TODO(wasm): This is only needed for testing. Refactor testing to use
// the same paths as production.
InterpreterCode* code = InterpreterCode* code =
codemap()->GetIndirectCode(table_index, entry_index); codemap()->GetIndirectCode(table_index, entry_index);
if (!code) return {ExternalCallResult::INVALID_FUNC}; if (!code) return {ExternalCallResult::INVALID_FUNC};
...@@ -2632,7 +2627,7 @@ class ThreadImpl { ...@@ -2632,7 +2627,7 @@ class ThreadImpl {
DCHECK_EQ(canonical_sig_index, DCHECK_EQ(canonical_sig_index,
module()->signature_map.Find(module()->signatures[sig_index])); module()->signature_map.Find(module()->signatures[sig_index]));
if (!FLAG_wasm_jit_to_native) { if (!WASM_CONTEXT_TABLES) {
// Check signature. // Check signature.
FixedArray* fun_tables = compiled_module->function_tables(); FixedArray* fun_tables = compiled_module->function_tables();
if (table_index >= static_cast<uint32_t>(fun_tables->length())) { if (table_index >= static_cast<uint32_t>(fun_tables->length())) {
...@@ -2659,33 +2654,23 @@ class ThreadImpl { ...@@ -2659,33 +2654,23 @@ class ThreadImpl {
target_gc = Code::cast(fun_table->get( target_gc = Code::cast(fun_table->get(
compiler::FunctionTableCodeOffset(static_cast<int>(entry_index)))); compiler::FunctionTableCodeOffset(static_cast<int>(entry_index))));
} else { } else {
// Check signature. // The function table is stored in the wasm context.
std::vector<GlobalHandleAddress>& fun_tables = // TODO(wasm): the wasm interpreter currently supports only one table.
compiled_module->GetNativeModule()->function_tables(); CHECK_EQ(0, table_index);
if (table_index >= fun_tables.size()) { // Bounds check against table size.
if (entry_index >= wasm_context_->table_size) {
return {ExternalCallResult::INVALID_FUNC}; return {ExternalCallResult::INVALID_FUNC};
} }
// Reconstitute the global handle to the function table, from the // Signature check.
// address stored in the respective table of tables. int32_t entry_sig = wasm_context_->table[entry_index].sig_id;
FixedArray* fun_table = if (entry_sig != static_cast<int32_t>(canonical_sig_index)) {
*reinterpret_cast<FixedArray**>(fun_tables[table_index]);
// Function tables store <smi, code> pairs.
int num_funcs_in_table =
fun_table->length() / compiler::kFunctionTableEntrySize;
if (entry_index >= static_cast<uint32_t>(num_funcs_in_table)) {
return {ExternalCallResult::INVALID_FUNC};
}
int found_sig = Smi::ToInt(fun_table->get(
compiler::FunctionTableSigOffset(static_cast<int>(entry_index))));
if (static_cast<uint32_t>(found_sig) != canonical_sig_index) {
return {ExternalCallResult::SIGNATURE_MISMATCH}; return {ExternalCallResult::SIGNATURE_MISMATCH};
} }
// Load the target address (first instruction of code).
Address first_instr = wasm_context_->table[entry_index].target;
// TODO(titzer): load the wasm context instead of relying on the
// target code being specialized to the target instance.
// Get code object. // Get code object.
Address first_instr =
Foreign::cast(fun_table->get(compiler::FunctionTableCodeOffset(
static_cast<int>(entry_index))))
->foreign_address();
target = target =
isolate->wasm_engine()->code_manager()->GetCodeFromStartAddress( isolate->wasm_engine()->code_manager()->GetCodeFromStartAddress(
first_instr); first_instr);
...@@ -2978,6 +2963,10 @@ void WasmInterpreter::SetFunctionCodeForTesting(const WasmFunction* function, ...@@ -2978,6 +2963,10 @@ void WasmInterpreter::SetFunctionCodeForTesting(const WasmFunction* function,
internals_->codemap_.SetFunctionCode(function, start, end); internals_->codemap_.SetFunctionCode(function, start, end);
} }
void WasmInterpreter::SetCallIndirectTestMode() {
internals_->codemap_.set_call_indirect_through_module(true);
}
ControlTransferMap WasmInterpreter::ComputeControlTransfersForTesting( ControlTransferMap WasmInterpreter::ComputeControlTransfersForTesting(
Zone* zone, const WasmModule* module, const byte* start, const byte* end) { Zone* zone, const WasmModule* module, const byte* start, const byte* end) {
// Create some dummy structures, to avoid special-casing the implementation // Create some dummy structures, to avoid special-casing the implementation
......
...@@ -215,6 +215,7 @@ class V8_EXPORT_PRIVATE WasmInterpreter { ...@@ -215,6 +215,7 @@ class V8_EXPORT_PRIVATE WasmInterpreter {
// Manually adds code to the interpreter for the given function. // Manually adds code to the interpreter for the given function.
void SetFunctionCodeForTesting(const WasmFunction* function, void SetFunctionCodeForTesting(const WasmFunction* function,
const byte* start, const byte* end); const byte* start, const byte* end);
void SetCallIndirectTestMode();
// Computes the control transfers for the given bytecode. Used internally in // Computes the control transfers for the given bytecode. Used internally in
// the interpreter, but exposed for testing. // the interpreter, but exposed for testing.
......
...@@ -156,7 +156,7 @@ WasmFunction* GetWasmFunctionForExport(Isolate* isolate, ...@@ -156,7 +156,7 @@ WasmFunction* GetWasmFunctionForExport(Isolate* isolate,
Handle<Object> GetOrCreateIndirectCallWrapper( Handle<Object> GetOrCreateIndirectCallWrapper(
Isolate* isolate, Handle<WasmInstanceObject> owning_instance, Isolate* isolate, Handle<WasmInstanceObject> owning_instance,
WasmCodeWrapper wasm_code, uint32_t index, FunctionSig* sig) { WasmCodeWrapper wasm_code, uint32_t func_index, FunctionSig* sig) {
Address new_context_address = Address new_context_address =
reinterpret_cast<Address>(owning_instance->wasm_context()->get()); reinterpret_cast<Address>(owning_instance->wasm_context()->get());
if (!wasm_code.IsCodeObject()) { if (!wasm_code.IsCodeObject()) {
...@@ -172,6 +172,8 @@ Handle<Object> GetOrCreateIndirectCallWrapper( ...@@ -172,6 +172,8 @@ Handle<Object> GetOrCreateIndirectCallWrapper(
wasm::WasmCode* exported_wrapper = wasm::WasmCode* exported_wrapper =
native_module->GetExportedWrapper(wasm_code.GetWasmCode()->index()); native_module->GetExportedWrapper(wasm_code.GetWasmCode()->index());
if (exported_wrapper == nullptr) { if (exported_wrapper == nullptr) {
wasm::NativeModuleModificationScope native_modification_scope(
native_module);
Handle<Code> new_wrapper = compiler::CompileWasmToWasmWrapper( Handle<Code> new_wrapper = compiler::CompileWasmToWasmWrapper(
isolate, wasm_code, sig, new_context_address); isolate, wasm_code, sig, new_context_address);
exported_wrapper = native_module->AddExportedWrapper( exported_wrapper = native_module->AddExportedWrapper(
...@@ -180,10 +182,11 @@ Handle<Object> GetOrCreateIndirectCallWrapper( ...@@ -180,10 +182,11 @@ Handle<Object> GetOrCreateIndirectCallWrapper(
Address target = exported_wrapper->instructions().start(); Address target = exported_wrapper->instructions().start();
return isolate->factory()->NewForeign(target, TENURED); return isolate->factory()->NewForeign(target, TENURED);
} }
CodeSpaceMemoryModificationScope gc_modification_scope(isolate->heap());
Handle<Code> code = compiler::CompileWasmToWasmWrapper( Handle<Code> code = compiler::CompileWasmToWasmWrapper(
isolate, wasm_code, sig, new_context_address); isolate, wasm_code, sig, new_context_address);
AttachWasmFunctionInfo(isolate, code, owning_instance, AttachWasmFunctionInfo(isolate, code, owning_instance,
static_cast<int>(index)); static_cast<int>(func_index));
return code; return code;
} }
......
...@@ -275,7 +275,7 @@ WasmFunction* GetWasmFunctionForExport(Isolate* isolate, Handle<Object> target); ...@@ -275,7 +275,7 @@ WasmFunction* GetWasmFunctionForExport(Isolate* isolate, Handle<Object> target);
Handle<Object> GetOrCreateIndirectCallWrapper( Handle<Object> GetOrCreateIndirectCallWrapper(
Isolate* isolate, Handle<WasmInstanceObject> owning_instance, Isolate* isolate, Handle<WasmInstanceObject> owning_instance,
WasmCodeWrapper wasm_code, uint32_t index, FunctionSig* sig); WasmCodeWrapper wasm_code, uint32_t func_index, FunctionSig* sig);
void UnpackAndRegisterProtectedInstructionsGC(Isolate* isolate, void UnpackAndRegisterProtectedInstructionsGC(Isolate* isolate,
Handle<FixedArray> code_table); Handle<FixedArray> code_table);
......
...@@ -248,11 +248,44 @@ void WasmTableObject::AddDispatchTable(Isolate* isolate, ...@@ -248,11 +248,44 @@ void WasmTableObject::AddDispatchTable(Isolate* isolate,
} }
void WasmTableObject::Grow(Isolate* isolate, uint32_t count) { void WasmTableObject::Grow(Isolate* isolate, uint32_t count) {
// TODO(6792): No longer needed once WebAssembly code is off heap. if (count == 0) return; // Degenerate case: nothing to do.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
Handle<FixedArray> dispatch_tables(this->dispatch_tables()); Handle<FixedArray> dispatch_tables(this->dispatch_tables());
DCHECK_EQ(0, dispatch_tables->length() % kDispatchTableNumElements); DCHECK_EQ(0, dispatch_tables->length() % kDispatchTableNumElements);
uint32_t old_size = functions()->length(); uint32_t old_size = functions()->length();
constexpr int kInvalidSigIndex = -1;
if (WASM_CONTEXT_TABLES) {
// If tables are stored in the WASM context, no code patching is
// necessary. We simply have to grow the raw tables in the WasmContext
// for each instance that has imported this table.
// TODO(titzer): replace the dispatch table with a weak list of all
// the instances that import a given table.
for (int i = 0; i < dispatch_tables->length();
i += kDispatchTableNumElements) {
// TODO(titzer): potentially racy update of WasmContext::table
WasmContext* wasm_context =
WasmInstanceObject::cast(dispatch_tables->get(i))
->wasm_context()
->get();
DCHECK_EQ(old_size, wasm_context->table_size);
uint32_t new_size = old_size + count;
wasm_context->table = reinterpret_cast<IndirectFunctionTableEntry*>(
realloc(wasm_context->table,
new_size * sizeof(IndirectFunctionTableEntry)));
for (uint32_t j = old_size; j < new_size; j++) {
wasm_context->table[j].sig_id = kInvalidSigIndex;
wasm_context->table[j].context = nullptr;
wasm_context->table[j].target = nullptr;
}
wasm_context->table_size = new_size;
}
return;
}
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
Zone specialization_zone(isolate->allocator(), ZONE_NAME); Zone specialization_zone(isolate->allocator(), ZONE_NAME);
for (int i = 0; i < dispatch_tables->length(); for (int i = 0; i < dispatch_tables->length();
...@@ -272,24 +305,7 @@ void WasmTableObject::Grow(Isolate* isolate, uint32_t count) { ...@@ -272,24 +305,7 @@ void WasmTableObject::Grow(Isolate* isolate, uint32_t count) {
*new_function_table); *new_function_table);
// Patch the code of the respective instance. // Patch the code of the respective instance.
if (FLAG_wasm_jit_to_native) { if (!WASM_CONTEXT_TABLES) {
DisallowHeapAllocation no_gc;
wasm::CodeSpecialization code_specialization(isolate,
&specialization_zone);
WasmInstanceObject* instance =
WasmInstanceObject::cast(dispatch_tables->get(i));
WasmCompiledModule* compiled_module = instance->compiled_module();
wasm::NativeModule* native_module = compiled_module->GetNativeModule();
wasm::NativeModuleModificationScope native_module_modification_scope(
native_module);
GlobalHandleAddress old_function_table_addr =
native_module->function_tables()[table_index];
code_specialization.PatchTableSize(old_size, old_size + count);
code_specialization.RelocatePointer(old_function_table_addr,
new_function_table_addr);
code_specialization.ApplyToWholeInstance(instance);
native_module->function_tables()[table_index] = new_function_table_addr;
} else {
DisallowHeapAllocation no_gc; DisallowHeapAllocation no_gc;
wasm::CodeSpecialization code_specialization(isolate, wasm::CodeSpecialization code_specialization(isolate,
&specialization_zone); &specialization_zone);
...@@ -311,70 +327,104 @@ void WasmTableObject::Grow(Isolate* isolate, uint32_t count) { ...@@ -311,70 +327,104 @@ void WasmTableObject::Grow(Isolate* isolate, uint32_t count) {
} }
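The WASM_CONTEXT_TABLES path above replaces code patching with plain reallocation of the per-instance raw table. Below is a minimal standalone sketch of that idea; Entry and Context are stand-in types for illustration, not V8's real IndirectFunctionTableEntry/WasmContext. New slots start out with an invalid signature id so they can never pass a signature check until they are explicitly filled in.

#include <cstdint>
#include <cstdlib>

struct Context;

struct Entry {
  int32_t sig_id = -1;       // invalid by default; never matches a real signature
  Context* context = nullptr;
  void* target = nullptr;    // first instruction of the callee's code
};

struct Context {
  Entry* table = nullptr;
  uint32_t table_size = 0;
};

// Grow the raw table by `count` entries, initializing the new slots as invalid.
void GrowTable(Context* ctx, uint32_t count) {
  uint32_t old_size = ctx->table_size;
  uint32_t new_size = old_size + count;
  ctx->table = static_cast<Entry*>(
      realloc(ctx->table, new_size * sizeof(Entry)));
  for (uint32_t i = old_size; i < new_size; i++) {
    ctx->table[i] = Entry{};  // sig_id = -1, no context, no target
  }
  ctx->table_size = new_size;
}

In the actual CL the equivalent loop runs once per instance recorded in the dispatch tables, because every importing instance holds its own copy of the table in its WasmContext.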
void WasmTableObject::Set(Isolate* isolate, Handle<WasmTableObject> table, void WasmTableObject::Set(Isolate* isolate, Handle<WasmTableObject> table,
int32_t index, Handle<JSFunction> function) { int32_t table_index, Handle<JSFunction> function) {
Handle<FixedArray> array(table->functions(), isolate); Handle<FixedArray> array(table->functions(), isolate);
if (function.is_null()) {
ClearDispatchTables(table, table_index); // Degenerate case of null value.
array->set(table_index, isolate->heap()->null_value());
return;
}
Handle<FixedArray> dispatch_tables(table->dispatch_tables(), isolate); // TODO(titzer): Change this to MaybeHandle<WasmExportedFunction>
wasm::FunctionSig* sig = nullptr;
Handle<Object> code = Handle<Object>::null();
Handle<Object> value = isolate->factory()->null_value();
if (!function.is_null()) {
auto exported_function = Handle<WasmExportedFunction>::cast(function); auto exported_function = Handle<WasmExportedFunction>::cast(function);
auto* wasm_function = wasm::GetWasmFunctionForExport(isolate, function); auto* wasm_function = wasm::GetWasmFunctionForExport(isolate, function);
// The verification that {function} is an export was done DCHECK_NOT_NULL(wasm_function);
// by the caller. DCHECK_NOT_NULL(wasm_function->sig);
DCHECK(wasm_function != nullptr && wasm_function->sig != nullptr);
sig = wasm_function->sig;
value = function;
// TODO(titzer): Make JSToWasm wrappers just call the WASM to WASM wrapper,
// and then we can just reuse the WASM to WASM wrapper.
WasmCodeWrapper wasm_code = exported_function->GetWasmCode(); WasmCodeWrapper wasm_code = exported_function->GetWasmCode();
wasm::NativeModule* native_module = UpdateDispatchTables(isolate, table, table_index, wasm_function->sig,
wasm_code.IsCodeObject() ? nullptr : wasm_code.GetWasmCode()->owner(); handle(exported_function->instance()), wasm_code,
CodeSpaceMemoryModificationScope gc_modification_scope(isolate->heap()); exported_function->function_index());
wasm::NativeModuleModificationScope native_modification_scope( array->set(table_index, *function);
native_module); }
code = wasm::GetOrCreateIndirectCallWrapper(
isolate, handle(exported_function->instance()), wasm_code, void WasmTableObject::UpdateDispatchTables(
exported_function->function_index(), sig); Isolate* isolate, Handle<WasmTableObject> table, int table_index,
} wasm::FunctionSig* sig, Handle<WasmInstanceObject> from_instance,
UpdateDispatchTables(table, index, sig, code); WasmCodeWrapper wasm_code, int func_index) {
array->set(index, *value); if (WASM_CONTEXT_TABLES) {
} // We simply need to update the WASM contexts for each instance
// that imports this table.
void WasmTableObject::UpdateDispatchTables(Handle<WasmTableObject> table,
int index, wasm::FunctionSig* sig,
Handle<Object> code_or_foreign) {
DisallowHeapAllocation no_gc; DisallowHeapAllocation no_gc;
FixedArray* dispatch_tables = table->dispatch_tables(); FixedArray* dispatch_tables = table->dispatch_tables();
DCHECK_EQ(0, dispatch_tables->length() % kDispatchTableNumElements); DCHECK_EQ(0, dispatch_tables->length() % kDispatchTableNumElements);
for (int i = 0; i < dispatch_tables->length(); for (int i = 0; i < dispatch_tables->length();
i += kDispatchTableNumElements) { i += kDispatchTableNumElements) {
FixedArray* function_table = FixedArray::cast( // Note that {SignatureMap::Find} may return {-1} if the signature is
dispatch_tables->get(i + kDispatchTableFunctionTableOffset)); // not found; it will simply never match any check.
Smi* sig_smi = Smi::FromInt(-1); WasmInstanceObject* to_instance = WasmInstanceObject::cast(
Object* code = Smi::kZero;
if (sig) {
DCHECK(code_or_foreign->IsCode() || code_or_foreign->IsForeign());
WasmInstanceObject* instance = WasmInstanceObject::cast(
dispatch_tables->get(i + kDispatchTableInstanceOffset)); dispatch_tables->get(i + kDispatchTableInstanceOffset));
auto sig_id = to_instance->module()->signature_map.Find(sig);
auto& entry = to_instance->wasm_context()->get()->table[table_index];
entry.sig_id = sig_id;
entry.context = from_instance->wasm_context()->get();
entry.target = wasm_code.instructions().start();
}
} else {
// We may need to compile a new WASM->WASM wrapper for this.
Handle<Object> code_or_foreign = wasm::GetOrCreateIndirectCallWrapper(
isolate, from_instance, wasm_code, func_index, sig);
DisallowHeapAllocation no_gc;
FixedArray* dispatch_tables = table->dispatch_tables();
DCHECK_EQ(0, dispatch_tables->length() % kDispatchTableNumElements);
for (int i = 0; i < dispatch_tables->length();
i += kDispatchTableNumElements) {
// Note that {SignatureMap::Find} may return {-1} if the signature is // Note that {SignatureMap::Find} may return {-1} if the signature is
// not found; it will simply never match any check. // not found; it will simply never match any check.
auto sig_index = instance->module()->signature_map.Find(sig); WasmInstanceObject* to_instance = WasmInstanceObject::cast(
sig_smi = Smi::FromInt(sig_index); dispatch_tables->get(i + kDispatchTableInstanceOffset));
code = *code_or_foreign; auto sig_id = to_instance->module()->signature_map.Find(sig);
FixedArray* function_table = FixedArray::cast(
dispatch_tables->get(i + kDispatchTableFunctionTableOffset));
function_table->set(compiler::FunctionTableSigOffset(table_index),
Smi::FromInt(sig_id));
function_table->set(compiler::FunctionTableCodeOffset(table_index),
*code_or_foreign);
}
}
}
void WasmTableObject::ClearDispatchTables(Handle<WasmTableObject> table,
int index) {
DisallowHeapAllocation no_gc;
FixedArray* dispatch_tables = table->dispatch_tables();
DCHECK_EQ(0, dispatch_tables->length() % kDispatchTableNumElements);
for (int i = 0; i < dispatch_tables->length();
i += kDispatchTableNumElements) {
if (WASM_CONTEXT_TABLES) {
constexpr int kInvalidSigIndex = -1; // TODO(titzer): move to header.
WasmInstanceObject* to_instance = WasmInstanceObject::cast(
dispatch_tables->get(i + kDispatchTableInstanceOffset));
DCHECK_LT(index, to_instance->wasm_context()->get()->table_size);
auto& entry = to_instance->wasm_context()->get()->table[index];
entry.sig_id = kInvalidSigIndex;
entry.context = nullptr;
entry.target = nullptr;
} else { } else {
DCHECK(code_or_foreign.is_null()); FixedArray* function_table = FixedArray::cast(
dispatch_tables->get(i + kDispatchTableFunctionTableOffset));
function_table->set(compiler::FunctionTableSigOffset(index),
Smi::FromInt(-1));
function_table->set(compiler::FunctionTableCodeOffset(index), Smi::kZero);
} }
function_table->set(compiler::FunctionTableSigOffset(index), sig_smi);
function_table->set(compiler::FunctionTableCodeOffset(index), code);
} }
} }
namespace { namespace {
Handle<JSArrayBuffer> GrowMemoryBuffer(Isolate* isolate, Handle<JSArrayBuffer> GrowMemoryBuffer(Isolate* isolate,
Handle<JSArrayBuffer> old_buffer, Handle<JSArrayBuffer> old_buffer,
uint32_t pages, uint32_t maximum_pages, uint32_t pages, uint32_t maximum_pages,
...@@ -1388,9 +1438,6 @@ Handle<WasmCompiledModule> WasmCompiledModule::New( ...@@ -1388,9 +1438,6 @@ Handle<WasmCompiledModule> WasmCompiledModule::New(
// has_code_table and pass undefined. // has_code_table and pass undefined.
compiled_module->set_code_table(*code_table); compiled_module->set_code_table(*code_table);
native_module->function_tables() = function_tables;
native_module->empty_function_tables() = function_tables;
int function_count = static_cast<int>(module->functions.size()); int function_count = static_cast<int>(module->functions.size());
Handle<FixedArray> handler_table = Handle<FixedArray> handler_table =
isolate->factory()->NewFixedArray(function_count, TENURED); isolate->factory()->NewFixedArray(function_count, TENURED);
...@@ -1517,22 +1564,6 @@ void WasmCompiledModule::Reset(Isolate* isolate, ...@@ -1517,22 +1564,6 @@ void WasmCompiledModule::Reset(Isolate* isolate,
i, isolate->heap()->undefined_value()); i, isolate->heap()->undefined_value());
} }
} }
// Reset function tables.
if (native_module->function_tables().size() > 0) {
std::vector<GlobalHandleAddress>& function_tables =
native_module->function_tables();
std::vector<GlobalHandleAddress>& empty_function_tables =
native_module->empty_function_tables();
if (function_tables != empty_function_tables) {
DCHECK_EQ(function_tables.size(), empty_function_tables.size());
for (size_t i = 0, e = function_tables.size(); i < e; ++i) {
code_specialization.RelocatePointer(function_tables[i],
empty_function_tables[i]);
}
native_module->function_tables() = empty_function_tables;
}
}
for (uint32_t i = native_module->num_imported_functions(), for (uint32_t i = native_module->num_imported_functions(),
end = native_module->FunctionCount(); end = native_module->FunctionCount();
...@@ -1672,30 +1703,23 @@ void WasmCompiledModule::ReinitializeAfterDeserialization( ...@@ -1672,30 +1703,23 @@ void WasmCompiledModule::ReinitializeAfterDeserialization(
} }
size_t function_table_count = size_t function_table_count =
compiled_module->shared()->module()->function_tables.size(); compiled_module->shared()->module()->function_tables.size();
wasm::NativeModule* native_module = compiled_module->GetNativeModule();
if (function_table_count > 0) { if (function_table_count > 0) {
// The tables are of the right size, but contain bogus global handle // The tables are of the right size, but contain bogus global handle
// addresses. Produce new global handles for the empty tables, then reset, // addresses. Produce new global handles for the empty tables, then reset,
// which will relocate the code. We end up with a WasmCompiledModule as-if // which will relocate the code. We end up with a WasmCompiledModule as-if
// it were just compiled. // it were just compiled.
Handle<FixedArray> function_tables; if (!WASM_CONTEXT_TABLES) {
if (!FLAG_wasm_jit_to_native) {
DCHECK(compiled_module->has_function_tables()); DCHECK(compiled_module->has_function_tables());
function_tables = Handle<FixedArray> function_tables(
handle(compiled_module->empty_function_tables(), isolate); compiled_module->empty_function_tables(), isolate);
} else {
DCHECK_GT(native_module->function_tables().size(), 0);
}
for (size_t i = 0; i < function_table_count; ++i) { for (size_t i = 0; i < function_table_count; ++i) {
Handle<Object> global_func_table_handle = Handle<Object> global_func_table_handle =
isolate->global_handles()->Create(isolate->heap()->undefined_value()); isolate->global_handles()->Create(
isolate->heap()->undefined_value());
GlobalHandleAddress new_func_table = global_func_table_handle.address(); GlobalHandleAddress new_func_table = global_func_table_handle.address();
if (!FLAG_wasm_jit_to_native) {
SetTableValue(isolate, function_tables, static_cast<int>(i), SetTableValue(isolate, function_tables, static_cast<int>(i),
new_func_table); new_func_table);
} else {
native_module->empty_function_tables()[i] = new_func_table;
} }
} }
} }
......
...@@ -38,6 +38,8 @@ class WasmCompiledModule; ...@@ -38,6 +38,8 @@ class WasmCompiledModule;
class WasmDebugInfo; class WasmDebugInfo;
class WasmInstanceObject; class WasmInstanceObject;
#define WASM_CONTEXT_TABLES FLAG_wasm_jit_to_native
#define DECL_OOL_QUERY(type) static bool Is##type(Object* object); #define DECL_OOL_QUERY(type) static bool Is##type(Object* object);
#define DECL_OOL_CAST(type) static type* cast(Object* object); #define DECL_OOL_CAST(type) static type* cast(Object* object);
...@@ -55,6 +57,15 @@ class WasmInstanceObject; ...@@ -55,6 +57,15 @@ class WasmInstanceObject;
static const int k##name##Offset = \ static const int k##name##Offset = \
kSize + (k##name##Index - kFieldCount) * kPointerSize; kSize + (k##name##Index - kFieldCount) * kPointerSize;
// An entry in an indirect dispatch table.
struct IndirectFunctionTableEntry {
int32_t sig_id = 0;
WasmContext* context = nullptr;
Address target = nullptr;
MOVE_ONLY_WITH_DEFAULT_CONSTRUCTORS(IndirectFunctionTableEntry)
};
// Wasm context used to store the mem_size and mem_start address of the linear // Wasm context used to store the mem_size and mem_start address of the linear
// memory. These variables can be accessed at C++ level at graph build time // memory. These variables can be accessed at C++ level at graph build time
// (e.g., initialized during instance building / changed at runtime by // (e.g., initialized during instance building / changed at runtime by
...@@ -67,14 +78,27 @@ struct WasmContext { ...@@ -67,14 +78,27 @@ struct WasmContext {
uint32_t mem_size = 0; // TODO(titzer): uintptr_t? uint32_t mem_size = 0; // TODO(titzer): uintptr_t?
uint32_t mem_mask = 0; // TODO(titzer): uintptr_t? uint32_t mem_mask = 0; // TODO(titzer): uintptr_t?
byte* globals_start = nullptr; byte* globals_start = nullptr;
// TODO(wasm): pad these entries to a power of two.
IndirectFunctionTableEntry* table = nullptr;
uint32_t table_size = 0;
inline void SetRawMemory(void* mem_start, size_t mem_size) { void SetRawMemory(void* mem_start, size_t mem_size) {
DCHECK_LE(mem_size, wasm::kV8MaxWasmMemoryPages * wasm::kWasmPageSize); DCHECK_LE(mem_size, wasm::kV8MaxWasmMemoryPages * wasm::kWasmPageSize);
this->mem_start = static_cast<byte*>(mem_start); this->mem_start = static_cast<byte*>(mem_start);
this->mem_size = static_cast<uint32_t>(mem_size); this->mem_size = static_cast<uint32_t>(mem_size);
this->mem_mask = base::bits::RoundUpToPowerOfTwo32(this->mem_size) - 1; this->mem_mask = base::bits::RoundUpToPowerOfTwo32(this->mem_size) - 1;
DCHECK_LE(mem_size, this->mem_mask + 1); DCHECK_LE(mem_size, this->mem_mask + 1);
} }
~WasmContext() {
if (table) free(table);
mem_start = nullptr;
mem_size = 0;
mem_mask = 0;
globals_start = nullptr;
table = nullptr;
table_size = 0;
}
}; };
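With the table pointer and size stored directly in the WasmContext, an indirect call no longer relies on constants patched into code: it bounds-checks the index against table_size, compares the entry's sig_id with the caller's expected (canonicalized) signature id, and then jumps to target while passing along the callee's context. A minimal standalone sketch of that lookup, again using stand-in types rather than the real WasmContext:

#include <cstdint>

struct Context;

struct Entry {
  int32_t sig_id;
  Context* context;
  int (*target)(Context*, int);  // stand-in for the callee's entry point
};

struct Context {
  Entry* table = nullptr;
  uint32_t table_size = 0;
};

enum class CallResult { kOk, kInvalidFunc, kSignatureMismatch };

// Resolve and perform an indirect call through the context-owned table.
CallResult CallIndirect(Context* ctx, uint32_t index, int32_t expected_sig,
                        int arg, int* result) {
  if (index >= ctx->table_size) return CallResult::kInvalidFunc;        // bounds check
  const Entry& entry = ctx->table[index];
  if (entry.sig_id != expected_sig) return CallResult::kSignatureMismatch;
  *result = entry.target(entry.context, arg);  // call with the callee's context
  return CallResult::kOk;
}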
// Representation of a WebAssembly.Module JavaScript-level object. // Representation of a WebAssembly.Module JavaScript-level object.
...@@ -137,9 +161,13 @@ class WasmTableObject : public JSObject { ...@@ -137,9 +161,13 @@ class WasmTableObject : public JSObject {
static void Set(Isolate* isolate, Handle<WasmTableObject> table, static void Set(Isolate* isolate, Handle<WasmTableObject> table,
int32_t index, Handle<JSFunction> function); int32_t index, Handle<JSFunction> function);
static void UpdateDispatchTables(Handle<WasmTableObject> table, int index, static void UpdateDispatchTables(Isolate* isolate,
wasm::FunctionSig* sig, Handle<WasmTableObject> table,
Handle<Object> code_or_foreign); int table_index, wasm::FunctionSig* sig,
Handle<WasmInstanceObject> from_instance,
WasmCodeWrapper wasm_code, int func_index);
static void ClearDispatchTables(Handle<WasmTableObject> table, int index);
}; };
// Representation of a WebAssembly.Memory JavaScript-level object. // Representation of a WebAssembly.Memory JavaScript-level object.
...@@ -481,9 +509,7 @@ class WasmCompiledModule : public FixedArray { ...@@ -481,9 +509,7 @@ class WasmCompiledModule : public FixedArray {
MACRO(SMALL_CONST_NUMBER, uint32_t, num_imported_functions) \ MACRO(SMALL_CONST_NUMBER, uint32_t, num_imported_functions) \
MACRO(CONST_OBJECT, FixedArray, code_table) \ MACRO(CONST_OBJECT, FixedArray, code_table) \
MACRO(OBJECT, FixedArray, function_tables) \ MACRO(OBJECT, FixedArray, function_tables) \
MACRO(OBJECT, FixedArray, signature_tables) \ MACRO(CONST_OBJECT, FixedArray, empty_function_tables)
MACRO(CONST_OBJECT, FixedArray, empty_function_tables) \
MACRO(CONST_OBJECT, FixedArray, empty_signature_tables)
// TODO(mtrofin): this is unnecessary when we stop needing // TODO(mtrofin): this is unnecessary when we stop needing
// FLAG_wasm_jit_to_native, because we have instance_id on NativeModule. // FLAG_wasm_jit_to_native, because we have instance_id on NativeModule.
......
...@@ -209,12 +209,7 @@ NativeModuleSerializer::NativeModuleSerializer(Isolate* isolate, ...@@ -209,12 +209,7 @@ NativeModuleSerializer::NativeModuleSerializer(Isolate* isolate,
size_t NativeModuleSerializer::MeasureHeader() const { size_t NativeModuleSerializer::MeasureHeader() const {
return sizeof(uint32_t) + // total wasm fct count return sizeof(uint32_t) + // total wasm fct count
sizeof( sizeof(uint32_t); // imported fcts - i.e. index of first wasm function
uint32_t) + // imported fcts - i.e. index of first wasm function
sizeof(uint32_t) + // table count
native_module_->specialization_data_.function_tables.size()
// function table, containing pointers
* sizeof(GlobalHandleAddress);
} }
void NativeModuleSerializer::BufferHeader() { void NativeModuleSerializer::BufferHeader() {
...@@ -224,13 +219,6 @@ void NativeModuleSerializer::BufferHeader() { ...@@ -224,13 +219,6 @@ void NativeModuleSerializer::BufferHeader() {
Writer writer(remaining_); Writer writer(remaining_);
writer.Write(native_module_->FunctionCount()); writer.Write(native_module_->FunctionCount());
writer.Write(native_module_->num_imported_functions()); writer.Write(native_module_->num_imported_functions());
writer.Write(static_cast<uint32_t>(
native_module_->specialization_data_.function_tables.size()));
for (size_t i = 0,
e = native_module_->specialization_data_.function_tables.size();
i < e; ++i) {
writer.Write(native_module_->specialization_data_.function_tables[i]);
}
} }
size_t NativeModuleSerializer::GetCodeHeaderSize() { size_t NativeModuleSerializer::GetCodeHeaderSize() {
...@@ -554,16 +542,6 @@ bool NativeModuleDeserializer::ReadHeader() { ...@@ -554,16 +542,6 @@ bool NativeModuleDeserializer::ReadHeader() {
bool ok = functions == native_module_->FunctionCount() && bool ok = functions == native_module_->FunctionCount() &&
imports == native_module_->num_imported_functions(); imports == native_module_->num_imported_functions();
if (!ok) return false; if (!ok) return false;
size_t table_count = reader.Read<uint32_t>();
std::vector<GlobalHandleAddress> funcs(table_count);
for (size_t i = 0; i < table_count; ++i) {
funcs[i] = reader.Read<GlobalHandleAddress>();
}
native_module_->function_tables() = funcs;
// resize, so that from here on the native module can be
// asked about num_function_tables().
native_module_->empty_function_tables().resize(table_count);
unread_ = unread_ + (start_size - reader.current_buffer().size()); unread_ = unread_ + (start_size - reader.current_buffer().size());
return true; return true;
......
...@@ -159,11 +159,22 @@ void TestingModuleBuilder::AddIndirectFunctionTable( ...@@ -159,11 +159,22 @@ void TestingModuleBuilder::AddIndirectFunctionTable(
table_size * compiler::kFunctionTableEntrySize); table_size * compiler::kFunctionTableEntrySize);
function_tables_.push_back( function_tables_.push_back(
isolate_->global_handles()->Create(func_table).address()); isolate_->global_handles()->Create(func_table).address());
if (WASM_CONTEXT_TABLES) {
WasmContext* wasm_context = instance_object()->wasm_context()->get();
wasm_context->table = reinterpret_cast<IndirectFunctionTableEntry*>(
calloc(table_size, sizeof(IndirectFunctionTableEntry)));
wasm_context->table_size = table_size;
for (uint32_t i = 0; i < table_size; i++) {
wasm_context->table[i].sig_id = -1;
}
}
} }
void TestingModuleBuilder::PopulateIndirectFunctionTable() { void TestingModuleBuilder::PopulateIndirectFunctionTable() {
if (interpret()) return; if (interpret()) return;
// Initialize the fixed arrays in instance->function_tables. // Initialize the fixed arrays in instance->function_tables.
WasmContext* wasm_context = instance_object()->wasm_context()->get();
for (uint32_t i = 0; i < function_tables_.size(); i++) { for (uint32_t i = 0; i < function_tables_.size(); i++) {
WasmIndirectFunctionTable& table = test_module_.function_tables[i]; WasmIndirectFunctionTable& table = test_module_.function_tables[i];
Handle<FixedArray> function_table( Handle<FixedArray> function_table(
...@@ -171,17 +182,16 @@ void TestingModuleBuilder::PopulateIndirectFunctionTable() { ...@@ -171,17 +182,16 @@ void TestingModuleBuilder::PopulateIndirectFunctionTable() {
int table_size = static_cast<int>(table.values.size()); int table_size = static_cast<int>(table.values.size());
for (int j = 0; j < table_size; j++) { for (int j = 0; j < table_size; j++) {
WasmFunction& function = test_module_.functions[table.values[j]]; WasmFunction& function = test_module_.functions[table.values[j]];
function_table->set( int sig_id = test_module_.signature_map.Find(function.sig);
compiler::FunctionTableSigOffset(j), function_table->set(compiler::FunctionTableSigOffset(j),
Smi::FromInt(test_module_.signature_map.Find(function.sig))); Smi::FromInt(sig_id));
if (FLAG_wasm_jit_to_native) { if (WASM_CONTEXT_TABLES) {
Handle<Foreign> foreign_holder = isolate_->factory()->NewForeign( auto start = native_module_->GetCode(function.func_index)
native_module_->GetCode(function.func_index)
->instructions() ->instructions()
.start(), .start();
TENURED); wasm_context->table[j].context = wasm_context;
function_table->set(compiler::FunctionTableCodeOffset(j), wasm_context->table[j].sig_id = sig_id;
*foreign_holder); wasm_context->table[j].target = start;
} else { } else {
function_table->set(compiler::FunctionTableCodeOffset(j), function_table->set(compiler::FunctionTableCodeOffset(j),
*function_code_[function.func_index]); *function_code_[function.func_index]);
......
...@@ -33,7 +33,7 @@ function AddFunctions(builder) { ...@@ -33,7 +33,7 @@ function AddFunctions(builder) {
function js_div(a, b) { return (a / b) | 0; } function js_div(a, b) { return (a / b) | 0; }
(function ExportedTableTest() { (function ExportedTableTest() {
print("ExportedTableTest..."); print(arguments.callee.name);
let builder = new WasmModuleBuilder(); let builder = new WasmModuleBuilder();
...@@ -102,9 +102,9 @@ function js_div(a, b) { return (a / b) | 0; } ...@@ -102,9 +102,9 @@ function js_div(a, b) { return (a / b) | 0; }
})(); })();
(function ImportedTableTest() { (function ImportedTableTest1() {
let kTableSize = 10; let kTableSize = 10;
print("ImportedTableTest..."); print(arguments.callee.name);
var builder = new WasmModuleBuilder(); var builder = new WasmModuleBuilder();
let d = builder.addImport("q", "js_div", kSig_i_ii); let d = builder.addImport("q", "js_div", kSig_i_ii);
...@@ -172,9 +172,9 @@ function js_div(a, b) { return (a / b) | 0; } ...@@ -172,9 +172,9 @@ function js_div(a, b) { return (a / b) | 0; }
} }
})(); })();
(function ImportedTableTest() { (function ImportedTableTest2() {
let kTableSize = 10; let kTableSize = 10;
print("ManualTableTest..."); print(arguments.callee.name);
var builder = new WasmModuleBuilder(); var builder = new WasmModuleBuilder();
...@@ -240,7 +240,7 @@ function js_div(a, b) { return (a / b) | 0; } ...@@ -240,7 +240,7 @@ function js_div(a, b) { return (a / b) | 0; }
(function CumulativeTest() { (function CumulativeTest() {
print("CumulativeTest..."); print(arguments.callee.name);
let kTableSize = 10; let kTableSize = 10;
let table = new WebAssembly.Table( let table = new WebAssembly.Table(
...@@ -251,7 +251,7 @@ function js_div(a, b) { return (a / b) | 0; } ...@@ -251,7 +251,7 @@ function js_div(a, b) { return (a / b) | 0; }
builder.addImportedTable("x", "table", kTableSize, kTableSize); builder.addImportedTable("x", "table", kTableSize, kTableSize);
let g = builder.addImportedGlobal("x", "base", kWasmI32); let g = builder.addImportedGlobal("x", "base", kWasmI32);
let sig_index = builder.addType(kSig_i_v); let sig_index = builder.addType(kSig_i_v);
builder.addFunction("g", sig_index) let f = builder.addFunction("f", sig_index)
.addBody([ .addBody([
kExprGetGlobal, g kExprGetGlobal, g
]); ]);
...@@ -260,7 +260,7 @@ function js_div(a, b) { return (a / b) | 0; } ...@@ -260,7 +260,7 @@ function js_div(a, b) { return (a / b) | 0; }
kExprGetLocal, 0, kExprGetLocal, 0,
kExprCallIndirect, sig_index, kTableZero]) // -- kExprCallIndirect, sig_index, kTableZero]) // --
.exportAs("main"); .exportAs("main");
builder.addFunctionTableInit(g, true, [g]); builder.addFunctionTableInit(g, true, [f.index]);
let module = new WebAssembly.Module(builder.toBuffer()); let module = new WebAssembly.Module(builder.toBuffer());
...@@ -283,7 +283,7 @@ function js_div(a, b) { return (a / b) | 0; } ...@@ -283,7 +283,7 @@ function js_div(a, b) { return (a / b) | 0; }
})(); })();
(function TwoWayTest() { (function TwoWayTest() {
print("TwoWayTest..."); print(arguments.callee.name);
let kTableSize = 3; let kTableSize = 3;
// Module {m1} defines the table and exports it. // Module {m1} defines the table and exports it.
...@@ -342,7 +342,7 @@ function js_div(a, b) { return (a / b) | 0; } ...@@ -342,7 +342,7 @@ function js_div(a, b) { return (a / b) | 0; }
})(); })();
(function MismatchedTableSize() { (function MismatchedTableSize() {
print("MismatchedTableSize..."); print(arguments.callee.name);
let kTableSize = 5; let kTableSize = 5;
for (var expsize = 1; expsize < 4; expsize++) { for (var expsize = 1; expsize < 4; expsize++) {
...@@ -374,7 +374,7 @@ function js_div(a, b) { return (a / b) | 0; } ...@@ -374,7 +374,7 @@ function js_div(a, b) { return (a / b) | 0; }
})(); })();
(function TableGrowBoundsCheck() { (function TableGrowBoundsCheck() {
print("TableGrowBoundsCheck"); print(arguments.callee.name);
var kMaxSize = 30, kInitSize = 5; var kMaxSize = 30, kInitSize = 5;
let table = new WebAssembly.Table({element: "anyfunc", let table = new WebAssembly.Table({element: "anyfunc",
initial: kInitSize, maximum: kMaxSize}); initial: kInitSize, maximum: kMaxSize});
...@@ -398,7 +398,7 @@ function js_div(a, b) { return (a / b) | 0; } ...@@ -398,7 +398,7 @@ function js_div(a, b) { return (a / b) | 0; }
})(); })();
(function CumulativeGrowTest() { (function CumulativeGrowTest() {
print("CumulativeGrowTest..."); print(arguments.callee.name);
let table = new WebAssembly.Table({ let table = new WebAssembly.Table({
element: "anyfunc", initial: 10, maximum: 30}); element: "anyfunc", initial: 10, maximum: 30});
var builder = new WasmModuleBuilder(); var builder = new WasmModuleBuilder();
...@@ -460,7 +460,7 @@ function js_div(a, b) { return (a / b) | 0; } ...@@ -460,7 +460,7 @@ function js_div(a, b) { return (a / b) | 0; }
(function TestImportTooLarge() { (function TestImportTooLarge() {
print("TestImportTooLarge..."); print(arguments.callee.name);
let builder = new WasmModuleBuilder(); let builder = new WasmModuleBuilder();
builder.addImportedTable("t", "t", 1, 2); builder.addImportedTable("t", "t", 1, 2);
...@@ -478,7 +478,7 @@ function js_div(a, b) { return (a / b) | 0; } ...@@ -478,7 +478,7 @@ function js_div(a, b) { return (a / b) | 0; }
})(); })();
(function TableImportLargerThanCompiled() { (function TableImportLargerThanCompiled() {
print("TableImportLargerThanCompiled..."); print(arguments.callee.name);
var kMaxSize = 30, kInitSize = 5; var kMaxSize = 30, kInitSize = 5;
var builder = new WasmModuleBuilder(); var builder = new WasmModuleBuilder();
builder.addImportedTable("x", "table", 1, 35); builder.addImportedTable("x", "table", 1, 35);
...@@ -492,7 +492,7 @@ function js_div(a, b) { return (a / b) | 0; } ...@@ -492,7 +492,7 @@ function js_div(a, b) { return (a / b) | 0; }
})(); })();
(function ModulesShareTableAndGrow() { (function ModulesShareTableAndGrow() {
print("ModulesShareTableAndGrow..."); print(arguments.callee.name);
let module1 = (() => { let module1 = (() => {
let builder = new WasmModuleBuilder(); let builder = new WasmModuleBuilder();
builder.addImportedTable("x", "table", 1, 35); builder.addImportedTable("x", "table", 1, 35);
...@@ -525,7 +525,7 @@ function js_div(a, b) { return (a / b) | 0; } ...@@ -525,7 +525,7 @@ function js_div(a, b) { return (a / b) | 0; }
(function MultipleElementSegments() { (function MultipleElementSegments() {
let kTableSize = 10; let kTableSize = 10;
print("MultipleElementSegments..."); print(arguments.callee.name);
let mul = (a, b) => a * b; let mul = (a, b) => a * b;
let add = (a, b) => a + b; let add = (a, b) => a + b;
...@@ -603,7 +603,8 @@ function js_div(a, b) { return (a / b) | 0; } ...@@ -603,7 +603,8 @@ function js_div(a, b) { return (a / b) | 0; }
})(); })();
(function IndirectCallIntoOtherInstance() { (function IndirectCallIntoOtherInstance() {
print("IndirectCallIntoOtherInstance..."); print(arguments.callee.name);
var mem_1 = new WebAssembly.Memory({initial: 1}); var mem_1 = new WebAssembly.Memory({initial: 1});
var mem_2 = new WebAssembly.Memory({initial: 1}); var mem_2 = new WebAssembly.Memory({initial: 1});
var view_1 = new Int32Array(mem_1.buffer); var view_1 = new Int32Array(mem_1.buffer);
...@@ -644,7 +645,7 @@ function js_div(a, b) { return (a / b) | 0; } ...@@ -644,7 +645,7 @@ function js_div(a, b) { return (a / b) | 0; }
(function ImportedFreestandingTable() { (function ImportedFreestandingTable() {
print("ImportedFreestandingTable..."); print(arguments.callee.name);
function forceGc() { function forceGc() {
gc(); gc();
...@@ -709,7 +710,8 @@ function js_div(a, b) { return (a / b) | 0; } ...@@ -709,7 +710,8 @@ function js_div(a, b) { return (a / b) | 0; }
// Remove this test when v8:7232 is addressed comprehensively. // Remove this test when v8:7232 is addressed comprehensively.
(function TablesAreImmutableInWasmCallstacks() { (function TablesAreImmutableInWasmCallstacks() {
print('TablesAreImmutableInWasmCallstacks...'); print(arguments.callee.name);
let table = new WebAssembly.Table({initial:2, element:'anyfunc'}); let table = new WebAssembly.Table({initial:2, element:'anyfunc'});
let builder = new WasmModuleBuilder(); let builder = new WasmModuleBuilder();
...@@ -743,3 +745,93 @@ function js_div(a, b) { return (a / b) | 0; } ...@@ -743,3 +745,93 @@ function js_div(a, b) { return (a / b) | 0; }
table.set(0, null); table.set(0, null);
assertEquals(null, table.get(0)); assertEquals(null, table.get(0));
})(); })();
(function ImportedWasmFunctionPutIntoTable() {
print(arguments.callee.name);
let wasm_mul = (() => {
let builder = new WasmModuleBuilder();
builder.addFunction("mul", kSig_i_ii)
.addBody(
[kExprGetLocal, 0,
kExprGetLocal, 1,
kExprI32Mul])
.exportFunc();
return builder.instantiate().exports.mul;
})();
let builder = new WasmModuleBuilder();
let j = builder.addImport("q", "js_div", kSig_i_ii);
let w = builder.addImport("q", "wasm_mul", kSig_i_ii);
builder.addFunction("main", kSig_i_ii)
.addBody([
kExprI32Const, 33, // --
kExprGetLocal, 0, // --
kExprGetLocal, 1, // --
kExprCallIndirect, 0, kTableZero]) // --
.exportAs("main");
builder.setFunctionTableBounds(10, 10);
let g = builder.addImportedGlobal("q", "base", kWasmI32);
builder.addFunctionTableInit(g, true, [j, w]);
let module = new WebAssembly.Module(builder.toBuffer());
for (var i = 0; i < 5; i++) {
let instance = new WebAssembly.Instance(module, {q: {base: i, js_div: js_div, wasm_mul: wasm_mul}});
let j = i + 1;
assertThrows(() => {instance.exports.main(j, i-1)});
assertEquals((33/j)|0, instance.exports.main(j, i+0));
assertEquals((33*j)|0, instance.exports.main(j, i+1));
assertThrows(() => {instance.exports.main(j, i+2)});
}
})();
(function ImportedWasmFunctionPutIntoImportedTable() {
print(arguments.callee.name);
let kTableSize = 10;
let wasm_mul = (() => {
let builder = new WasmModuleBuilder();
builder.addFunction("mul", kSig_i_ii)
.addBody(
[kExprGetLocal, 0,
kExprGetLocal, 1,
kExprI32Mul])
.exportFunc();
return builder.instantiate().exports.mul;
})();
let table = new WebAssembly.Table({element: "anyfunc",
initial: kTableSize,
maximum: kTableSize});
let builder = new WasmModuleBuilder();
let j = builder.addImport("q", "js_div", kSig_i_ii);
let w = builder.addImport("q", "wasm_mul", kSig_i_ii);
builder.addImportedTable("q", "table", kTableSize, kTableSize);
builder.addFunction("main", kSig_i_ii)
.addBody([
kExprI32Const, 44, // --
kExprGetLocal, 0, // --
kExprGetLocal, 1, // --
kExprCallIndirect, 0, kTableZero]) // --
.exportAs("main");
let g = builder.addImportedGlobal("q", "base", kWasmI32);
builder.addFunctionTableInit(g, true, [j, w]);
let module = new WebAssembly.Module(builder.toBuffer());
for (var i = 0; i < 5; i++) {
let instance = new WebAssembly.Instance(module, {q: {base: i, js_div: js_div, wasm_mul: wasm_mul, table: table}});
let j = i + 1;
assertEquals((44/j)|0, instance.exports.main(j, i+0));
assertEquals((44*j)|0, instance.exports.main(j, i+1));
assertThrows(() => {instance.exports.main(j, i+2)});
}
})();