Commit 4b3043ef authored by Clemens Hammacher, committed by Commit Bot

[Liftoff] Implement memory operations

Add support for loading and storing i32 values to and from memory.
Support for f32 and for non-traphandler configurations (i.e. emitting
bounds checks) will be added in a follow-up CL.

R=titzer@chromium.org

Bug: v8:6600
Change-Id: I43c44ce61f5acbac325261212374eb0f48c6ee89
Reviewed-on: https://chromium-review.googlesource.com/809164
Reviewed-by: Ben Titzer <titzer@chromium.org>
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Cr-Commit-Position: refs/heads/master@{#50068}
parent 60247ea2
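
The memory operations described in the commit message above replace the old size-based Load/Store interface with one that dispatches on a LoadType/StoreType, so the assembler itself picks a zero-extending, sign-extending, or plain move. Below is a minimal standalone sketch of that dispatch idea, not V8 code: the helper name MovForLoad and the printed mnemonics are illustrative only, loosely mirroring the switch added to LiftoffAssembler::Load on ia32 in this change.

#include <cstdio>

// Simplified stand-in for Liftoff's LoadType: the value determines both the
// access size and whether the load zero- or sign-extends into a 32-bit register.
enum class LoadType { kI32Load8U, kI32Load8S, kI32Load16U, kI32Load16S, kI32Load };

// Hypothetical helper: map a load type to the mov variant that would be emitted.
const char* MovForLoad(LoadType type) {
  switch (type) {
    case LoadType::kI32Load8U:  return "movzx_b";  // zero-extend 8 -> 32 bit
    case LoadType::kI32Load8S:  return "movsx_b";  // sign-extend 8 -> 32 bit
    case LoadType::kI32Load16U: return "movzx_w";  // zero-extend 16 -> 32 bit
    case LoadType::kI32Load16S: return "movsx_w";  // sign-extend 16 -> 32 bit
    case LoadType::kI32Load:    return "mov";      // plain 32-bit load
  }
  return "unreachable";
}

int main() {
  std::printf("i32.load8_s is emitted as %s\n", MovForLoad(LoadType::kI32Load8S));
  return 0;
}
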
......@@ -21,12 +21,12 @@ void LiftoffAssembler::LoadFromContext(Register dst, uint32_t offset,
void LiftoffAssembler::SpillContext(Register context) {}
void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,
uint32_t offset_imm, int size,
LiftoffRegList pinned) {}
Register offset_reg, uint32_t offset_imm,
LoadType type, LiftoffRegList pinned) {}
void LiftoffAssembler::Store(Register dst_addr, uint32_t offset_imm,
LiftoffRegister src, int size,
LiftoffRegList pinned) {}
void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
uint32_t offset_imm, LiftoffRegister src,
StoreType type, LiftoffRegList pinned) {}
void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
uint32_t caller_slot_idx) {}
......@@ -64,8 +64,13 @@ void LiftoffAssembler::emit_f32_sub(DoubleRegister dst, DoubleRegister lhs,
DoubleRegister rhs) {}
void LiftoffAssembler::emit_f32_mul(DoubleRegister dst, DoubleRegister lhs,
DoubleRegister rhs) {}
void LiftoffAssembler::JumpIfZero(Register reg, Label* label) {}
void LiftoffAssembler::CallTrapCallbackForTesting() {}
void LiftoffAssembler::AssertUnreachable(BailoutReason reason) {}
} // namespace wasm
} // namespace internal
} // namespace v8
......
......@@ -21,12 +21,12 @@ void LiftoffAssembler::LoadFromContext(Register dst, uint32_t offset,
void LiftoffAssembler::SpillContext(Register context) {}
void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,
uint32_t offset_imm, int size,
LiftoffRegList pinned) {}
Register offset_reg, uint32_t offset_imm,
LoadType type, LiftoffRegList pinned) {}
void LiftoffAssembler::Store(Register dst_addr, uint32_t offset_imm,
LiftoffRegister src, int size,
LiftoffRegList pinned) {}
void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
uint32_t offset_imm, LiftoffRegister src,
StoreType type, LiftoffRegList pinned) {}
void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
uint32_t caller_slot_idx) {}
......@@ -64,8 +64,13 @@ void LiftoffAssembler::emit_f32_sub(DoubleRegister dst, DoubleRegister lhs,
DoubleRegister rhs) {}
void LiftoffAssembler::emit_f32_mul(DoubleRegister dst, DoubleRegister lhs,
DoubleRegister rhs) {}
void LiftoffAssembler::JumpIfZero(Register reg, Label* label) {}
void LiftoffAssembler::CallTrapCallbackForTesting() {}
void LiftoffAssembler::AssertUnreachable(BailoutReason reason) {}
} // namespace wasm
} // namespace internal
} // namespace v8
......
......@@ -66,33 +66,71 @@ void LiftoffAssembler::SpillContext(Register context) {
}
void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,
uint32_t offset_imm, int size,
LiftoffRegList pinned) {
Operand src_op = Operand(src_addr, offset_imm);
Register offset_reg, uint32_t offset_imm,
LoadType type, LiftoffRegList pinned) {
Operand src_op = offset_reg == no_reg
? Operand(src_addr, offset_imm)
: Operand(src_addr, offset_reg, times_1, offset_imm);
if (offset_imm > kMaxInt) {
// The immediate can not be encoded in the operand. Load it to a register
// first.
Register src = GetUnusedRegister(kGpReg, pinned).gp();
mov(src, Immediate(offset_imm));
if (offset_reg != no_reg) {
emit_ptrsize_add(src, src, offset_reg);
}
src_op = Operand(src_addr, src, times_1, 0);
}
DCHECK_EQ(4, size);
mov(dst.gp(), src_op);
switch (type.value()) {
case LoadType::kI32Load8U:
movzx_b(dst.gp(), src_op);
break;
case LoadType::kI32Load8S:
movsx_b(dst.gp(), src_op);
break;
case LoadType::kI32Load16U:
movzx_w(dst.gp(), src_op);
break;
case LoadType::kI32Load16S:
movsx_w(dst.gp(), src_op);
break;
case LoadType::kI32Load:
mov(dst.gp(), src_op);
break;
default:
UNREACHABLE();
}
}
void LiftoffAssembler::Store(Register dst_addr, uint32_t offset_imm,
LiftoffRegister src, int size,
LiftoffRegList pinned) {
Operand dst_op = Operand(dst_addr, offset_imm);
void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
uint32_t offset_imm, LiftoffRegister src,
StoreType type, LiftoffRegList pinned) {
Operand dst_op = offset_reg == no_reg
? Operand(dst_addr, offset_imm)
: Operand(dst_addr, offset_reg, times_1, offset_imm);
if (offset_imm > kMaxInt) {
// The immediate can not be encoded in the operand. Load it to a register
// first.
Register dst = GetUnusedRegister(kGpReg, pinned).gp();
mov(dst, Immediate(offset_imm));
if (offset_reg != no_reg) {
emit_ptrsize_add(dst, dst, offset_reg);
}
dst_op = Operand(dst_addr, dst, times_1, 0);
}
DCHECK_EQ(4, size);
mov(dst_op, src.gp());
switch (type.value()) {
case StoreType::kI32Store8:
mov_b(dst_op, src.gp());
break;
case StoreType::kI32Store16:
mov_w(dst_op, src.gp());
break;
case StoreType::kI32Store:
mov(dst_op, src.gp());
break;
default:
UNREACHABLE();
}
}
void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
......@@ -183,6 +221,11 @@ void LiftoffAssembler::emit_i32_add(Register dst, Register lhs, Register rhs) {
}
}
void LiftoffAssembler::emit_ptrsize_add(Register dst, Register lhs,
Register rhs) {
emit_i32_add(dst, lhs, rhs);
}
void LiftoffAssembler::emit_i32_sub(Register dst, Register lhs, Register rhs) {
if (dst == rhs) {
neg(dst);
......@@ -218,9 +261,7 @@ void LiftoffAssembler::emit_f32_add(DoubleRegister dst, DoubleRegister lhs,
if (CpuFeatures::IsSupported(AVX)) {
CpuFeatureScope scope(this, AVX);
vaddss(dst, lhs, rhs);
return;
}
if (dst == rhs) {
} else if (dst == rhs) {
addss(dst, lhs);
} else {
if (dst != lhs) movss(dst, lhs);
......@@ -233,9 +274,7 @@ void LiftoffAssembler::emit_f32_sub(DoubleRegister dst, DoubleRegister lhs,
if (CpuFeatures::IsSupported(AVX)) {
CpuFeatureScope scope(this, AVX);
vsubss(dst, lhs, rhs);
return;
}
if (dst == rhs) {
} else if (dst == rhs) {
movss(kScratchDoubleReg, rhs);
movss(dst, lhs);
subss(dst, kScratchDoubleReg);
......@@ -250,9 +289,7 @@ void LiftoffAssembler::emit_f32_mul(DoubleRegister dst, DoubleRegister lhs,
if (CpuFeatures::IsSupported(AVX)) {
CpuFeatureScope scope(this, AVX);
vmulss(dst, lhs, rhs);
return;
}
if (dst == rhs) {
} else if (dst == rhs) {
mulss(dst, lhs);
} else {
if (dst != lhs) movss(dst, lhs);
......@@ -265,6 +302,16 @@ void LiftoffAssembler::JumpIfZero(Register reg, Label* label) {
j(zero, label);
}
void LiftoffAssembler::CallTrapCallbackForTesting() {
PrepareCallCFunction(0, GetUnusedRegister(kGpReg).gp());
CallCFunction(
ExternalReference::wasm_call_trap_callback_for_testing(isolate()), 0);
}
void LiftoffAssembler::AssertUnreachable(BailoutReason reason) {
TurboAssembler::AssertUnreachable(reason);
}
} // namespace wasm
} // namespace internal
} // namespace v8
......
......@@ -248,10 +248,12 @@ class LiftoffAssembler : public TurboAssembler {
inline void LoadConstant(LiftoffRegister, WasmValue);
inline void LoadFromContext(Register dst, uint32_t offset, int size);
inline void SpillContext(Register context);
inline void Load(LiftoffRegister dst, Register src_addr, uint32_t offset_imm,
int size, LiftoffRegList = {});
inline void Store(Register dst_addr, uint32_t offset_imm, LiftoffRegister src,
int size, LiftoffRegList = {});
inline void Load(LiftoffRegister dst, Register src_addr, Register offset_reg,
uint32_t offset_imm, LoadType type,
LiftoffRegList pinned = {});
inline void Store(Register dst_addr, Register offset_reg, uint32_t offset_imm,
LiftoffRegister src, StoreType type,
LiftoffRegList pinned = {});
inline void LoadCallerFrameSlot(LiftoffRegister, uint32_t caller_slot_idx);
inline void MoveStackValue(uint32_t dst_index, uint32_t src_index);
......@@ -270,6 +272,8 @@ class LiftoffAssembler : public TurboAssembler {
inline void emit_i32_or(Register dst, Register lhs, Register rhs);
inline void emit_i32_xor(Register dst, Register lhs, Register rhs);
inline void emit_ptrsize_add(Register dst, Register lhs, Register rhs);
inline void emit_f32_add(DoubleRegister dst, DoubleRegister lhs,
DoubleRegister rhs);
inline void emit_f32_sub(DoubleRegister dst, DoubleRegister lhs,
......@@ -279,6 +283,10 @@ class LiftoffAssembler : public TurboAssembler {
inline void JumpIfZero(Register, Label*);
inline void CallTrapCallbackForTesting();
inline void AssertUnreachable(BailoutReason reason);
////////////////////////////////////
// End of platform-specific part. //
////////////////////////////////////
......
......@@ -72,10 +72,12 @@ class LiftoffCompiler {
using Decoder = WasmFullDecoder<validate, LiftoffCompiler>;
LiftoffCompiler(LiftoffAssembler* liftoff_asm,
compiler::CallDescriptor* call_desc, compiler::ModuleEnv* env)
compiler::CallDescriptor* call_desc, compiler::ModuleEnv* env,
compiler::RuntimeExceptionSupport runtime_exception_support)
: asm_(liftoff_asm),
call_desc_(call_desc),
env_(env),
runtime_exception_support_(runtime_exception_support),
compilation_zone_(liftoff_asm->isolate()->allocator(),
"liftoff compilation"),
safepoint_table_builder_(&compilation_zone_) {}
......@@ -101,6 +103,9 @@ class LiftoffCompiler {
Label* label = decoder->control_at(i)->label.get();
if (!label->is_bound()) __ bind(label);
}
for (auto& trap : trap_ool_code_) {
if (!trap.label.get()->is_bound()) __ bind(trap.label.get());
}
#endif
}
......@@ -159,6 +164,7 @@ class LiftoffCompiler {
return;
}
__ EnterFrame(StackFrame::WASM_COMPILED);
__ set_has_frame(true);
__ ReserveStackSpace(__ GetTotalFrameSlotCount());
// Parameter 0 is the wasm context.
uint32_t num_params =
......@@ -222,7 +228,43 @@ class LiftoffCompiler {
CheckStackSizeLimit(decoder);
}
static Builtins::Name GetBuiltinIdForTrap(wasm::TrapReason reason) {
switch (reason) {
#define TRAPREASON_TO_MESSAGE(name) \
case wasm::k##name: \
return Builtins::kThrowWasm##name;
FOREACH_WASM_TRAPREASON(TRAPREASON_TO_MESSAGE)
#undef TRAPREASON_TO_MESSAGE
default:
UNREACHABLE();
}
}
void GenerateTrap(wasm::TrapReason reason, wasm::WasmCodePosition position) {
if (!runtime_exception_support_) {
// We cannot test calls to the runtime in cctest/test-run-wasm.
// Therefore we emit a call to C here instead of a call to the runtime.
__ CallTrapCallbackForTesting();
__ LeaveFrame(StackFrame::WASM_COMPILED);
__ set_has_frame(false);
__ Ret();
return;
}
DCHECK(runtime_exception_support_);
source_position_table_builder_.AddPosition(__ pc_offset(),
SourcePosition(position), true);
Builtins::Name trap_id = GetBuiltinIdForTrap(reason);
__ Call(__ isolate()->builtins()->builtin_handle(trap_id),
RelocInfo::CODE_TARGET);
__ AssertUnreachable(kUnexpectedReturnFromWasmTrap);
}
void FinishFunction(Decoder* decoder) {
for (auto& trap : trap_ool_code_) {
__ bind(trap.label.get());
GenerateTrap(trap.reason, trap.position);
}
safepoint_table_builder_.Emit(asm_, __ GetTotalFrameSlotCount());
}
......@@ -463,10 +505,11 @@ class LiftoffCompiler {
kPointerSize);
LiftoffRegister value =
pinned.set(__ GetUnusedRegister(reg_class_for(global->type), pinned));
int size = 1 << ElementSizeLog2Of(global->type);
if (size > kPointerSize)
LoadType type =
global->type == kWasmI32 ? LoadType::kI32Load : LoadType::kI64Load;
if (type.size() > kPointerSize)
return unsupported(decoder, "global > kPointerSize");
__ Load(value, addr, global->offset, size, pinned);
__ Load(value, addr, no_reg, global->offset, type, pinned);
__ PushRegister(global->type, value);
CheckStackSizeLimit(decoder);
}
......@@ -481,8 +524,9 @@ class LiftoffCompiler {
kPointerSize);
LiftoffRegister reg =
pinned.set(__ PopToRegister(reg_class_for(global->type), pinned));
int size = 1 << ElementSizeLog2Of(global->type);
__ Store(addr, global->offset, reg, size, pinned);
StoreType type =
global->type == kWasmI32 ? StoreType::kI32Store : StoreType::kI64Store;
__ Store(addr, no_reg, global->offset, reg, type, pinned);
}
void Unreachable(Decoder* decoder) { unsupported(decoder, "unreachable"); }
......@@ -524,15 +568,42 @@ class LiftoffCompiler {
unsupported(decoder, "else");
}
void LoadMem(Decoder* decoder, LoadType type,
const MemoryAccessOperand<validate>& operand, const Value& index,
Value* result) {
unsupported(decoder, "memory load");
const MemoryAccessOperand<validate>& operand,
const Value& index_val, Value* result) {
ValueType value_type = type.value_type();
if (value_type != kWasmI32) return unsupported(decoder, "non-i32 load");
LiftoffRegList pinned;
Register index = pinned.set(__ PopToRegister(kGpReg)).gp();
if (!env_->use_trap_handler) {
return unsupported(decoder, "non-traphandler");
}
Register addr = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
__ LoadFromContext(addr, offsetof(WasmContext, mem_start), kPointerSize);
RegClass rc = reg_class_for(value_type);
LiftoffRegister value = pinned.set(__ GetUnusedRegister(rc, pinned));
__ Load(value, addr, index, operand.offset, type, pinned);
__ PushRegister(value_type, value);
CheckStackSizeLimit(decoder);
}
void StoreMem(Decoder* decoder, StoreType type,
const MemoryAccessOperand<validate>& operand,
const Value& index, const Value& value) {
unsupported(decoder, "memory store");
const Value& index_val, const Value& value_val) {
ValueType value_type = type.value_type();
if (value_type != kWasmI32) return unsupported(decoder, "non-i32 store");
if (!env_->use_trap_handler) {
return unsupported(decoder, "non-traphandler");
}
RegClass rc = reg_class_for(value_type);
LiftoffRegList pinned;
LiftoffRegister value = pinned.set(__ PopToRegister(rc));
Register index = pinned.set(__ PopToRegister(kGpReg, pinned)).gp();
Register addr = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
__ LoadFromContext(addr, offsetof(WasmContext, mem_start), kPointerSize);
__ Store(addr, index, operand.offset, value, type, pinned);
__ PushRegister(value_type, value);
}
void CurrentMemoryPages(Decoder* decoder, Value* result) {
unsupported(decoder, "current_memory");
}
......@@ -584,10 +655,21 @@ class LiftoffCompiler {
}
private:
struct TrapOolCode {
MovableLabel label;
wasm::TrapReason reason;
wasm::WasmCodePosition position;
TrapOolCode(wasm::TrapReason r, wasm::WasmCodePosition pos)
: reason(r), position(pos) {}
};
LiftoffAssembler* const asm_;
compiler::CallDescriptor* const call_desc_;
compiler::ModuleEnv* const env_;
compiler::RuntimeExceptionSupport runtime_exception_support_;
bool ok_ = true;
std::vector<TrapOolCode> trap_ool_code_;
SourcePositionTableBuilder source_position_table_builder_;
// Zone used to store information during compilation. The result will be
// stored independently, such that this zone can die together with the
// LiftoffCompiler after compilation.
......@@ -628,6 +710,11 @@ class LiftoffCompiler {
PrintF("\n");
#endif
}
Label* AddTrapCode(wasm::TrapReason reason, wasm::WasmCodePosition pos) {
trap_ool_code_.emplace_back(reason, pos);
return trap_ool_code_.back().label.get();
}
};
} // namespace
......@@ -643,7 +730,8 @@ bool compiler::WasmCompilationUnit::ExecuteLiftoffCompilation() {
const wasm::WasmModule* module = env_ ? env_->module : nullptr;
auto* call_desc = compiler::GetWasmCallDescriptor(&zone, func_body_.sig);
wasm::WasmFullDecoder<wasm::Decoder::kValidate, wasm::LiftoffCompiler>
decoder(&zone, module, func_body_, &liftoff_.asm_, call_desc, env_);
decoder(&zone, module, func_body_, &liftoff_.asm_, call_desc, env_,
runtime_exception_support_);
decoder.Decode();
if (!decoder.interface().ok()) {
// Liftoff compilation failed.
......
......@@ -21,12 +21,12 @@ void LiftoffAssembler::LoadFromContext(Register dst, uint32_t offset,
void LiftoffAssembler::SpillContext(Register context) {}
void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,
uint32_t offset_imm, int size,
LiftoffRegList pinned) {}
Register offset_reg, uint32_t offset_imm,
LoadType type, LiftoffRegList pinned) {}
void LiftoffAssembler::Store(Register dst_addr, uint32_t offset_imm,
LiftoffRegister src, int size,
LiftoffRegList pinned) {}
void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
uint32_t offset_imm, LiftoffRegister src,
StoreType type, LiftoffRegList pinned) {}
void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
uint32_t caller_slot_idx) {}
......@@ -64,8 +64,13 @@ void LiftoffAssembler::emit_f32_sub(DoubleRegister dst, DoubleRegister lhs,
DoubleRegister rhs) {}
void LiftoffAssembler::emit_f32_mul(DoubleRegister dst, DoubleRegister lhs,
DoubleRegister rhs) {}
void LiftoffAssembler::JumpIfZero(Register reg, Label* label) {}
void LiftoffAssembler::CallTrapCallbackForTesting() {}
void LiftoffAssembler::AssertUnreachable(BailoutReason reason) {}
} // namespace wasm
} // namespace internal
} // namespace v8
......
......@@ -21,12 +21,12 @@ void LiftoffAssembler::LoadFromContext(Register dst, uint32_t offset,
void LiftoffAssembler::SpillContext(Register context) {}
void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,
uint32_t offset_imm, int size,
LiftoffRegList pinned) {}
Register offset_reg, uint32_t offset_imm,
LoadType type, LiftoffRegList pinned) {}
void LiftoffAssembler::Store(Register dst_addr, uint32_t offset_imm,
LiftoffRegister src, int size,
LiftoffRegList pinned) {}
void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
uint32_t offset_imm, LiftoffRegister src,
StoreType type, LiftoffRegList pinned) {}
void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
uint32_t caller_slot_idx) {}
......@@ -64,8 +64,13 @@ void LiftoffAssembler::emit_f32_sub(DoubleRegister dst, DoubleRegister lhs,
DoubleRegister rhs) {}
void LiftoffAssembler::emit_f32_mul(DoubleRegister dst, DoubleRegister lhs,
DoubleRegister rhs) {}
void LiftoffAssembler::JumpIfZero(Register reg, Label* label) {}
void LiftoffAssembler::CallTrapCallbackForTesting() {}
void LiftoffAssembler::AssertUnreachable(BailoutReason reason) {}
} // namespace wasm
} // namespace internal
} // namespace v8
......
......@@ -21,12 +21,12 @@ void LiftoffAssembler::LoadFromContext(Register dst, uint32_t offset,
void LiftoffAssembler::SpillContext(Register context) {}
void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,
uint32_t offset_imm, int size,
LiftoffRegList pinned) {}
Register offset_reg, uint32_t offset_imm,
LoadType type, LiftoffRegList pinned) {}
void LiftoffAssembler::Store(Register dst_addr, uint32_t offset_imm,
LiftoffRegister src, int size,
LiftoffRegList pinned) {}
void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
uint32_t offset_imm, LiftoffRegister src,
StoreType type, LiftoffRegList pinned) {}
void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
uint32_t caller_slot_idx) {}
......@@ -64,8 +64,13 @@ void LiftoffAssembler::emit_f32_sub(DoubleRegister dst, DoubleRegister lhs,
DoubleRegister rhs) {}
void LiftoffAssembler::emit_f32_mul(DoubleRegister dst, DoubleRegister lhs,
DoubleRegister rhs) {}
void LiftoffAssembler::JumpIfZero(Register reg, Label* label) {}
void LiftoffAssembler::CallTrapCallbackForTesting() {}
void LiftoffAssembler::AssertUnreachable(BailoutReason reason) {}
} // namespace wasm
} // namespace internal
} // namespace v8
......
......@@ -21,12 +21,12 @@ void LiftoffAssembler::LoadFromContext(Register dst, uint32_t offset,
void LiftoffAssembler::SpillContext(Register context) {}
void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,
uint32_t offset_imm, int size,
LiftoffRegList pinned) {}
Register offset_reg, uint32_t offset_imm,
LoadType type, LiftoffRegList pinned) {}
void LiftoffAssembler::Store(Register dst_addr, uint32_t offset_imm,
LiftoffRegister src, int size,
LiftoffRegList pinned) {}
void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
uint32_t offset_imm, LiftoffRegister src,
StoreType type, LiftoffRegList pinned) {}
void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
uint32_t caller_slot_idx) {}
......@@ -64,8 +64,13 @@ void LiftoffAssembler::emit_f32_sub(DoubleRegister dst, DoubleRegister lhs,
DoubleRegister rhs) {}
void LiftoffAssembler::emit_f32_mul(DoubleRegister dst, DoubleRegister lhs,
DoubleRegister rhs) {}
void LiftoffAssembler::JumpIfZero(Register reg, Label* label) {}
void LiftoffAssembler::CallTrapCallbackForTesting() {}
void LiftoffAssembler::AssertUnreachable(BailoutReason reason) {}
} // namespace wasm
} // namespace internal
} // namespace v8
......
......@@ -31,7 +31,7 @@ inline Operand GetContextOperand() { return Operand(rbp, -16); }
void LiftoffAssembler::ReserveStackSpace(uint32_t space) {
stack_space_ = space;
subl(rsp, Immediate(space));
subp(rsp, Immediate(space));
}
void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value) {
......@@ -68,41 +68,76 @@ void LiftoffAssembler::SpillContext(Register context) {
}
void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,
uint32_t offset_imm, int size,
LiftoffRegList pinned) {
Operand src_op = Operand(src_addr, offset_imm);
Register offset_reg, uint32_t offset_imm,
LoadType type, LiftoffRegList pinned) {
Operand src_op = offset_reg == no_reg
? Operand(src_addr, offset_imm)
: Operand(src_addr, offset_reg, times_1, offset_imm);
if (offset_imm > kMaxInt) {
// The immediate can not be encoded in the operand. Load it to a register
// first.
Register src = GetUnusedRegister(kGpReg, pinned).gp();
movl(src, Immediate(offset_imm));
if (offset_reg != no_reg) {
emit_ptrsize_add(src, src, offset_reg);
}
src_op = Operand(src_addr, src, times_1, 0);
}
DCHECK(size == 4 || size == 8);
if (size == 4) {
movl(dst.gp(), src_op);
} else {
movq(dst.gp(), src_op);
switch (type.value()) {
case LoadType::kI32Load8U:
movzxbl(dst.gp(), src_op);
break;
case LoadType::kI32Load8S:
movsxbl(dst.gp(), src_op);
break;
case LoadType::kI32Load16U:
movzxwl(dst.gp(), src_op);
break;
case LoadType::kI32Load16S:
movsxwl(dst.gp(), src_op);
break;
case LoadType::kI32Load:
movl(dst.gp(), src_op);
break;
case LoadType::kI64Load:
movq(dst.gp(), src_op);
break;
default:
UNREACHABLE();
}
}
void LiftoffAssembler::Store(Register dst_addr, uint32_t offset_imm,
LiftoffRegister src, int size,
LiftoffRegList pinned) {
Operand dst_op = Operand(dst_addr, offset_imm);
void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
uint32_t offset_imm, LiftoffRegister src,
StoreType type, LiftoffRegList pinned) {
Operand dst_op = offset_reg == no_reg
? Operand(dst_addr, offset_imm)
: Operand(dst_addr, offset_reg, times_1, offset_imm);
if (offset_imm > kMaxInt) {
// The immediate can not be encoded in the operand. Load it to a register
// first.
Register dst = GetUnusedRegister(kGpReg, pinned).gp();
movl(dst, Immediate(offset_imm));
if (offset_reg != no_reg) {
emit_ptrsize_add(dst, dst, offset_reg);
}
dst_op = Operand(dst_addr, dst, times_1, 0);
}
DCHECK(size == 4 || size == 8);
if (src.is_fp()) UNIMPLEMENTED();
if (size == 4) {
movl(dst_op, src.gp());
} else {
movp(dst_op, src.gp());
switch (type.value()) {
case StoreType::kI32Store8:
movb(dst_op, src.gp());
break;
case StoreType::kI32Store16:
movw(dst_op, src.gp());
break;
case StoreType::kI32Store:
movl(dst_op, src.gp());
break;
case StoreType::kI64Store:
movq(dst_op, src.gp());
break;
default:
UNREACHABLE();
}
}
......@@ -194,6 +229,15 @@ void LiftoffAssembler::emit_i32_add(Register dst, Register lhs, Register rhs) {
}
}
void LiftoffAssembler::emit_ptrsize_add(Register dst, Register lhs,
Register rhs) {
if (lhs != dst) {
leap(dst, Operand(lhs, rhs, times_1, 0));
} else {
addp(dst, rhs);
}
}
void LiftoffAssembler::emit_i32_sub(Register dst, Register lhs, Register rhs) {
if (dst == rhs) {
negl(dst);
......@@ -270,6 +314,16 @@ void LiftoffAssembler::JumpIfZero(Register reg, Label* label) {
j(zero, label);
}
void LiftoffAssembler::CallTrapCallbackForTesting() {
PrepareCallCFunction(0);
CallCFunction(
ExternalReference::wasm_call_trap_callback_for_testing(isolate()), 0);
}
void LiftoffAssembler::AssertUnreachable(BailoutReason reason) {
TurboAssembler::AssertUnreachable(reason);
}
} // namespace wasm
} // namespace internal
} // namespace v8
......