Commit d5b3d8e9 authored by Andreas Haas, committed by V8 LUCI CQ

[wasm][arm][ia32] Push instance only twice in lazy-compile builtin

This change already landed for x64; now arm and ia32 follow. The code
already existed for arm64.

The wasm instance was pushed three times in the lazy-compile builtin:
1) as part of the saved parameter registers;
2) as an explicit argument to the runtime function;
3) to load the jump table address after the runtime call.

The third push can be avoided by loading the jump table address only
after all parameters have been reloaded from the stack.

R=clemensb@chromium.org

Bug: v8:13049, v8:12926
Change-Id: Ifdbe943520c031ec5c480798694bcacc490a64bc
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3764348
Reviewed-by: Clemens Backes <clemensb@chromium.org>
Commit-Queue: Andreas Haas <ahaas@chromium.org>
Cr-Commit-Position: refs/heads/main@{#81782}
parent 2c740c12
@@ -2695,12 +2695,7 @@ void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
     __ stm(db_w, sp, gp_regs);
     __ vstm(db_w, sp, lowest_fp_reg, highest_fp_reg);
-    // Push the Wasm instance for loading the jump table address after the
-    // runtime call.
-    __ push(kWasmInstanceRegister);
-    // Push the Wasm instance again as an explicit argument to the runtime
-    // function.
+    // Push the Wasm instance as an explicit argument to the runtime function.
     __ push(kWasmInstanceRegister);
     // Push the function index as second argument.
     __ push(kWasmCompileLazyFuncIndexRegister);
@@ -2710,16 +2705,18 @@ void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
     __ CallRuntime(Runtime::kWasmCompileLazy, 2);
     // The runtime function returns the jump table slot offset as a Smi. Use
     // that to compute the jump target in r8.
-    __ pop(kWasmInstanceRegister);
-    __ ldr(r8, MemOperand(
-                   kWasmInstanceRegister,
-                   WasmInstanceObject::kJumpTableStartOffset - kHeapObjectTag));
-    __ add(r8, r8, Operand::SmiUntag(kReturnRegister0));
-    // r8 now holds the jump table slot where we want to jump to in the end.
+    __ mov(r8, Operand::SmiUntag(kReturnRegister0));
     // Restore registers.
     __ vldm(ia_w, sp, lowest_fp_reg, highest_fp_reg);
     __ ldm(ia_w, sp, gp_regs);
+    // After the instance register has been restored, we can add the jump table
+    // start to the jump table offset already stored in r8.
+    __ ldr(r9, MemOperand(
+                   kWasmInstanceRegister,
+                   WasmInstanceObject::kJumpTableStartOffset - kHeapObjectTag));
+    __ add(r8, r8, r9);
   }
   // Finally, jump to the jump table slot for the function.
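To make the effect of the reordering easier to follow, here is a sketch of how the changed part of the arm builtin reads after this CL, assembled from the new side of the hunks above (code between the hunks is omitted, so this is not the verbatim file content):

    __ stm(db_w, sp, gp_regs);
    __ vstm(db_w, sp, lowest_fp_reg, highest_fp_reg);
    // Push the Wasm instance as an explicit argument to the runtime function.
    __ push(kWasmInstanceRegister);
    // Push the function index as second argument.
    __ push(kWasmCompileLazyFuncIndexRegister);
    // ... (lines between the two hunks omitted) ...
    __ CallRuntime(Runtime::kWasmCompileLazy, 2);
    // The runtime function returns the jump table slot offset as a Smi. Use
    // that to compute the jump target in r8.
    __ mov(r8, Operand::SmiUntag(kReturnRegister0));
    // Restore registers.
    __ vldm(ia_w, sp, lowest_fp_reg, highest_fp_reg);
    __ ldm(ia_w, sp, gp_regs);
    // After the instance register has been restored, we can add the jump table
    // start to the jump table offset already stored in r8.
    __ ldr(r9, MemOperand(
                   kWasmInstanceRegister,
                   WasmInstanceObject::kJumpTableStartOffset - kHeapObjectTag));
    __ add(r8, r8, r9);

The untagged slot offset is parked in r8 across the register restore (which implies r8 is not in the restored gp_regs set), so the instance no longer needs its own stack slot; r9 is only used as a scratch register for the jump table start after the restore.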
@@ -2931,12 +2931,7 @@ void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
       offset += kSimd128Size;
     }
-    // Push the Wasm instance for loading the jump table address after the
-    // runtime call.
-    __ Push(kWasmInstanceRegister);
-    // Push the Wasm instance again as an explicit argument to the runtime
-    // function.
+    // Push the Wasm instance as an explicit argument to the runtime function.
     __ Push(kWasmInstanceRegister);
     // Push the function index as second argument.
     __ Push(kWasmCompileLazyFuncIndexRegister);
@@ -2946,13 +2941,8 @@ void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
     __ CallRuntime(Runtime::kWasmCompileLazy, 2);
     // The runtime function returns the jump table slot offset as a Smi. Use
     // that to compute the jump target in edi.
-    __ Pop(kWasmInstanceRegister);
-    __ mov(edi, MemOperand(kWasmInstanceRegister,
-                           WasmInstanceObject::kJumpTableStartOffset -
-                               kHeapObjectTag));
     __ SmiUntag(kReturnRegister0);
-    __ add(edi, kReturnRegister0);
-    // edi now holds the jump table slot where we want to jump to in the end.
+    __ mov(edi, kReturnRegister0);
     // Restore registers.
     for (DoubleRegister reg : base::Reversed(wasm::kFpParamRegisters)) {
@@ -2964,6 +2954,12 @@ void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
     for (Register reg : base::Reversed(wasm::kGpParamRegisters)) {
      __ Pop(reg);
     }
+    // After the instance register has been restored, we can add the jump table
+    // start to the jump table offset already stored in edi.
+    __ add(edi, MemOperand(kWasmInstanceRegister,
+                           WasmInstanceObject::kJumpTableStartOffset -
+                               kHeapObjectTag));
   }
   // Finally, jump to the jump table slot for the function.
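Similarly, a sketch of the ia32 builtin after this CL, assembled from the new side of the hunks above (the register save/restore loops between the hunks are omitted):

    // Push the Wasm instance as an explicit argument to the runtime function.
    __ Push(kWasmInstanceRegister);
    // Push the function index as second argument.
    __ Push(kWasmCompileLazyFuncIndexRegister);
    // ... (lines between the hunks omitted) ...
    __ CallRuntime(Runtime::kWasmCompileLazy, 2);
    // The runtime function returns the jump table slot offset as a Smi. Use
    // that to compute the jump target in edi.
    __ SmiUntag(kReturnRegister0);
    __ mov(edi, kReturnRegister0);
    // Restore registers.
    // ... (FP and GP restore loops omitted) ...
    // After the instance register has been restored, we can add the jump table
    // start to the jump table offset already stored in edi.
    __ add(edi, MemOperand(kWasmInstanceRegister,
                           WasmInstanceObject::kJumpTableStartOffset -
                               kHeapObjectTag));

On ia32 no extra scratch register is needed: the add reads the jump table start straight from memory, so edi only has to survive the restore loops (which implies it is not among wasm::kGpParamRegisters).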