Commit ce2bfb8e authored by Clemens Hammacher, committed by Commit Bot

[Liftoff][arm] Avoid use of temp registers

The temp registers might be needed by the assembler, so avoid using them
in LiftoffAssembler. Use Liftoff cache registers instead. This might
introduce additional spills if all registers are in use, but this is
unlikely.

This also simplifies the logic to ensure non-aliasing of certain
registers.

R=ahaas@chromium.org

Bug: chromium:922933, v8:6600
Change-Id: Ie929d9de0b6f4f41c6117d820b6a367dd0a342f7
Reviewed-on: https://chromium-review.googlesource.com/c/1424862
Reviewed-by: Andreas Haas <ahaas@chromium.org>
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Cr-Commit-Position: refs/heads/master@{#58961}
parent 0d37b80d
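Before the diff, a minimal standalone sketch of the failure mode the message describes. This is plain C++, not V8 code; ScratchPool, Acquire, and HelperUsingScratch are hypothetical stand-ins for the assembler's temp-register pool and a macro-assembler helper such as LsrPair that may acquire a temp register internally.

#include <cassert>
#include <cstdint>

// The lone assembler temp register (think "ip" on arm). Everyone who
// calls Acquire() gets the same storage.
struct ScratchPool {
  uint32_t reg = 0;
  uint32_t& Acquire() { return reg; }
};

// Stand-in for a macro-assembler helper that acquires the scratch
// register for its own intermediate result.
void HelperUsingScratch(ScratchPool& pool, uint32_t x) {
  uint32_t& tmp = pool.Acquire();
  tmp = x * 2;  // silently overwrites whatever the caller parked there
}

int main() {
  ScratchPool pool;
  uint32_t& parked = pool.Acquire();
  parked = 42;                    // caller parks a live value in the temp
  HelperUsingScratch(pool, 7);    // helper reuses the same temp register
  assert(parked == 14);           // the 42 is gone, clobbered with 7 * 2
}

Liftoff's cache registers, by contrast, are tracked by LiftoffAssembler itself, so handing a value to one of them can at worst force a spill, never a silent clobber.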
@@ -139,45 +139,34 @@ inline void I64Binop(LiftoffAssembler* assm, LiftoffRegister dst,
   }
 }
 
-inline Register GetNonAliasingRegister(LiftoffAssembler* assm,
-                                       UseScratchRegisterScope* temps,
-                                       Register src, Register alternative,
-                                       Register src_cannot_alias,
-                                       Register alternative_cannot_alias) {
-  if (src != src_cannot_alias) return src;
-  Register result =
-      alternative == alternative_cannot_alias ? temps->Acquire() : alternative;
-  assm->TurboAssembler::Move(result, src);
-  return result;
-}
-
 template <void (TurboAssembler::*op)(Register, Register, Register, Register,
                                      Register),
           bool is_left_shift>
 inline void I64Shiftop(LiftoffAssembler* assm, LiftoffRegister dst,
                        LiftoffRegister src, Register amount,
                        LiftoffRegList pinned) {
-  // safe_amount_reg is the register in which the register holding the shift
-  // amount can be held without being clobbered, thus the original register
-  // holding the shift amount can be moved into it if required.
-  Register safe_amount_reg = is_left_shift ? dst.low_gp() : dst.high_gp();
-  Register other_reg = is_left_shift ? dst.high_gp() : dst.low_gp();
-  pinned.set(other_reg);
-  pinned.set(src.low_gp());
-  pinned.set(src.high_gp());
-  Register scratch = assm->GetUnusedRegister(kGpReg, pinned).gp();
-  assm->and_(scratch, amount, Operand(0x3F));
-  UseScratchRegisterScope temps(assm);
-  if (is_left_shift) {
-    Register src_low = GetNonAliasingRegister(
-        assm, &temps, src.low_gp(), safe_amount_reg, other_reg, src.high_gp());
-    (assm->*op)(dst.low_gp(), dst.high_gp(), src_low, src.high_gp(), scratch);
-  } else {
-    Register src_high = GetNonAliasingRegister(
-        assm, &temps, src.high_gp(), safe_amount_reg, other_reg, src.low_gp());
-    (assm->*op)(dst.low_gp(), dst.high_gp(), src.low_gp(), src_high, scratch);
-  }
+  Register src_low = src.low_gp();
+  Register src_high = src.high_gp();
+  Register dst_low = dst.low_gp();
+  Register dst_high = dst.high_gp();
+  // Left shifts write {dst_high} then {dst_low}; right shifts write {dst_low}
+  // then {dst_high}.
+  Register clobbered_dst_reg = is_left_shift ? dst_high : dst_low;
+  pinned.set(clobbered_dst_reg);
+  pinned.set(src);
+  Register amount_capped =
+      pinned.set(assm->GetUnusedRegister(kGpReg, pinned)).gp();
+  assm->and_(amount_capped, amount, Operand(0x3F));
+
+  // Ensure that writing the first half of {dst} does not overwrite the still
+  // needed half of {src}.
+  Register* later_src_reg = is_left_shift ? &src_low : &src_high;
+  if (*later_src_reg == clobbered_dst_reg) {
+    *later_src_reg = assm->GetUnusedRegister(kGpReg, pinned).gp();
+    assm->TurboAssembler::Move(*later_src_reg, clobbered_dst_reg);
+  }
+
+  (assm->*op)(dst_low, dst_high, src_low, src_high, amount_capped);
 }
 
 inline FloatRegister GetFloatRegister(DoubleRegister reg) {
@@ -873,8 +862,13 @@ void LiftoffAssembler::emit_i64_shr(LiftoffRegister dst, LiftoffRegister src,
                                     int amount) {
   DCHECK(is_uint6(amount));
-  UseScratchRegisterScope temps(this);
-  Register src_high = liftoff::GetNonAliasingRegister(
-      this, &temps, src.high_gp(), dst.high_gp(), dst.low_gp(), src.low_gp());
+  Register src_high = src.high_gp();
+  // {src.high_gp()} will still be needed after writing {dst.low_gp()}.
+  if (src_high == dst.low_gp()) {
+    src_high = GetUnusedRegister(kGpReg).gp();
+    TurboAssembler::Move(src_high, dst.low_gp());
+  }
   LsrPair(dst.low_gp(), dst.high_gp(), src.low_gp(), src_high, amount);
 }
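To make the hazard concrete, here is a minimal standalone sketch, plain C++ rather than V8 code, of the aliasing problem both hunks guard against. ShrPairEmu is a hypothetical stand-in for TurboAssembler::LsrPair; references model physical registers, so the destination and source halves can alias.

#include <cassert>
#include <cstdint>

// Emulates the write order of a pair right-shift (cf. LsrPair): the low
// destination half is written before the high source half is consumed.
// Assumes 0 < amount < 32.
void ShrPairEmu(uint32_t& dst_low, uint32_t& dst_high,
                uint32_t& src_low, uint32_t& src_high, int amount) {
  dst_low = (src_low >> amount) | (src_high << (32 - amount));
  dst_high = src_high >> amount;  // reads src_high after dst_low was written
}

int main() {
  // {0xAABBCCDD11223344} >> 8 should give low 0xDD112233, high 0x00AABBCC.
  uint32_t r0 = 0xAABBCCDD, r1 = 0, r2 = 0x11223344;

  // Aliased: dst_low and src_high are both r0, so the first write clobbers
  // the input and the high half comes out wrong.
  ShrPairEmu(r0, r1, r2, r0, 8);
  assert(r1 != 0x00AABBCC);

  // The commit's fix, in miniature: move the still-needed half into a free
  // register first (GetUnusedRegister + Move), then shift.
  r0 = 0xAABBCCDD;
  uint32_t scratch = r0;
  ShrPairEmu(r0, r1, r2, scratch, 8);
  assert(r0 == 0xDD112233 && r1 == 0x00AABBCC);
}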
new file: test/mjsunit/regress/wasm/regress-922933.js
// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

load('test/mjsunit/wasm/wasm-constants.js');
load('test/mjsunit/wasm/wasm-module-builder.js');

const builder = new WasmModuleBuilder();
const sig = builder.addType(makeSig([kWasmI64], [kWasmI64]));
builder.addFunction(undefined, sig)
  .addLocals({i32_count: 14}).addLocals({i64_count: 17}).addLocals({f32_count: 14})
  .addBody([
    kExprBlock, kWasmStmt,
      kExprBr, 0x00,
      kExprEnd,
    kExprBlock, kWasmStmt,
      kExprI32Const, 0x00,
      kExprSetLocal, 0x09,
      kExprI32Const, 0x00,
      kExprIf, kWasmStmt,
        kExprBlock, kWasmStmt,
          kExprI32Const, 0x00,
          kExprSetLocal, 0x0a,
          kExprBr, 0x00,
          kExprEnd,
        kExprBlock, kWasmStmt,
          kExprBlock, kWasmStmt,
            kExprGetLocal, 0x00,
            kExprSetLocal, 0x12,
            kExprBr, 0x00,
            kExprEnd,
          kExprGetLocal, 0x16,
          kExprSetLocal, 0x0f,
          kExprGetLocal, 0x0f,
          kExprSetLocal, 0x17,
          kExprGetLocal, 0x0f,
          kExprSetLocal, 0x18,
          kExprGetLocal, 0x17,
          kExprGetLocal, 0x18,
          kExprI64ShrS,
          kExprSetLocal, 0x19,
          kExprUnreachable,
          kExprEnd,
        kExprUnreachable,
        kExprElse,
        kExprUnreachable,
        kExprEnd,
      kExprUnreachable,
      kExprEnd,
    kExprUnreachable
  ]);
builder.instantiate();
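Judging by its shape, this test is a fuzzer-reduced case from the chromium:922933 report: it shuffles an i64 value through several locals and then executes kExprI64ShrS, which in Liftoff on 32-bit arm takes exactly the register-pair shift path patched above.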