Commit 8e755eaa authored by Clemens Backes, committed by Commit Bot

[liftoff] Inline simple cmp op macros

This increases source code size, but the inlined code is simple and
repetitive. Removing the macros reduces complexity.

R=zhin@chromium.org

Bug: v8:10364
Change-Id: I989c4d78aaebe00c7fbb525d88a6670cd0fe3b92
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2141732
Reviewed-by: Zhi An Ng <zhin@chromium.org>
Commit-Queue: Clemens Backes <clemensb@chromium.org>
Cr-Commit-Position: refs/heads/master@{#67078}
parent db8491c3
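The change below replaces macro-generated switch cases with hand-written
ones. As a rough illustration of the pattern (a minimal sketch using
hypothetical stand-ins, not the actual LiftoffAssembler API): each
comparison opcode's case binds its condition to the type's set_cond
emitter via BindFirst and hands the result to a shared EmitBinOp helper.

// A minimal, self-contained sketch (not V8 code) of the pattern being
// inlined in this commit. All names here (Condition, Opcode,
// emit_i64_set_cond, BindFirst, EmitBinOp) are simplified stand-ins.
#include <cstdint>
#include <functional>
#include <iostream>
#include <utility>

enum Condition { kEqual, kUnequal, kSignedLessThan };
enum Opcode { kExprI64Eq, kExprI64Ne, kExprI64LtS };

// Stand-in for LiftoffAssembler::emit_i64_set_cond: "sets" the i32 result
// of comparing two i64 operands under the given condition.
void emit_i64_set_cond(Condition cond, int64_t lhs, int64_t rhs) {
  bool result = cond == kEqual     ? lhs == rhs
                : cond == kUnequal ? lhs != rhs
                                   : lhs < rhs;
  std::cout << "i64.set_cond -> " << result << '\n';
}

// Stand-in for BindFirst: fixes the first argument of a callable and
// forwards the rest, so the condition is baked in at the case site.
template <typename F, typename Arg>
auto BindFirst(F f, Arg arg) {
  return [f, arg](auto&&... rest) {
    return f(arg, std::forward<decltype(rest)>(rest)...);
  };
}

// Stand-in for EmitBinOp<kI64, kI32>: supplies two operands and runs the
// bound emitter on them.
void EmitBinOp(const std::function<void(int64_t, int64_t)>& emit) {
  emit(3, 4);
}

// The shape of the inlined switch: one explicit case per opcode, spelled
// out where the old CASE_I64_CMPOP macro would have generated it.
void EmitComparison(Opcode opcode) {
  switch (opcode) {
    case kExprI64Eq:
      return EmitBinOp(BindFirst(emit_i64_set_cond, kEqual));
    case kExprI64Ne:
      return EmitBinOp(BindFirst(emit_i64_set_cond, kUnequal));
    case kExprI64LtS:
      return EmitBinOp(BindFirst(emit_i64_set_cond, kSignedLessThan));
  }
}

int main() {
  EmitComparison(kExprI64LtS);  // prints "i64.set_cond -> 1" (3 < 4)
}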
@@ -1199,18 +1199,6 @@ class LiftoffCompiler {
     return EmitBinOp<kI32, kI32>(                                            \
         BindFirst(&LiftoffAssembler::emit_i32_set_cond,                      \
                   GetCompareCondition(kExpr##opcode)));
-#define CASE_I64_CMPOP(opcode, cond)                                         \
-  case kExpr##opcode:                                                        \
-    return EmitBinOp<kI64, kI32>(                                            \
-        BindFirst(&LiftoffAssembler::emit_i64_set_cond, cond));
-#define CASE_F32_CMPOP(opcode, cond)                                         \
-  case kExpr##opcode:                                                        \
-    return EmitBinOp<kF32, kI32>(                                            \
-        BindFirst(&LiftoffAssembler::emit_f32_set_cond, cond));
-#define CASE_F64_CMPOP(opcode, cond)                                         \
-  case kExpr##opcode:                                                        \
-    return EmitBinOp<kF64, kI32>(                                            \
-        BindFirst(&LiftoffAssembler::emit_f64_set_cond, cond));
 #define CASE_I64_SHIFTOP(opcode, fn)                                         \
   case kExpr##opcode:                                                        \
     return EmitBinOpImm<kI64, kI64>(                                         \
@@ -1275,28 +1263,72 @@ class LiftoffCompiler {
       case kExprI64Xor:
         return EmitBinOpImm<kI64, kI64>(&LiftoffAssembler::emit_i64_xor,
                                         &LiftoffAssembler::emit_i64_xori);
-      CASE_I64_CMPOP(I64Eq, kEqual)
-      CASE_I64_CMPOP(I64Ne, kUnequal)
-      CASE_I64_CMPOP(I64LtS, kSignedLessThan)
-      CASE_I64_CMPOP(I64LtU, kUnsignedLessThan)
-      CASE_I64_CMPOP(I64GtS, kSignedGreaterThan)
-      CASE_I64_CMPOP(I64GtU, kUnsignedGreaterThan)
-      CASE_I64_CMPOP(I64LeS, kSignedLessEqual)
-      CASE_I64_CMPOP(I64LeU, kUnsignedLessEqual)
-      CASE_I64_CMPOP(I64GeS, kSignedGreaterEqual)
-      CASE_I64_CMPOP(I64GeU, kUnsignedGreaterEqual)
-      CASE_F32_CMPOP(F32Eq, kEqual)
-      CASE_F32_CMPOP(F32Ne, kUnequal)
-      CASE_F32_CMPOP(F32Lt, kUnsignedLessThan)
-      CASE_F32_CMPOP(F32Gt, kUnsignedGreaterThan)
-      CASE_F32_CMPOP(F32Le, kUnsignedLessEqual)
-      CASE_F32_CMPOP(F32Ge, kUnsignedGreaterEqual)
-      CASE_F64_CMPOP(F64Eq, kEqual)
-      CASE_F64_CMPOP(F64Ne, kUnequal)
-      CASE_F64_CMPOP(F64Lt, kUnsignedLessThan)
-      CASE_F64_CMPOP(F64Gt, kUnsignedGreaterThan)
-      CASE_F64_CMPOP(F64Le, kUnsignedLessEqual)
-      CASE_F64_CMPOP(F64Ge, kUnsignedGreaterEqual)
+      case kExprI64Eq:
+        return EmitBinOp<kI64, kI32>(
+            BindFirst(&LiftoffAssembler::emit_i64_set_cond, kEqual));
+      case kExprI64Ne:
+        return EmitBinOp<kI64, kI32>(
+            BindFirst(&LiftoffAssembler::emit_i64_set_cond, kUnequal));
+      case kExprI64LtS:
+        return EmitBinOp<kI64, kI32>(
+            BindFirst(&LiftoffAssembler::emit_i64_set_cond, kSignedLessThan));
+      case kExprI64LtU:
+        return EmitBinOp<kI64, kI32>(
+            BindFirst(&LiftoffAssembler::emit_i64_set_cond, kUnsignedLessThan));
+      case kExprI64GtS:
+        return EmitBinOp<kI64, kI32>(BindFirst(
+            &LiftoffAssembler::emit_i64_set_cond, kSignedGreaterThan));
+      case kExprI64GtU:
+        return EmitBinOp<kI64, kI32>(BindFirst(
+            &LiftoffAssembler::emit_i64_set_cond, kUnsignedGreaterThan));
+      case kExprI64LeS:
+        return EmitBinOp<kI64, kI32>(
+            BindFirst(&LiftoffAssembler::emit_i64_set_cond, kSignedLessEqual));
+      case kExprI64LeU:
+        return EmitBinOp<kI64, kI32>(BindFirst(
+            &LiftoffAssembler::emit_i64_set_cond, kUnsignedLessEqual));
+      case kExprI64GeS:
+        return EmitBinOp<kI64, kI32>(BindFirst(
+            &LiftoffAssembler::emit_i64_set_cond, kSignedGreaterEqual));
+      case kExprI64GeU:
+        return EmitBinOp<kI64, kI32>(BindFirst(
+            &LiftoffAssembler::emit_i64_set_cond, kUnsignedGreaterEqual));
+      case kExprF32Eq:
+        return EmitBinOp<kF32, kI32>(
+            BindFirst(&LiftoffAssembler::emit_f32_set_cond, kEqual));
+      case kExprF32Ne:
+        return EmitBinOp<kF32, kI32>(
+            BindFirst(&LiftoffAssembler::emit_f32_set_cond, kUnequal));
+      case kExprF32Lt:
+        return EmitBinOp<kF32, kI32>(
+            BindFirst(&LiftoffAssembler::emit_f32_set_cond, kUnsignedLessThan));
+      case kExprF32Gt:
+        return EmitBinOp<kF32, kI32>(BindFirst(
+            &LiftoffAssembler::emit_f32_set_cond, kUnsignedGreaterThan));
+      case kExprF32Le:
+        return EmitBinOp<kF32, kI32>(BindFirst(
+            &LiftoffAssembler::emit_f32_set_cond, kUnsignedLessEqual));
+      case kExprF32Ge:
+        return EmitBinOp<kF32, kI32>(BindFirst(
+            &LiftoffAssembler::emit_f32_set_cond, kUnsignedGreaterEqual));
+      case kExprF64Eq:
+        return EmitBinOp<kF64, kI32>(
+            BindFirst(&LiftoffAssembler::emit_f64_set_cond, kEqual));
+      case kExprF64Ne:
+        return EmitBinOp<kF64, kI32>(
+            BindFirst(&LiftoffAssembler::emit_f64_set_cond, kUnequal));
+      case kExprF64Lt:
+        return EmitBinOp<kF64, kI32>(
+            BindFirst(&LiftoffAssembler::emit_f64_set_cond, kUnsignedLessThan));
+      case kExprF64Gt:
+        return EmitBinOp<kF64, kI32>(BindFirst(
+            &LiftoffAssembler::emit_f64_set_cond, kUnsignedGreaterThan));
+      case kExprF64Le:
+        return EmitBinOp<kF64, kI32>(BindFirst(
+            &LiftoffAssembler::emit_f64_set_cond, kUnsignedLessEqual));
+      case kExprF64Ge:
+        return EmitBinOp<kF64, kI32>(BindFirst(
+            &LiftoffAssembler::emit_f64_set_cond, kUnsignedGreaterEqual));
       case kExprI32Shl:
         return EmitBinOpImm<kI32, kI32>(&LiftoffAssembler::emit_i32_shl,
                                         &LiftoffAssembler::emit_i32_shli);
@@ -1436,9 +1468,6 @@ class LiftoffCompiler {
     UNREACHABLE();
   }
 #undef CASE_I32_CMPOP
-#undef CASE_I64_CMPOP
-#undef CASE_F32_CMPOP
-#undef CASE_F64_CMPOP
 #undef CASE_I64_SHIFTOP
 #undef CASE_CCALL_BINOP
 }