Commit 25596e80 authored by Jakob Gruber, committed by Commit Bot

Define a constant for the return address stack slot count

... to avoid the repeated pattern of calculating it.

Bug: v8:8888
Change-Id: I4af5264aae6cfb8b6232b5aaf9ceb2cb568c29d0
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2362692
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Commit-Queue: Clemens Backes <clemensb@chromium.org>
Auto-Submit: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Clemens Backes <clemensb@chromium.org>
Cr-Commit-Position: refs/heads/master@{#69469}
parent 817c75e5
...@@ -199,6 +199,8 @@ ...@@ -199,6 +199,8 @@
#else #else
#define V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK false #define V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK false
#endif #endif
constexpr int kReturnAddressStackSlotCount =
V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0;
// Number of bits to represent the page size for paged spaces. // Number of bits to represent the page size for paged spaces.
#if defined(V8_TARGET_ARCH_PPC) || defined(V8_TARGET_ARCH_PPC64) #if defined(V8_TARGET_ARCH_PPC) || defined(V8_TARGET_ARCH_PPC64)
......
...@@ -597,6 +597,8 @@ bool CodeGenerator::IsValidPush(InstructionOperand source, ...@@ -597,6 +597,8 @@ bool CodeGenerator::IsValidPush(InstructionOperand source,
void CodeGenerator::GetPushCompatibleMoves(Instruction* instr, void CodeGenerator::GetPushCompatibleMoves(Instruction* instr,
PushTypeFlags push_type, PushTypeFlags push_type,
ZoneVector<MoveOperands*>* pushes) { ZoneVector<MoveOperands*>* pushes) {
static constexpr int first_push_compatible_index =
kReturnAddressStackSlotCount;
pushes->clear(); pushes->clear();
for (int i = Instruction::FIRST_GAP_POSITION; for (int i = Instruction::FIRST_GAP_POSITION;
i <= Instruction::LAST_GAP_POSITION; ++i) { i <= Instruction::LAST_GAP_POSITION; ++i) {
...@@ -607,8 +609,6 @@ void CodeGenerator::GetPushCompatibleMoves(Instruction* instr, ...@@ -607,8 +609,6 @@ void CodeGenerator::GetPushCompatibleMoves(Instruction* instr,
for (auto move : *parallel_move) { for (auto move : *parallel_move) {
InstructionOperand source = move->source(); InstructionOperand source = move->source();
InstructionOperand destination = move->destination(); InstructionOperand destination = move->destination();
int first_push_compatible_index =
V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0;
// If there are any moves from slots that will be overridden by pushes, // If there are any moves from slots that will be overridden by pushes,
// then the full gap resolver must be used since optimization with // then the full gap resolver must be used since optimization with
// pushes don't participate in the parallel move and might clobber // pushes don't participate in the parallel move and might clobber
......
...@@ -2972,9 +2972,8 @@ void InstructionSelector::VisitTailCall(Node* node) { ...@@ -2972,9 +2972,8 @@ void InstructionSelector::VisitTailCall(Node* node) {
int optional_padding_slot = callee->GetFirstUnusedStackSlot(); int optional_padding_slot = callee->GetFirstUnusedStackSlot();
buffer.instruction_args.push_back(g.TempImmediate(optional_padding_slot)); buffer.instruction_args.push_back(g.TempImmediate(optional_padding_slot));
int first_unused_stack_slot = const int first_unused_stack_slot =
(V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? true : false) + kReturnAddressStackSlotCount + stack_param_delta;
stack_param_delta;
buffer.instruction_args.push_back(g.TempImmediate(first_unused_stack_slot)); buffer.instruction_args.push_back(g.TempImmediate(first_unused_stack_slot));
// Emit the tailcall instruction. // Emit the tailcall instruction.
......
...@@ -1137,8 +1137,7 @@ class CodeGeneratorTester { ...@@ -1137,8 +1137,7 @@ class CodeGeneratorTester {
sequence->AddInstruction(Instruction::New(zone_, kArchPrepareTailCall)); sequence->AddInstruction(Instruction::New(zone_, kArchPrepareTailCall));
// We use either zero or one slots. // We use either zero or one slots.
int first_unused_stack_slot = static constexpr int first_unused_stack_slot = kReturnAddressStackSlotCount;
V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0;
int optional_padding_slot = first_unused_stack_slot; int optional_padding_slot = first_unused_stack_slot;
InstructionOperand callee[] = { InstructionOperand callee[] = {
AllocatedOperand(LocationOperand::REGISTER, AllocatedOperand(LocationOperand::REGISTER,
...@@ -1323,7 +1322,7 @@ TEST(AssembleTailCallGap) { ...@@ -1323,7 +1322,7 @@ TEST(AssembleTailCallGap) {
MachineRepresentation::kTagged, -1); MachineRepresentation::kTagged, -1);
// Avoid slot 0 for architectures which use it to store the return address. // Avoid slot 0 for architectures which use it to store the return address.
int first_slot = V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0; static constexpr int first_slot = kReturnAddressStackSlotCount;
auto slot_0 = AllocatedOperand(LocationOperand::STACK_SLOT, auto slot_0 = AllocatedOperand(LocationOperand::STACK_SLOT,
MachineRepresentation::kTagged, first_slot); MachineRepresentation::kTagged, first_slot);
auto slot_1 = auto slot_1 =
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment