Commit f7f370d2 authored by Milad Farazmand, committed by Commit Bot

PPC/s390: [compiler] Refactor stack check handling

Port 0aa204fe

Original Commit Message:

    This CL unifies how stack checks are handled in the Turbofan pipeline
    across architectures, in preparation for properly handling stack
    overflows caused by deoptimization in follow-up work. It will also
    open up possibilities to simplify related logic.

    How this used to work: JSStackCheck was lowered to a UintLessThan
    with the stack pointer (sp) and stack limit as inputs. On x64 and ia32,
    this node pattern was later recognized during instruction selection
    and rewritten to dedicated operators. On other platforms, including
    arm and arm64, special logic exists to avoid useless
    register-to-register moves when accessing the sp.

    This CL introduces a new StackPointerGreaterThan operator, which takes
    the stack limit as its sole input. This is what JSStackCheck now lowers
    to. This is threaded through to code generation, where we emit the
    appropriate code (in the future, we will apply an additional offset to
    the sp here).

    In follow-up CLs, we can remove or replace remaining uses of
    LoadStackPointer in CSA, Wasm, and the interpreter; and then remove
    the LoadStackPointer operator, related node matchers, related register
    constraints, and the pseudo-smi stack limit roots.
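
Illustrative note, not part of the CL: the paragraphs above describe the old UintLessThan(sp, stack limit) lowering and the new StackPointerGreaterThan(stack limit) operator, which takes only the limit as input. The standalone C++ sketch below only mirrors the comparison each scheme performs; stack_limit and CurrentStackPointer() are made-up stand-ins for illustration, not V8 APIs.

    #include <cstdint>
    #include <cstdio>

    // Hypothetical stack limit; a real VM derives this from the thread's
    // stack bounds (V8 keeps it on the isolate), not from a global.
    static uintptr_t stack_limit;

    // Approximate the current stack pointer with the address of a local.
    inline uintptr_t CurrentStackPointer() {
      int marker;
      return reinterpret_cast<uintptr_t>(&marker);
    }

    // Old scheme: JSStackCheck lowered to UintLessThan(sp, limit), i.e. the
    // overflow path is taken when sp < limit (unsigned compare).
    bool OldCheckFails() { return CurrentStackPointer() < stack_limit; }

    // New scheme: StackPointerGreaterThan(limit) takes only the limit; the
    // check passes while sp > limit. On PPC/s390 this becomes an unsigned
    // compare of sp against the limit register (cmpl / CmpLogicalP below).
    bool NewCheckPasses() { return CurrentStackPointer() > stack_limit; }

    int main() {
      // Pretend the usable stack ends 64 KiB below the current frame.
      stack_limit = CurrentStackPointer() - 64 * 1024;
      std::printf("old fails: %d, new passes: %d\n", OldCheckFails(),
                  NewCheckPasses());
      return 0;
    }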

R=jgruber@chromium.org, joransiu@ca.ibm.com, jyan@ca.ibm.com, michael_dawson@ca.ibm.com
BUG=
LOG=N

Change-Id: I175c110d30190bb543001b6fa77cd65cf22e5874
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1748002
Reviewed-by: Junliang Yan <jyan@ca.ibm.com>
Commit-Queue: Milad Farazmand <miladfar@ca.ibm.com>
Cr-Commit-Position: refs/heads/master@{#63167}
parent 5b2ab2f6
@@ -1129,6 +1129,12 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
         __ mr(i.OutputRegister(), fp);
       }
       break;
+    case kArchStackPointerGreaterThan: {
+      constexpr size_t kValueIndex = 0;
+      DCHECK(instr->InputAt(kValueIndex)->IsRegister());
+      __ cmpl(sp, i.InputRegister(kValueIndex));
+      break;
+    }
     case kArchTruncateDoubleToI:
       __ TruncateDoubleToI(isolate(), zone(), i.OutputRegister(),
                            i.InputDoubleRegister(0), DetermineStubCallMode());
...
@@ -69,6 +69,7 @@ class PPCOperandGenerator final : public OperandGenerator {
   // Use the stack pointer if the node is LoadStackPointer, otherwise assign a
   // register.
   InstructionOperand UseRegisterOrStackPointer(Node* node) {
+    // TODO(miladfar): Remove this once LoadStackPointer has been removed.
     if (node->opcode() == IrOpcode::kLoadStackPointer) {
       return LocationOperand(LocationOperand::EXPLICIT,
                              LocationOperand::REGISTER,
@@ -559,6 +560,15 @@ void InstructionSelector::VisitWord32Xor(Node* node) {
   }
 }
 
+void InstructionSelector::VisitStackPointerGreaterThan(
+    Node* node, FlagsContinuation* cont) {
+  Node* const value = node->InputAt(0);
+  InstructionCode opcode = kArchStackPointerGreaterThan;
+
+  PPCOperandGenerator g(this);
+  EmitWithContinuation(opcode, g.UseRegister(value), cont);
+}
+
 #if V8_TARGET_ARCH_PPC64
 void InstructionSelector::VisitWord64Xor(Node* node) {
   PPCOperandGenerator g(this);
@@ -1640,6 +1650,9 @@ void InstructionSelector::VisitWordCompareZero(Node* user, Node* value,
       // case IrOpcode::kWord64Shr:
       // case IrOpcode::kWord64Ror:
 #endif
+      case IrOpcode::kStackPointerGreaterThan:
+        cont->OverwriteAndNegateIfEqual(kStackPointerGreaterThanCondition);
+        return VisitStackPointerGreaterThan(value, cont);
       default:
         break;
   }
...
@@ -1600,6 +1600,12 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
         __ LoadRR(i.OutputRegister(), fp);
       }
       break;
+    case kArchStackPointerGreaterThan: {
+      constexpr size_t kValueIndex = 0;
+      DCHECK(instr->InputAt(kValueIndex)->IsRegister());
+      __ CmpLogicalP(sp, i.InputRegister(kValueIndex));
+      break;
+    }
     case kArchTruncateDoubleToI:
       __ TruncateDoubleToI(isolate(), zone(), i.OutputRegister(),
                            i.InputDoubleRegister(0), DetermineStubCallMode());
...
@@ -247,6 +247,7 @@ class S390OperandGenerator final : public OperandGenerator {
   // Use the stack pointer if the node is LoadStackPointer, otherwise assign a
   // register.
   InstructionOperand UseRegisterOrStackPointer(Node* node) {
+    // TODO(miladfar): Remove this once LoadStackPointer has been removed.
     if (node->opcode() == IrOpcode::kLoadStackPointer) {
       return LocationOperand(LocationOperand::EXPLICIT,
                              LocationOperand::REGISTER,
@@ -838,6 +839,15 @@ void InstructionSelector::VisitUnalignedLoad(Node* node) { UNREACHABLE(); }
 // Architecture supports unaligned access, therefore VisitStore is used instead
 void InstructionSelector::VisitUnalignedStore(Node* node) { UNREACHABLE(); }
 
+void InstructionSelector::VisitStackPointerGreaterThan(
+    Node* node, FlagsContinuation* cont) {
+  Node* const value = node->InputAt(0);
+  InstructionCode opcode = kArchStackPointerGreaterThan;
+
+  S390OperandGenerator g(this);
+  EmitWithContinuation(opcode, g.UseRegister(value), cont);
+}
+
 #if 0
 static inline bool IsContiguousMask32(uint32_t value, int* mb, int* me) {
   int mask_width = base::bits::CountPopulation(value);
@@ -2009,6 +2019,9 @@ void InstructionSelector::VisitWordCompareZero(Node* user, Node* value,
         // doesn't generate cc, so ignore
         break;
 #endif
+      case IrOpcode::kStackPointerGreaterThan:
+        cont->OverwriteAndNegateIfEqual(kStackPointerGreaterThanCondition);
+        return VisitStackPointerGreaterThan(value, cont);
       default:
         break;
   }
...