Commit 0aa204fe authored by Jakob Gruber, committed by Commit Bot

[compiler] Refactor stack check handling

This CL unifies how stack checks are handled in the Turbofan pipeline
across architectures, in preparation for properly handling stack
overflows caused by deoptimization in follow-up work. It will also
open up possibilities to simplify related logic.

How this used to work: JSStackCheck was lowered to a UintLessThan
with the stack pointer (sp) and stack limit as inputs. On x64 and ia32,
this node pattern was later recognized during instruction selection
and rewritten to dedicated operators. On other platforms, including
arm and arm64, special logic exists to avoid useless
register-to-register moves when accessing the sp.
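
For reference, the old lowering in JSGenericLowering::LowerJSStackCheck built
roughly this node pattern (simplified; "limit" is the loaded stack limit):

  Node* pointer = graph()->NewNode(machine()->LoadStackPointer());
  Node* check = graph()->NewNode(machine()->UintLessThan(), limit, pointer);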

This CL introduces a new StackPointerGreaterThan operator, which takes
the stack limit as its sole input and is what JSStackCheck now lowers
to. The operator is threaded through to code generation, where we emit
the appropriate comparison against the sp (in the future, we will apply
an additional offset to the sp here).
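
With this CL, the lowering becomes a single node that takes only the limit;
the branch on the check is unchanged (simplified sketch of the new pattern):

  Node* check = graph()->NewNode(machine()->StackPointerGreaterThan(), limit);
  Node* branch =
      graph()->NewNode(common()->Branch(BranchHint::kTrue), check, control);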

In follow-up CLs, we can remove or replace remaining uses of
LoadStackPointer in CSA, Wasm, and the interpreter; and then remove
the LoadStackPointer operator, related node matchers, related register
constraints, and the pseudo-smi stack limit roots.

Bug: v8:9534
Change-Id: I0e3f1beeed65b163c4ee5787600bed8c3cc671e1
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1738863
Reviewed-by: Georg Neis <neis@chromium.org>
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Cr-Commit-Position: refs/heads/master@{#63156}
parent 408bafcb
@@ -935,6 +935,12 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
__ mov(i.OutputRegister(), fp);
}
break;
case kArchStackPointerGreaterThan: {
constexpr size_t kValueIndex = 0;
DCHECK(instr->InputAt(kValueIndex)->IsRegister());
__ cmp(sp, i.InputRegister(kValueIndex));
break;
}
case kArchTruncateDoubleToI:
__ TruncateDoubleToI(isolate(), zone(), i.OutputRegister(),
i.InputDoubleRegister(0), DetermineStubCallMode());
@@ -78,6 +78,7 @@ class ArmOperandGenerator : public OperandGenerator {
// Use the stack pointer if the node is LoadStackPointer, otherwise assign a
// register.
InstructionOperand UseRegisterOrStackPointer(Node* node) {
// TODO(jgruber): Remove this once LoadStackPointer has been removed.
if (node->opcode() == IrOpcode::kLoadStackPointer) {
return LocationOperand(LocationOperand::EXPLICIT,
LocationOperand::REGISTER,
@@ -897,6 +898,15 @@ void InstructionSelector::VisitWord32Xor(Node* node) {
VisitBinop(this, node, kArmEor, kArmEor);
}
void InstructionSelector::VisitStackPointerGreaterThan(
Node* node, FlagsContinuation* cont) {
Node* const value = node->InputAt(0);
InstructionCode opcode = kArchStackPointerGreaterThan;
ArmOperandGenerator g(this);
EmitWithContinuation(opcode, g.UseRegister(value), cont);
}
namespace {
template <typename TryMatchShift>
@@ -1858,6 +1868,9 @@ void InstructionSelector::VisitWordCompareZero(Node* user, Node* value,
return VisitShift(this, value, TryMatchLSR, cont);
case IrOpcode::kWord32Ror:
return VisitShift(this, value, TryMatchROR, cont);
case IrOpcode::kStackPointerGreaterThan:
cont->OverwriteAndNegateIfEqual(kStackPointerGreaterThanCondition);
return VisitStackPointerGreaterThan(value, cont);
default:
break;
}
@@ -841,6 +841,12 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
__ mov(i.OutputRegister(), fp);
}
break;
case kArchStackPointerGreaterThan: {
constexpr size_t kValueIndex = 0;
DCHECK(instr->InputAt(kValueIndex)->IsRegister());
__ Cmp(sp, i.InputRegister(kValueIndex));
break;
}
case kArchTruncateDoubleToI:
__ TruncateDoubleToI(isolate(), zone(), i.OutputRegister(),
i.InputDoubleRegister(0), DetermineStubCallMode());
@@ -51,6 +51,7 @@ class Arm64OperandGenerator final : public OperandGenerator {
// Use the stack pointer if the node is LoadStackPointer, otherwise assign a
// register.
InstructionOperand UseRegisterOrStackPointer(Node* node, bool sp_allowed) {
// TODO(jgruber): Remove this once LoadStackPointer has been removed.
if (sp_allowed && node->opcode() == IrOpcode::kLoadStackPointer)
return LocationOperand(LocationOperand::EXPLICIT,
LocationOperand::REGISTER,
@@ -1014,6 +1015,15 @@ void InstructionSelector::VisitWord64Shl(Node* node) {
VisitRRO(this, kArm64Lsl, node, kShift64Imm);
}
void InstructionSelector::VisitStackPointerGreaterThan(
Node* node, FlagsContinuation* cont) {
Node* const value = node->InputAt(0);
InstructionCode opcode = kArchStackPointerGreaterThan;
Arm64OperandGenerator g(this);
EmitWithContinuation(opcode, g.UseRegister(value), cont);
}
namespace {
bool TryEmitBitfieldExtract32(InstructionSelector* selector, Node* node) {
@@ -1841,6 +1851,7 @@ void VisitWordCompare(InstructionSelector* selector, Node* node,
Node* left = node->InputAt(0);
Node* right = node->InputAt(1);
// TODO(jgruber): Remove this once LoadStackPointer has been removed.
if (right->opcode() == IrOpcode::kLoadStackPointer ||
g.CanBeImmediate(left, immediate_mode)) {
if (!commutative) cont->Commute();
@@ -2481,6 +2492,9 @@ void InstructionSelector::VisitWordCompareZero(Node* user, Node* value,
case IrOpcode::kWord64And:
return VisitWordCompare(this, value, kArm64Tst, cont, true,
kLogical64Imm);
case IrOpcode::kStackPointerGreaterThan:
cont->OverwriteAndNegateIfEqual(kStackPointerGreaterThanCondition);
return VisitStackPointerGreaterThan(value, cont);
default:
break;
}
@@ -929,6 +929,15 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
__ mov(i.OutputRegister(), ebp);
}
break;
case kArchStackPointerGreaterThan: {
constexpr size_t kValueIndex = 0;
if (AddressingModeField::decode(instr->opcode()) != kMode_None) {
__ cmp(esp, i.MemoryOperand(kValueIndex));
} else {
__ cmp(esp, i.InputRegister(kValueIndex));
}
break;
}
case kArchTruncateDoubleToI: {
auto result = i.OutputRegister();
auto input = i.InputDoubleRegister(0);
@@ -542,6 +542,35 @@ void InstructionSelector::VisitWord32Xor(Node* node) {
}
}
void InstructionSelector::VisitStackPointerGreaterThan(
Node* node, FlagsContinuation* cont) {
Node* const value = node->InputAt(0);
InstructionCode opcode = kArchStackPointerGreaterThan;
DCHECK(cont->IsBranch());
const int effect_level =
GetEffectLevel(cont->true_block()->PredecessorAt(0)->control_input());
IA32OperandGenerator g(this);
if (g.CanBeMemoryOperand(kIA32Cmp, node, value, effect_level)) {
DCHECK_EQ(IrOpcode::kLoad, value->opcode());
// GetEffectiveAddressMemoryOperand can create at most 3 inputs.
static constexpr int kMaxInputCount = 3;
size_t input_count = 0;
InstructionOperand inputs[kMaxInputCount];
AddressingMode addressing_mode =
g.GetEffectiveAddressMemoryOperand(value, inputs, &input_count);
opcode |= AddressingModeField::encode(addressing_mode);
DCHECK_LE(input_count, kMaxInputCount);
EmitWithContinuation(opcode, 0, nullptr, input_count, inputs, cont);
} else {
EmitWithContinuation(opcode, g.UseRegister(value), cont);
}
}
// Shared routine for multiple shift operations.
static inline void VisitShift(InstructionSelector* selector, Node* node,
ArchOpcode opcode) {
@@ -1270,6 +1299,7 @@ void VisitWordCompare(InstructionSelector* selector, Node* node,
void VisitWordCompare(InstructionSelector* selector, Node* node,
FlagsContinuation* cont) {
if (selector->isolate() != nullptr) {
// TODO(jgruber): Remove this once LoadStackPointer has been removed.
StackCheckMatcher<Int32BinopMatcher, IrOpcode::kUint32LessThan> m(
selector->isolate(), node);
if (m.Matched()) {
@@ -1459,6 +1489,9 @@ void InstructionSelector::VisitWordCompareZero(Node* user, Node* value,
return VisitWordCompare(this, value, cont);
case IrOpcode::kWord32And:
return VisitWordCompare(this, value, kIA32Test, cont);
case IrOpcode::kStackPointerGreaterThan:
cont->OverwriteAndNegateIfEqual(kStackPointerGreaterThanCondition);
return VisitStackPointerGreaterThan(value, cont);
default:
break;
}
@@ -95,6 +95,7 @@ inline RecordWriteMode WriteBarrierKindToRecordWriteMode(
V(ArchStoreWithWriteBarrier) \
V(ArchStackSlot) \
V(ArchWordPoisonOnSpeculation) \
V(ArchStackPointerGreaterThan) \
V(Word32AtomicLoadInt8) \
V(Word32AtomicLoadUint8) \
V(Word32AtomicLoadInt16) \
@@ -238,6 +239,9 @@ enum FlagsCondition {
kNegative
};
static constexpr FlagsCondition kStackPointerGreaterThanCondition =
kUnsignedGreaterThan;
inline FlagsCondition NegateFlagsCondition(FlagsCondition condition) {
return static_cast<FlagsCondition>(condition ^ 1);
}
@@ -276,8 +276,10 @@ int InstructionScheduler::GetInstructionFlags(const Instruction* instr) const {
return kNoOpcodeFlags;
case kArchStackPointer:
// ArchStackPointer instruction loads the current stack pointer value and
// must not be reordered with instruction with side effects.
case kArchStackPointerGreaterThan:
// The ArchStackPointer and ArchStackPointerGreaterThan instructions load
// the current stack pointer value and must not be reordered with
// instructions with side effects.
return kIsLoadOperation;
case kArchWordPoisonOnSpeculation:
@@ -1708,6 +1708,8 @@ void InstructionSelector::VisitNode(Node* node) {
return VisitStackSlot(node);
case IrOpcode::kLoadStackPointer:
return VisitLoadStackPointer(node);
case IrOpcode::kStackPointerGreaterThan:
return VisitStackPointerGreaterThan(node);
case IrOpcode::kLoadFramePointer:
return VisitLoadFramePointer(node);
case IrOpcode::kLoadParentFramePointer:
@@ -2159,6 +2161,12 @@ void InstructionSelector::VisitLoadStackPointer(Node* node) {
Emit(kArchStackPointer, g.DefineAsRegister(node));
}
void InstructionSelector::VisitStackPointerGreaterThan(Node* node) {
FlagsContinuation cont =
FlagsContinuation::ForSet(kStackPointerGreaterThanCondition, node);
VisitStackPointerGreaterThan(node, &cont);
}
void InstructionSelector::VisitLoadFramePointer(Node* node) {
OperandGenerator g(this);
Emit(kArchFramePointer, g.DefineAsRegister(node));
@@ -638,6 +638,8 @@ class V8_EXPORT_PRIVATE InstructionSelector final {
void VisitStaticAssert(Node* node);
void VisitDeadValue(Node* node);
void VisitStackPointerGreaterThan(Node* node, FlagsContinuation* cont);
void VisitWordCompareZero(Node* user, Node* value, FlagsContinuation* cont);
void EmitWordPoisonOnSpeculation(Node* node);
@@ -1016,6 +1016,15 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
__ movq(i.OutputRegister(), rbp);
}
break;
case kArchStackPointerGreaterThan: {
constexpr size_t kValueIndex = 0;
if (AddressingModeField::decode(instr->opcode()) != kMode_None) {
__ cmpq(rsp, i.MemoryOperand(kValueIndex));
} else {
__ cmpq(rsp, i.InputRegister(kValueIndex));
}
break;
}
case kArchTruncateDoubleToI: {
auto result = i.OutputRegister();
auto input = i.InputDoubleRegister(0);
@@ -529,6 +529,35 @@ void InstructionSelector::VisitWord64Xor(Node* node) {
}
}
void InstructionSelector::VisitStackPointerGreaterThan(
Node* node, FlagsContinuation* cont) {
Node* const value = node->InputAt(0);
InstructionCode opcode = kArchStackPointerGreaterThan;
DCHECK(cont->IsBranch());
const int effect_level =
GetEffectLevel(cont->true_block()->PredecessorAt(0)->control_input());
X64OperandGenerator g(this);
if (g.CanBeMemoryOperand(kX64Cmp, node, value, effect_level)) {
DCHECK_EQ(IrOpcode::kLoad, value->opcode());
// GetEffectiveAddressMemoryOperand can create at most 3 inputs.
static constexpr int kMaxInputCount = 3;
size_t input_count = 0;
InstructionOperand inputs[kMaxInputCount];
AddressingMode addressing_mode =
g.GetEffectiveAddressMemoryOperand(value, inputs, &input_count);
opcode |= AddressingModeField::encode(addressing_mode);
DCHECK_LE(input_count, kMaxInputCount);
EmitWithContinuation(opcode, 0, nullptr, input_count, inputs, cont);
} else {
EmitWithContinuation(opcode, g.UseRegister(value), cont);
}
}
namespace {
bool TryMergeTruncateInt64ToInt32IntoLoad(InstructionSelector* selector,
@@ -1840,6 +1869,8 @@ void VisitWord64Compare(InstructionSelector* selector, Node* node,
}
}
if (selector->isolate() != nullptr) {
// TODO(jgruber): Remove this matcher once CSA stack checks have been
// updated.
StackCheckMatcher<Int64BinopMatcher, IrOpcode::kUint64LessThan> m(
selector->isolate(), node);
if (m.Matched()) {
@@ -2158,6 +2189,9 @@ void InstructionSelector::VisitWordCompareZero(Node* user, Node* value,
return VisitWordCompare(this, value, kX64Cmp32, cont);
case IrOpcode::kWord32And:
return VisitWordCompare(this, value, kX64Test32, cont);
case IrOpcode::kStackPointerGreaterThan:
cont->OverwriteAndNegateIfEqual(kStackPointerGreaterThanCondition);
return VisitStackPointerGreaterThan(value, cont);
default:
break;
}
@@ -828,9 +828,8 @@ void JSGenericLowering::LowerJSStackCheck(Node* node) {
jsgraph()->ExternalConstant(
ExternalReference::address_of_stack_limit(isolate())),
jsgraph()->IntPtrConstant(0), effect, control);
Node* pointer = graph()->NewNode(machine()->LoadStackPointer());
Node* check = graph()->NewNode(machine()->UintLessThan(), limit, pointer);
Node* check = graph()->NewNode(machine()->StackPointerGreaterThan(), limit);
Node* branch =
graph()->NewNode(common()->Branch(BranchHint::kTrue), check, control);
@@ -89,6 +89,8 @@ MachineType AtomicOpType(Operator const* op) {
return OpParameter<MachineType>(op);
}
// The format is:
// V(Name, properties, value_input_count, control_input_count, output_count)
#define PURE_BINARY_OP_LIST_32(V) \
V(Word32And, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \
V(Word32Or, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \
@@ -112,6 +114,8 @@ MachineType AtomicOpType(Operator const* op) {
V(Uint32Mod, Operator::kNoProperties, 2, 1, 1) \
V(Uint32MulHigh, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)
// The format is:
// V(Name, properties, value_input_count, control_input_count, output_count)
#define PURE_BINARY_OP_LIST_64(V) \
V(Word64And, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \
V(Word64Or, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \
@@ -133,6 +137,8 @@ MachineType AtomicOpType(Operator const* op) {
V(Uint64LessThan, Operator::kNoProperties, 2, 0, 1) \
V(Uint64LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)
// The format is:
// V(Name, properties, value_input_count, control_input_count, output_count)
#define MACHINE_PURE_OP_LIST(V) \
PURE_BINARY_OP_LIST_32(V) \
PURE_BINARY_OP_LIST_64(V) \
@@ -385,8 +391,11 @@ MachineType AtomicOpType(Operator const* op) {
V(S1x8AnyTrue, Operator::kNoProperties, 1, 0, 1) \
V(S1x8AllTrue, Operator::kNoProperties, 1, 0, 1) \
V(S1x16AnyTrue, Operator::kNoProperties, 1, 0, 1) \
V(S1x16AllTrue, Operator::kNoProperties, 1, 0, 1)
V(S1x16AllTrue, Operator::kNoProperties, 1, 0, 1) \
V(StackPointerGreaterThan, Operator::kNoProperties, 1, 0, 1)
// The format is:
// V(Name, properties, value_input_count, control_input_count, output_count)
#define PURE_OPTIONAL_OP_LIST(V) \
V(Word32Ctz, Operator::kNoProperties, 1, 0, 1) \
V(Word64Ctz, Operator::kNoProperties, 1, 0, 1) \
@@ -406,6 +415,8 @@ MachineType AtomicOpType(Operator const* op) {
V(Float32RoundTiesEven, Operator::kNoProperties, 1, 0, 1) \
V(Float64RoundTiesEven, Operator::kNoProperties, 1, 0, 1)
// The format is:
// V(Name, properties, value_input_count, control_input_count, output_count)
#define OVERFLOW_OP_LIST(V) \
V(Int32AddWithOverflow, Operator::kAssociative | Operator::kCommutative) \
V(Int32SubWithOverflow, Operator::kNoProperties) \
......
@@ -660,10 +660,15 @@ class V8_EXPORT_PRIVATE MachineOperatorBuilder final
const Operator* Word64PoisonOnSpeculation();
// Access to the machine stack.
// TODO(jgruber): Remove LoadStackPointer once uses in wasm, interpreter, and
// in CSA have been removed.
const Operator* LoadStackPointer();
const Operator* LoadFramePointer();
const Operator* LoadParentFramePointer();
// Compares: stack_pointer > value.
const Operator* StackPointerGreaterThan();
// Memory barrier.
const Operator* MemBarrier();
@@ -739,7 +739,8 @@
V(SignExtendWord8ToInt64) \
V(SignExtendWord16ToInt64) \
V(SignExtendWord32ToInt64) \
V(UnsafePointerAdd)
V(UnsafePointerAdd) \
V(StackPointerGreaterThan)
#define MACHINE_SIMD_OP_LIST(V) \
V(F64x2Splat) \
@@ -1881,6 +1881,7 @@ void Verifier::Visitor::Check(Node* node, const AllNodes& all) {
case IrOpcode::kSignExtendWord16ToInt64:
case IrOpcode::kSignExtendWord32ToInt64:
case IrOpcode::kStaticAssert:
case IrOpcode::kStackPointerGreaterThan:
#define SIMD_MACHINE_OP_CASE(Name) case IrOpcode::k##Name:
MACHINE_SIMD_OP_LIST(SIMD_MACHINE_OP_CASE)