Commit 9ff644ae authored by jgruber, committed by Commit Bot

Fix stack check pattern matching for CSA code

The stack check instruction sequence is pattern-matched in
instruction-selector-{ia32,x64}.cc and replaced with its own specialized
opcode, for which we later generate an efficient stack check in a single
instruction.

But this pattern matching has never worked for CSA-generated code. The
matcher expected LoadStackPointer in the right operand and the external
reference load in the left operand. CSA generated exactly vice-versa.

This CL does a few things; it
1. reverts the recent change to load the stack
limit from smi roots:

Revert "[csa] Load the stack limit from smi roots"
This reverts commit 507c29c9.

2. tweaks the CSA instruction sequence to output what the matcher
expects.
3. refactors stack check matching into a new StackCheckMatcher class.
4. typifies CSA::PerformStackCheck as a drive-by.

Bug: v8:6666,v8:7844
Change-Id: I9bb879ac10bfe7187750c5f9e7834dc4accf28b5
Reviewed-on: https://chromium-review.googlesource.com/1099068
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Reviewed-by: Sigurd Schneider <sigurds@chromium.org>
Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Cr-Commit-Position: refs/heads/master@{#53737}
parent 9c22f3d7
......@@ -289,7 +289,7 @@ TF_BUILTIN(CallProxy, ProxiesCodeStubAssembler) {
CSA_ASSERT(this, IsJSProxy(proxy));
CSA_ASSERT(this, IsCallable(proxy));
PerformStackCheck(context);
PerformStackCheck(CAST(context));
Label throw_proxy_handler_revoked(this, Label::kDeferred),
trap_undefined(this);
......@@ -408,7 +408,7 @@ TF_BUILTIN(ProxyHasProperty, ProxiesCodeStubAssembler) {
CSA_ASSERT(this, IsJSProxy(proxy));
PerformStackCheck(context);
PerformStackCheck(CAST(context));
// 1. Assert: IsPropertyKey(P) is true.
CSA_ASSERT(this, IsName(name));
......
......@@ -11970,20 +11970,24 @@ void CodeStubAssembler::Print(const char* prefix, Node* tagged_value) {
CallRuntime(Runtime::kDebugPrint, NoContextConstant(), tagged_value);
}
void CodeStubAssembler::PerformStackCheck(Node* context) {
void CodeStubAssembler::PerformStackCheck(TNode<Context> context) {
Label ok(this), stack_check_interrupt(this, Label::kDeferred);
Node* sp = LoadStackPointer();
Node* stack_limit = BitcastTaggedToWord(LoadRoot(Heap::kStackLimitRootIndex));
Node* interrupt = UintPtrLessThan(sp, stack_limit);
// The instruction sequence below is carefully crafted to hit our pattern
// matcher for stack checks within instruction selection.
// See StackCheckMatcher::Matched and JSGenericLowering::LowerJSStackCheck.
Branch(interrupt, &stack_check_interrupt, &ok);
TNode<UintPtrT> sp = UncheckedCast<UintPtrT>(LoadStackPointer());
TNode<UintPtrT> stack_limit = UncheckedCast<UintPtrT>(Load(
MachineType::Pointer(),
ExternalConstant(ExternalReference::address_of_stack_limit(isolate()))));
TNode<BoolT> sp_within_limit = UintPtrLessThan(stack_limit, sp);
Branch(sp_within_limit, &ok, &stack_check_interrupt);
BIND(&stack_check_interrupt);
{
CallRuntime(Runtime::kStackGuard, context);
Goto(&ok);
}
CallRuntime(Runtime::kStackGuard, context);
Goto(&ok);
BIND(&ok);
}
......
......@@ -2603,7 +2603,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
bool ConstexprBoolNot(bool value) { return !value; }
void PerformStackCheck(Node* context);
void PerformStackCheck(TNode<Context> context);
protected:
// Implements DescriptorArray::Search().
......
......@@ -1244,20 +1244,15 @@ void VisitWordCompare(InstructionSelector* selector, Node* node,
void VisitWordCompare(InstructionSelector* selector, Node* node,
FlagsContinuation* cont) {
IA32OperandGenerator g(selector);
Int32BinopMatcher m(node);
if (m.left().IsLoad() && m.right().IsLoadStackPointer()) {
LoadMatcher<ExternalReferenceMatcher> mleft(m.left().node());
ExternalReference js_stack_limit =
ExternalReference::address_of_stack_limit(selector->isolate());
if (mleft.object().Is(js_stack_limit) && mleft.index().Is(0)) {
// Compare(Load(js_stack_limit), LoadStackPointer)
if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
InstructionCode opcode = cont->Encode(kIA32StackCheck);
CHECK(cont->IsBranch());
selector->EmitWithContinuation(opcode, cont);
return;
}
StackCheckMatcher<Int32BinopMatcher, IrOpcode::kUint32LessThan> m(
selector->isolate(), node);
if (m.Matched()) {
// Compare(Load(js_stack_limit), LoadStackPointer)
if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
InstructionCode opcode = cont->Encode(kIA32StackCheck);
CHECK(cont->IsBranch());
selector->EmitWithContinuation(opcode, cont);
return;
}
VisitWordCompare(selector, node, kIA32Cmp, cont);
}
......
......@@ -7,12 +7,11 @@
#include <cmath>
// TODO(turbofan): Move ExternalReference out of assembler.h
#include "src/assembler.h"
#include "src/base/compiler-specific.h"
#include "src/compiler/node.h"
#include "src/compiler/operator.h"
#include "src/double.h"
#include "src/external-reference.h"
#include "src/globals.h"
namespace v8 {
......@@ -744,6 +743,36 @@ struct V8_EXPORT_PRIVATE DiamondMatcher
Node* if_false_;
};
template <class BinopMatcher, IrOpcode::Value expected_opcode>
struct StackCheckMatcher {
StackCheckMatcher(Isolate* isolate, Node* compare)
: isolate_(isolate), compare_(compare) {}
bool Matched() {
// TODO(jgruber): Ideally, we could be more flexible here and also match the
// same pattern with switched operands (i.e.: left is LoadStackPointer and
// right is the js_stack_limit load). But to be correct in all cases, we'd
// then have to invert the outcome of the stack check comparison.
if (compare_->opcode() != expected_opcode) return false;
BinopMatcher m(compare_);
return MatchedInternal(m.left(), m.right());
}
private:
bool MatchedInternal(const typename BinopMatcher::LeftMatcher& l,
const typename BinopMatcher::RightMatcher& r) {
if (l.IsLoad() && r.IsLoadStackPointer()) {
LoadMatcher<ExternalReferenceMatcher> mleft(l.node());
ExternalReference js_stack_limit =
ExternalReference::address_of_stack_limit(isolate_);
if (mleft.object().Is(js_stack_limit) && mleft.index().Is(0)) return true;
}
return false;
}
Isolate* isolate_;
Node* compare_;
};
} // namespace compiler
} // namespace internal
} // namespace v8
......
......@@ -1727,19 +1727,15 @@ void VisitWord64Compare(InstructionSelector* selector, Node* node,
g.UseRegister(m.right().node()), cont);
}
}
Int64BinopMatcher m(node);
if (m.left().IsLoad() && m.right().IsLoadStackPointer()) {
LoadMatcher<ExternalReferenceMatcher> mleft(m.left().node());
ExternalReference js_stack_limit =
ExternalReference::address_of_stack_limit(selector->isolate());
if (mleft.object().Is(js_stack_limit) && mleft.index().Is(0)) {
// Compare(Load(js_stack_limit), LoadStackPointer)
if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
InstructionCode opcode = cont->Encode(kX64StackCheck);
CHECK(cont->IsBranch());
selector->EmitWithContinuation(opcode, cont);
return;
}
StackCheckMatcher<Int64BinopMatcher, IrOpcode::kUint64LessThan> m(
selector->isolate(), node);
if (m.Matched()) {
// Compare(Load(js_stack_limit), LoadStackPointer)
if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
InstructionCode opcode = cont->Encode(kX64StackCheck);
CHECK(cont->IsBranch());
selector->EmitWithContinuation(opcode, cont);
return;
}
VisitWordCompare(selector, node, kX64Cmp, cont);
}
......
......@@ -262,7 +262,7 @@ class ExternalReference BASE_EMBEDDED {
static ExternalReference Create(const Runtime::Function* f);
static ExternalReference Create(IsolateAddressId id, Isolate* isolate);
static ExternalReference Create(Runtime::FunctionId id);
static ExternalReference Create(Address address);
static V8_EXPORT_PRIVATE ExternalReference Create(Address address);
template <typename SubjectChar, typename PatternChar>
static ExternalReference search_string_raw();
......
......@@ -2646,7 +2646,7 @@ IGNITION_HANDLER(CreateRestParameter, InterpreterAssembler) {
//
// Performs a stack guard check.
IGNITION_HANDLER(StackCheck, InterpreterAssembler) {
Node* context = GetContext();
TNode<Context> context = CAST(GetContext());
PerformStackCheck(context);
Dispatch();
}
......
......@@ -853,6 +853,58 @@ TEST_F(InstructionSelectorTest, SpeculationFence) {
EXPECT_EQ(kLFence, s[0]->arch_opcode());
}
TEST_F(InstructionSelectorTest, StackCheck0) {
ExternalReference js_stack_limit =
ExternalReference::Create(isolate()->stack_guard()->address_of_jslimit());
StreamBuilder m(this, MachineType::Int32());
Node* const sp = m.LoadStackPointer();
Node* const stack_limit =
m.Load(MachineType::Pointer(), m.ExternalConstant(js_stack_limit));
Node* const interrupt = m.UintPtrLessThan(sp, stack_limit);
RawMachineLabel if_true, if_false;
m.Branch(interrupt, &if_true, &if_false);
m.Bind(&if_true);
m.Return(m.Int32Constant(1));
m.Bind(&if_false);
m.Return(m.Int32Constant(0));
Stream s = m.Build();
ASSERT_EQ(1U, s.size());
EXPECT_EQ(kIA32Cmp, s[0]->arch_opcode());
EXPECT_EQ(4U, s[0]->InputCount());
EXPECT_EQ(0U, s[0]->OutputCount());
}
TEST_F(InstructionSelectorTest, StackCheck1) {
ExternalReference js_stack_limit =
ExternalReference::Create(isolate()->stack_guard()->address_of_jslimit());
StreamBuilder m(this, MachineType::Int32());
Node* const sp = m.LoadStackPointer();
Node* const stack_limit =
m.Load(MachineType::Pointer(), m.ExternalConstant(js_stack_limit));
Node* const sp_within_limit = m.UintPtrLessThan(stack_limit, sp);
RawMachineLabel if_true, if_false;
m.Branch(sp_within_limit, &if_true, &if_false);
m.Bind(&if_true);
m.Return(m.Int32Constant(1));
m.Bind(&if_false);
m.Return(m.Int32Constant(0));
Stream s = m.Build();
ASSERT_EQ(1U, s.size());
EXPECT_EQ(kIA32StackCheck, s[0]->arch_opcode());
EXPECT_EQ(2U, s[0]->InputCount());
EXPECT_EQ(0U, s[0]->OutputCount());
}
} // namespace compiler
} // namespace internal
} // namespace v8
......@@ -1631,6 +1631,58 @@ TEST_F(InstructionSelectorTest, SpeculationFence) {
EXPECT_EQ(kLFence, s[0]->arch_opcode());
}
TEST_F(InstructionSelectorTest, StackCheck0) {
ExternalReference js_stack_limit =
ExternalReference::Create(isolate()->stack_guard()->address_of_jslimit());
StreamBuilder m(this, MachineType::Int32());
Node* const sp = m.LoadStackPointer();
Node* const stack_limit =
m.Load(MachineType::Pointer(), m.ExternalConstant(js_stack_limit));
Node* const interrupt = m.UintPtrLessThan(sp, stack_limit);
RawMachineLabel if_true, if_false;
m.Branch(interrupt, &if_true, &if_false);
m.Bind(&if_true);
m.Return(m.Int32Constant(1));
m.Bind(&if_false);
m.Return(m.Int32Constant(0));
Stream s = m.Build();
ASSERT_EQ(1U, s.size());
EXPECT_EQ(kX64Cmp, s[0]->arch_opcode());
EXPECT_EQ(4U, s[0]->InputCount());
EXPECT_EQ(0U, s[0]->OutputCount());
}
TEST_F(InstructionSelectorTest, StackCheck1) {
ExternalReference js_stack_limit =
ExternalReference::Create(isolate()->stack_guard()->address_of_jslimit());
StreamBuilder m(this, MachineType::Int32());
Node* const sp = m.LoadStackPointer();
Node* const stack_limit =
m.Load(MachineType::Pointer(), m.ExternalConstant(js_stack_limit));
Node* const sp_within_limit = m.UintPtrLessThan(stack_limit, sp);
RawMachineLabel if_true, if_false;
m.Branch(sp_within_limit, &if_true, &if_false);
m.Bind(&if_true);
m.Return(m.Int32Constant(1));
m.Bind(&if_false);
m.Return(m.Int32Constant(0));
Stream s = m.Build();
ASSERT_EQ(1U, s.size());
EXPECT_EQ(kX64StackCheck, s[0]->arch_opcode());
EXPECT_EQ(2U, s[0]->InputCount());
EXPECT_EQ(0U, s[0]->OutputCount());
}
} // namespace compiler
} // namespace internal
} // namespace v8
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment