Commit 577ec180 authored by bmeurer, committed by Commit bot

[turbofan] Hook up the shift code stubs with TurboFan.

Use the ShiftLeftStub, ShiftRightStub and ShiftRightLogicalStub in
JSGenericLowering instead of the old-style patching BinaryOpIC.

Also remove the machinery to support patching ICs in TurboFan
completely, as this was the last user of code patching in TurboFan!

R=yangguo@chromium.org

Review-Url: https://codereview.chromium.org/1942153002
Cr-Commit-Position: refs/heads/master@{#35959}
parent cb73f264
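
For context: the three stubs named in the commit message are reached through CodeFactory accessors. A minimal sketch of one such accessor, assuming it mirrors V8's other binary-operation accessors of this era (the accessor itself is not part of this diff):

// Sketch, not part of this diff: how CodeFactory presumably wraps the new
// ShiftLeftStub into a Callable for JSGenericLowering to consume.
Callable CodeFactory::ShiftLeft(Isolate* isolate) {
  ShiftLeftStub stub(isolate);
  return Callable(stub.GetCode(), stub.GetCallInterfaceDescriptor());
}

ShiftRight and ShiftRightLogical would follow the same pattern with their respective stubs.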
@@ -1657,11 +1657,6 @@ void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
 }
-void CodeGenerator::AddNopForSmiCodeInlining() {
-  // On 32-bit ARM we do not insert nops for inlined Smi code.
-}
 void CodeGenerator::EnsureSpaceForLazyDeopt() {
   if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
     return;
...
@@ -1836,9 +1836,6 @@ void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
 }
-void CodeGenerator::AddNopForSmiCodeInlining() { __ movz(xzr, 0); }
 void CodeGenerator::EnsureSpaceForLazyDeopt() {
   if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
     return;
...
@@ -507,10 +507,6 @@ void CodeGenerator::RecordCallPosition(Instruction* instr) {
     handlers_.push_back({caught, GetLabel(handler_rpo), masm()->pc_offset()});
   }
-  if (flags & CallDescriptor::kNeedsNopAfterCall) {
-    AddNopForSmiCodeInlining();
-  }
   if (needs_frame_state) {
     MarkLazyDeoptSite();
     // If the frame state is present, it starts at argument 1 (just after the
...
@@ -181,7 +181,6 @@ class CodeGenerator final : public GapResolver::Assembler {
                     Translation* translation);
   void AddTranslationForOperand(Translation* translation, Instruction* instr,
                                 InstructionOperand* op, MachineType type);
-  void AddNopForSmiCodeInlining();
   void EnsureSpaceForLazyDeopt();
   void MarkLazyDeoptSite();
...
@@ -1919,9 +1919,6 @@ void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
 }
-void CodeGenerator::AddNopForSmiCodeInlining() { __ nop(); }
 void CodeGenerator::EnsureSpaceForLazyDeopt() {
   if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
     return;
...
@@ -1607,8 +1607,6 @@ void InstructionSelector::VisitTailCall(Node* node) {
   OperandGenerator g(this);
   CallDescriptor const* descriptor = CallDescriptorOf(node->op());
   DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls);
-  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kPatchableCallSite);
-  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
   // TODO(turbofan): Relax restriction for stack parameters.
...
@@ -47,18 +47,6 @@ Reduction JSGenericLowering::Reduce(Node* node) {
   }
   return Changed(node);
 }
-#define REPLACE_BINARY_OP_IC_CALL(Op, token)                                \
-  void JSGenericLowering::Lower##Op(Node* node) {                           \
-    CallDescriptor::Flags flags = AdjustFrameStatesForCall(node);           \
-    ReplaceWithStubCall(node, CodeFactory::BinaryOpIC(isolate(), token),    \
-                        CallDescriptor::kPatchableCallSiteWithNop | flags); \
-  }
-REPLACE_BINARY_OP_IC_CALL(JSShiftLeft, Token::SHL)
-REPLACE_BINARY_OP_IC_CALL(JSShiftRight, Token::SAR)
-REPLACE_BINARY_OP_IC_CALL(JSShiftRightLogical, Token::SHR)
-#undef REPLACE_BINARY_OP_IC_CALL
 #define REPLACE_RUNTIME_CALL(op, fun)             \
   void JSGenericLowering::Lower##op(Node* node) { \
     ReplaceWithRuntimeCall(node, fun);            \
...
@@ -82,6 +70,9 @@ REPLACE_STUB_CALL(Modulus)
 REPLACE_STUB_CALL(BitwiseAnd)
 REPLACE_STUB_CALL(BitwiseOr)
 REPLACE_STUB_CALL(BitwiseXor)
+REPLACE_STUB_CALL(ShiftLeft)
+REPLACE_STUB_CALL(ShiftRight)
+REPLACE_STUB_CALL(ShiftRightLogical)
 REPLACE_STUB_CALL(LessThan)
 REPLACE_STUB_CALL(LessThanOrEqual)
 REPLACE_STUB_CALL(GreaterThan)
...
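
The REPLACE_STUB_CALL definition itself sits outside this hunk. A sketch of what it presumably expands to, modeled on the removed REPLACE_BINARY_OP_IC_CALL above but with the patching-related flags dropped (the exact body is an assumption, not shown in this diff):

// Sketch, not part of this diff: each operator lowers to a plain stub call
// through its CodeFactory accessor, with no patchable call site or nop.
#define REPLACE_STUB_CALL(Name)                                    \
  void JSGenericLowering::LowerJS##Name(Node* node) {              \
    CallDescriptor::Flags flags = AdjustFrameStatesForCall(node);  \
    Callable callable = CodeFactory::Name(isolate());              \
    ReplaceWithStubCall(node, callable, flags);                    \
  }

So REPLACE_STUB_CALL(ShiftLeft) lowers a JSShiftLeft node to an ordinary call to the ShiftLeftStub, which is what lets the patching machinery disappear everywhere else in this commit.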
@@ -152,20 +152,17 @@ class CallDescriptor final : public ZoneObject {
   enum Flag {
     kNoFlags = 0u,
     kNeedsFrameState = 1u << 0,
-    kPatchableCallSite = 1u << 1,
-    kNeedsNopAfterCall = 1u << 2,
-    kHasExceptionHandler = 1u << 3,
-    kHasLocalCatchHandler = 1u << 4,
-    kSupportsTailCalls = 1u << 5,
-    kCanUseRoots = 1u << 6,
+    kHasExceptionHandler = 1u << 1,
+    kHasLocalCatchHandler = 1u << 2,
+    kSupportsTailCalls = 1u << 3,
+    kCanUseRoots = 1u << 4,
     // (arm64 only) native stack should be used for arguments.
-    kUseNativeStack = 1u << 7,
+    kUseNativeStack = 1u << 5,
     // (arm64 only) call instruction has to restore JSSP or CSP.
-    kRestoreJSSP = 1u << 8,
-    kRestoreCSP = 1u << 9,
+    kRestoreJSSP = 1u << 6,
+    kRestoreCSP = 1u << 7,
     // Causes the code generator to initialize the root register.
-    kInitializeRootRegister = 1u << 10,
-    kPatchableCallSiteWithNop = kPatchableCallSite | kNeedsNopAfterCall
+    kInitializeRootRegister = 1u << 8
   };
 
   typedef base::Flags<Flag> Flags;
...
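
Since Flag is consumed through base::Flags, the renumbering is invisible to callers, which only combine and test bits. A hypothetical usage example (not part of this diff), following the same pattern as the DCHECKs in VisitTailCall above:

// Hypothetical: build a flag set, then test individual bits.
CallDescriptor::Flags flags =
    CallDescriptor::kNeedsFrameState | CallDescriptor::kHasExceptionHandler;
DCHECK_NE(0, flags & CallDescriptor::kNeedsFrameState);
DCHECK_EQ(0, flags & CallDescriptor::kSupportsTailCalls);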
@@ -1940,13 +1940,6 @@ void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
 }
-void CodeGenerator::AddNopForSmiCodeInlining() {
-  // Unused on 32-bit ARM. Still exists on 64-bit arm.
-  // TODO(plind): Unclear when this is called now. Understand, fix if needed.
-  __ nop();  // Maybe PROPERTY_ACCESS_INLINED?
-}
 void CodeGenerator::EnsureSpaceForLazyDeopt() {
   if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
     return;
...
@@ -2204,13 +2204,6 @@ void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
 }
-void CodeGenerator::AddNopForSmiCodeInlining() {
-  // Unused on 32-bit ARM. Still exists on 64-bit arm.
-  // TODO(plind): Unclear when this is called now. Understand, fix if needed.
-  __ nop();  // Maybe PROPERTY_ACCESS_INLINED?
-}
 void CodeGenerator::EnsureSpaceForLazyDeopt() {
   if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
     return;
...
@@ -2063,11 +2063,6 @@ void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
 }
-void CodeGenerator::AddNopForSmiCodeInlining() {
-  // We do not insert nops for inlined Smi code.
-}
 void CodeGenerator::EnsureSpaceForLazyDeopt() {
   if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
     return;
...
@@ -2094,10 +2094,6 @@ void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
   }
 }
-void CodeGenerator::AddNopForSmiCodeInlining() {
-  // We do not insert nops for inlined Smi code.
-}
 void CodeGenerator::EnsureSpaceForLazyDeopt() {
   if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
     return;
...
@@ -2278,9 +2278,6 @@ void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
 }
-void CodeGenerator::AddNopForSmiCodeInlining() { __ nop(); }
 void CodeGenerator::EnsureSpaceForLazyDeopt() {
   if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
     return;
...
@@ -2455,9 +2455,6 @@ void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
 }
-void CodeGenerator::AddNopForSmiCodeInlining() { __ nop(); }
 void CodeGenerator::EnsureSpaceForLazyDeopt() {
   if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
     return;
...