Commit 74f9d8c9 authored by danno, committed by Commit bot

Add %GetCallerJSFunction intrinsic

Only optimized for TurboFan (TF).
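The intrinsic returns the JSFunction one stack frame above the code that executes
it. It is meant for stubs: the runtime fallback asserts that the executing frame
is a stub frame and throws otherwise, while the TurboFan lowering loads the caller
frame pointer and reads the function out of that frame directly. A minimal usage
sketch, mirroring the mjsunit tests added at the end of this CL (the helper names
here are hypothetical; requires --allow-natives-syntax):

// Hypothetical sketch, not part of the CL itself.
function inner() {
  // Unoptimized, this reaches Runtime_GetCallerJSFunction and throws, since
  // inner's frame is a JS frame rather than a stub frame.
  return %_GetCallerJSFunction();
}
function outer() {
  return inner();
}
// With inner() optimized by TurboFan (as in the second test below), the frame
// check is elided and outer() returns the function object for outer.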

R=mstarzinger@chromium.org

Review URL: https://codereview.chromium.org/1146963002

Cr-Commit-Position: refs/heads/master@{#28812}
parent ca2f8d81
...@@ -197,6 +197,20 @@ FieldAccess AccessBuilder::ForSharedFunctionInfoTypeFeedbackVector() {
Handle<Name>(), Type::Any(), kMachAnyTagged};
}
// static
FieldAccess AccessBuilder::ForFrameCallerFramePtr() {
return {kUntaggedBase, StandardFrameConstants::kCallerFPOffset,
MaybeHandle<Name>(), Type::Internal(), kMachPtr};
}
// static
FieldAccess AccessBuilder::ForFrameMarker() {
return {kUntaggedBase, StandardFrameConstants::kMarkerOffset,
MaybeHandle<Name>(), Type::Tagged(), kMachAnyTagged};
}
} // namespace compiler
} // namespace internal
} // namespace v8
...@@ -83,6 +83,12 @@ class AccessBuilder final : public AllStatic {
// Provides access to the TypeFeedbackVector in SharedFunctionInfo.
static FieldAccess ForSharedFunctionInfoTypeFeedbackVector();
// Provides access to the next frame pointer in a stack frame.
static FieldAccess ForFrameCallerFramePtr();
// Provides access to the marker in a stack frame.
static FieldAccess ForFrameMarker();
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(AccessBuilder);
};
......
...@@ -403,6 +403,10 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ mov(i.OutputRegister(), sp);
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
case kArchFramePointer:
__ mov(i.OutputRegister(), fp);
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
case kArchTruncateDoubleToI:
__ TruncateDoubleToI(i.OutputRegister(), i.InputFloat64Register(0));
DCHECK_EQ(LeaveCC, i.OutputSBit());
......
...@@ -443,6 +443,9 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
case kArchStackPointer:
__ mov(i.OutputRegister(), masm()->StackPointer());
break;
case kArchFramePointer:
__ mov(i.OutputRegister(), fp);
break;
case kArchTruncateDoubleToI:
__ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0));
break;
......
...@@ -376,6 +376,9 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
case kArchStackPointer:
__ mov(i.OutputRegister(), esp);
break;
case kArchFramePointer:
__ mov(i.OutputRegister(), ebp);
break;
case kArchTruncateDoubleToI: {
auto result = i.OutputRegister();
auto input = i.InputDoubleRegister(0);
......
...@@ -45,6 +45,7 @@ namespace compiler {
V(ArchDeoptimize) \
V(ArchRet) \
V(ArchStackPointer) \
V(ArchFramePointer) \
V(ArchTruncateDoubleToI) \
V(CheckedLoadInt8) \
V(CheckedLoadUint8) \
......
...@@ -761,6 +761,8 @@ void InstructionSelector::VisitNode(Node* node) {
return MarkAsFloat64(node), VisitFloat64InsertHighWord32(node);
case IrOpcode::kLoadStackPointer:
return VisitLoadStackPointer(node);
case IrOpcode::kLoadFramePointer:
return VisitLoadFramePointer(node);
case IrOpcode::kCheckedLoad: {
MachineType rep = OpParameter<MachineType>(node);
MarkAsRepresentation(rep, node);
...@@ -791,6 +793,12 @@ void InstructionSelector::VisitLoadStackPointer(Node* node) {
}
void InstructionSelector::VisitLoadFramePointer(Node* node) {
OperandGenerator g(this);
Emit(kArchFramePointer, g.DefineAsRegister(node));
}
void InstructionSelector::EmitTableSwitch(const SwitchInfo& sw,
InstructionOperand& index_operand) {
OperandGenerator g(this);
......
...@@ -84,6 +84,8 @@ Reduction JSIntrinsicLowering::Reduce(Node* node) {
return ReduceFixedArraySet(node);
case Runtime::kInlineGetTypeFeedbackVector:
return ReduceGetTypeFeedbackVector(node);
case Runtime::kInlineGetCallerJSFunction:
return ReduceGetCallerJSFunction(node);
default:
break;
}
...@@ -455,6 +457,31 @@ Reduction JSIntrinsicLowering::ReduceGetTypeFeedbackVector(Node* node) {
}
Reduction JSIntrinsicLowering::ReduceGetCallerJSFunction(Node* node) {
Node* effect = NodeProperties::GetEffectInput(node);
Node* control = NodeProperties::GetControlInput(node);
Node* const frame_state = NodeProperties::GetFrameStateInput(node, 0);
Node* outer_frame = frame_state->InputAt(kFrameStateOuterStateInput);
if (outer_frame->opcode() == IrOpcode::kFrameState) {
// Use the runtime implementation to throw the appropriate error if the
// containing function is inlined.
return NoChange();
}
// TODO(danno): This implementation forces intrinsic lowering to happen after
// inlining, which is fine for now, but eventually the frame-querying logic
// probably should go later, e.g. in instruction selection, so that there is
// no phase-ordering dependency.
FieldAccess access = AccessBuilder::ForFrameCallerFramePtr();
Node* fp = graph()->NewNode(machine()->LoadFramePointer());
Node* next_fp =
graph()->NewNode(simplified()->LoadField(access), fp, effect, control);
return Change(node, simplified()->LoadField(AccessBuilder::ForFrameMarker()),
next_fp, effect, control);
}
Reduction JSIntrinsicLowering::Change(Node* node, const Operator* op, Node* a,
Node* b) {
node->set_op(op);
......
...@@ -53,6 +53,7 @@ class JSIntrinsicLowering final : public AdvancedReducer {
Reduction ReduceValueOf(Node* node);
Reduction ReduceFixedArraySet(Node* node);
Reduction ReduceGetTypeFeedbackVector(Node* node);
Reduction ReduceGetCallerJSFunction(Node* node);
Reduction Change(Node* node, const Operator* op);
Reduction Change(Node* node, const Operator* op, Node* a, Node* b);
......
...@@ -134,6 +134,7 @@ bool Linkage::NeedsFrameState(Runtime::FunctionId function) {
case Runtime::kInlineCallFunction:
case Runtime::kInlineDateField:  // TODO(bmeurer): Remove this.
case Runtime::kInlineDeoptimizeNow:
case Runtime::kInlineGetCallerJSFunction:
case Runtime::kInlineGetPrototype:
case Runtime::kInlineRegExpExec:
case Runtime::kInlineThrowIfNotADate:
......
...@@ -148,7 +148,8 @@ CheckedStoreRepresentation CheckedStoreRepresentationOf(Operator const* op) {
V(Float32Min, Operator::kNoProperties, 2, 0, 1) \
V(Float64Max, Operator::kNoProperties, 2, 0, 1) \
V(Float64Min, Operator::kNoProperties, 2, 0, 1) \
V(LoadStackPointer, Operator::kNoProperties, 0, 0, 1) \
V(LoadFramePointer, Operator::kNoProperties, 0, 0, 1)
#define MACHINE_TYPE_LIST(V) \
......
...@@ -225,6 +225,7 @@ class MachineOperatorBuilder final : public ZoneObject {
// Access to the machine stack.
const Operator* LoadStackPointer();
const Operator* LoadFramePointer();
// checked-load heap, index, length
const Operator* CheckedLoad(CheckedLoadRepresentation);
......
...@@ -487,6 +487,9 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
case kArchStackPointer:
__ mov(i.OutputRegister(), sp);
break;
case kArchFramePointer:
__ mov(i.OutputRegister(), fp);
break;
case kArchTruncateDoubleToI:
__ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0));
break;
......
...@@ -487,6 +487,9 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
case kArchStackPointer:
__ mov(i.OutputRegister(), sp);
break;
case kArchFramePointer:
__ mov(i.OutputRegister(), fp);
break;
case kArchTruncateDoubleToI:
__ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0));
break;
......
...@@ -280,6 +280,7 @@
V(Float64InsertLowWord32) \
V(Float64InsertHighWord32) \
V(LoadStackPointer) \
V(LoadFramePointer) \
V(CheckedLoad) \
V(CheckedStore)
......
...@@ -687,6 +687,10 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ mr(i.OutputRegister(), sp);
DCHECK_EQ(LeaveRC, i.OutputRCBit());
break;
case kArchFramePointer:
__ mr(i.OutputRegister(), fp);
DCHECK_EQ(LeaveRC, i.OutputRCBit());
break;
case kArchTruncateDoubleToI:
// TODO(mbrandy): move slow call to stub out of line.
__ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0));
......
...@@ -441,6 +441,7 @@ class RawMachineAssembler : public GraphBuilder {
// Stack operations.
Node* LoadStackPointer() { return NewNode(machine()->LoadStackPointer()); }
Node* LoadFramePointer() { return NewNode(machine()->LoadFramePointer()); }
// Parameters.
Node* Parameter(size_t index);
......
...@@ -1038,6 +1038,7 @@ class RepresentationSelector {
case IrOpcode::kFloat64InsertHighWord32:
return VisitBinop(node, kMachFloat64, kMachInt32, kMachFloat64);
case IrOpcode::kLoadStackPointer:
case IrOpcode::kLoadFramePointer:
return VisitLeaf(node, kMachPtr);
case IrOpcode::kStateValues:
VisitStateValues(node);
......
...@@ -2312,6 +2312,11 @@ Bounds Typer::Visitor::TypeLoadStackPointer(Node* node) {
}
Bounds Typer::Visitor::TypeLoadFramePointer(Node* node) {
return Bounds(Type::Internal());
}
Bounds Typer::Visitor::TypeCheckedLoad(Node* node) {
return Bounds::Unbounded(zone());
}
......
...@@ -867,6 +867,7 @@ void Verifier::Visitor::Check(Node* node) {
case IrOpcode::kFloat64InsertLowWord32:
case IrOpcode::kFloat64InsertHighWord32:
case IrOpcode::kLoadStackPointer:
case IrOpcode::kLoadFramePointer:
case IrOpcode::kCheckedLoad:
case IrOpcode::kCheckedStore:
// TODO(rossberg): Check.
......
...@@ -626,6 +626,9 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
case kArchStackPointer:
__ movq(i.OutputRegister(), rsp);
break;
case kArchFramePointer:
__ movq(i.OutputRegister(), rbp);
break;
case kArchTruncateDoubleToI: {
auto result = i.OutputRegister();
auto input = i.InputDoubleRegister(0);
......
...@@ -416,5 +416,15 @@ RUNTIME_FUNCTION(Runtime_GetTypeFeedbackVector) {
CONVERT_ARG_CHECKED(JSFunction, function, 0);
return function->shared()->feedback_vector();
}
RUNTIME_FUNCTION(Runtime_GetCallerJSFunction) {
SealHandleScope shs(isolate);
StackFrameIterator it(isolate);
RUNTIME_ASSERT(it.frame()->type() == StackFrame::STUB);
it.Advance();
RUNTIME_ASSERT(it.frame()->type() == StackFrame::JAVA_SCRIPT);
return JavaScriptFrame::cast(it.frame())->function();
}
} // namespace internal
} // namespace v8
...@@ -325,7 +325,8 @@ namespace internal {
F(Likely, 1, 1) \
F(Unlikely, 1, 1) \
F(HarmonyToString, 0, 1) \
F(GetTypeFeedbackVector, 1, 1) \
F(GetCallerJSFunction, 0, 1)
#define FOR_EACH_INTRINSIC_JSON(F) \
......
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --allow-natives-syntax --noalways-opt --nostress-opt
// Ensure that "real" js functions that call GetCallerJSFunction get an
// exception, since they are not stubs.
(function() {
var a = function() {
return %_GetCallerJSFunction();
}
assertThrows(a);
}());
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --allow-natives-syntax --turbo-filter=* --nostress-opt
// Test that for fully optimized but non-inlined code, GetCallerJSFunction walks
// up a single stack frame to get the calling function. Full optimization elides
// the check in the runtime version of the intrinsic that would throw since the
// caller isn't a stub. It's a bit of a hack, but allows minimal testing of the
// intrinsic without writing a full-blown cctest.
(function() {
var a = function() {
return %_GetCallerJSFunction();
};
var b = function() {
return a();
};
%OptimizeFunctionOnNextCall(a);
assertEquals(b, b());
}());