Commit 74f9d8c9 authored by danno's avatar danno Committed by Commit bot

Add %GetCallerJSFunction intrinsic

Only optimized for TurboFan (TF); other tiers fall back to the runtime implementation.

R=mstarzinger@chromium.org

Review URL: https://codereview.chromium.org/1146963002

Cr-Commit-Position: refs/heads/master@{#28812}
parent ca2f8d81
......@@ -197,6 +197,20 @@ FieldAccess AccessBuilder::ForSharedFunctionInfoTypeFeedbackVector() {
Handle<Name>(), Type::Any(), kMachAnyTagged};
}
// static
FieldAccess AccessBuilder::ForFrameCallerFramePtr() {
  // Describes the untagged, machine-pointer-sized slot holding the caller's
  // frame pointer within a standard stack frame (no name, internal type).
  FieldAccess access = {kUntaggedBase, StandardFrameConstants::kCallerFPOffset,
                        MaybeHandle<Name>(), Type::Internal(), kMachPtr};
  return access;
}
// static
FieldAccess AccessBuilder::ForFrameMarker() {
  // Describes the tagged slot at the frame-marker offset of a standard stack
  // frame (accessed off an untagged frame-pointer base, hence kUntaggedBase).
  FieldAccess access = {kUntaggedBase, StandardFrameConstants::kMarkerOffset,
                        MaybeHandle<Name>(), Type::Tagged(), kMachAnyTagged};
  return access;
}
} // namespace compiler
} // namespace internal
} // namespace v8
......@@ -83,6 +83,12 @@ class AccessBuilder final : public AllStatic {
// Provides access to the TypeFeedbackVector in SharedFunctionInfo.
static FieldAccess ForSharedFunctionInfoTypeFeedbackVector();
// Provides access to the next frame pointer in a stack frame.
static FieldAccess ForFrameCallerFramePtr();
// Provides access to the marker in a stack frame.
static FieldAccess ForFrameMarker();
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(AccessBuilder);
};
......
......@@ -403,6 +403,10 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ mov(i.OutputRegister(), sp);
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
case kArchFramePointer:
__ mov(i.OutputRegister(), fp);
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
case kArchTruncateDoubleToI:
__ TruncateDoubleToI(i.OutputRegister(), i.InputFloat64Register(0));
DCHECK_EQ(LeaveCC, i.OutputSBit());
......
......@@ -443,6 +443,9 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
case kArchStackPointer:
__ mov(i.OutputRegister(), masm()->StackPointer());
break;
case kArchFramePointer:
__ mov(i.OutputRegister(), fp);
break;
case kArchTruncateDoubleToI:
__ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0));
break;
......
......@@ -376,6 +376,9 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
case kArchStackPointer:
__ mov(i.OutputRegister(), esp);
break;
case kArchFramePointer:
__ mov(i.OutputRegister(), ebp);
break;
case kArchTruncateDoubleToI: {
auto result = i.OutputRegister();
auto input = i.InputDoubleRegister(0);
......
......@@ -45,6 +45,7 @@ namespace compiler {
V(ArchDeoptimize) \
V(ArchRet) \
V(ArchStackPointer) \
V(ArchFramePointer) \
V(ArchTruncateDoubleToI) \
V(CheckedLoadInt8) \
V(CheckedLoadUint8) \
......
......@@ -761,6 +761,8 @@ void InstructionSelector::VisitNode(Node* node) {
return MarkAsFloat64(node), VisitFloat64InsertHighWord32(node);
case IrOpcode::kLoadStackPointer:
return VisitLoadStackPointer(node);
case IrOpcode::kLoadFramePointer:
return VisitLoadFramePointer(node);
case IrOpcode::kCheckedLoad: {
MachineType rep = OpParameter<MachineType>(node);
MarkAsRepresentation(rep, node);
......@@ -791,6 +793,12 @@ void InstructionSelector::VisitLoadStackPointer(Node* node) {
}
// Lowers a LoadFramePointer node to the architecture-neutral
// kArchFramePointer instruction, placing the result in a fresh register.
void InstructionSelector::VisitLoadFramePointer(Node* node) {
  OperandGenerator g(this);
  InstructionOperand result = g.DefineAsRegister(node);
  Emit(kArchFramePointer, result);
}
void InstructionSelector::EmitTableSwitch(const SwitchInfo& sw,
InstructionOperand& index_operand) {
OperandGenerator g(this);
......
......@@ -84,6 +84,8 @@ Reduction JSIntrinsicLowering::Reduce(Node* node) {
return ReduceFixedArraySet(node);
case Runtime::kInlineGetTypeFeedbackVector:
return ReduceGetTypeFeedbackVector(node);
case Runtime::kInlineGetCallerJSFunction:
return ReduceGetCallerJSFunction(node);
default:
break;
}
......@@ -455,6 +457,31 @@ Reduction JSIntrinsicLowering::ReduceGetTypeFeedbackVector(Node* node) {
}
// Lowers %_GetCallerJSFunction to two raw frame loads: read the caller's
// frame pointer off the current frame, then read the tagged slot at that
// frame's marker offset (which, per this lowering's intent, holds the
// calling JSFunction for a JavaScript frame — confirm against frames.h).
// Bails out to the runtime implementation when the function was inlined.
Reduction JSIntrinsicLowering::ReduceGetCallerJSFunction(Node* node) {
  Node* effect = NodeProperties::GetEffectInput(node);
  Node* control = NodeProperties::GetControlInput(node);
  // Walk the frame-state chain: an outer FrameState means this call site was
  // inlined, so there is no physical caller frame to inspect.
  Node* const frame_state = NodeProperties::GetFrameStateInput(node, 0);
  Node* outer_frame = frame_state->InputAt(kFrameStateOuterStateInput);
  if (outer_frame->opcode() == IrOpcode::kFrameState) {
    // Use the runtime implementation to throw the appropriate error if the
    // containing function is inlined.
    return NoChange();
  }
  // TODO(danno): This implementation forces intrinsic lowering to happen after
  // inlining, which is fine for now, but eventually the frame-querying logic
  // probably should go later, e.g. in instruction selection, so that there is
  // no phase-ordering dependency.
  // First load: caller FP = *(fp + kCallerFPOffset), an untagged pointer.
  FieldAccess access = AccessBuilder::ForFrameCallerFramePtr();
  Node* fp = graph()->NewNode(machine()->LoadFramePointer());
  Node* next_fp =
      graph()->NewNode(simplified()->LoadField(access), fp, effect, control);
  // Second load: replace |node| in place with a LoadField of the caller
  // frame's marker slot, reusing the original effect/control inputs.
  return Change(node, simplified()->LoadField(AccessBuilder::ForFrameMarker()),
                next_fp, effect, control);
}
Reduction JSIntrinsicLowering::Change(Node* node, const Operator* op, Node* a,
Node* b) {
node->set_op(op);
......
......@@ -53,6 +53,7 @@ class JSIntrinsicLowering final : public AdvancedReducer {
Reduction ReduceValueOf(Node* node);
Reduction ReduceFixedArraySet(Node* node);
Reduction ReduceGetTypeFeedbackVector(Node* node);
Reduction ReduceGetCallerJSFunction(Node* node);
Reduction Change(Node* node, const Operator* op);
Reduction Change(Node* node, const Operator* op, Node* a, Node* b);
......
......@@ -134,6 +134,7 @@ bool Linkage::NeedsFrameState(Runtime::FunctionId function) {
case Runtime::kInlineCallFunction:
case Runtime::kInlineDateField: // TODO(bmeurer): Remove this.
case Runtime::kInlineDeoptimizeNow:
case Runtime::kInlineGetCallerJSFunction:
case Runtime::kInlineGetPrototype:
case Runtime::kInlineRegExpExec:
case Runtime::kInlineThrowIfNotADate:
......
......@@ -148,7 +148,8 @@ CheckedStoreRepresentation CheckedStoreRepresentationOf(Operator const* op) {
V(Float32Min, Operator::kNoProperties, 2, 0, 1) \
V(Float64Max, Operator::kNoProperties, 2, 0, 1) \
V(Float64Min, Operator::kNoProperties, 2, 0, 1) \
V(LoadStackPointer, Operator::kNoProperties, 0, 0, 1)
V(LoadStackPointer, Operator::kNoProperties, 0, 0, 1) \
V(LoadFramePointer, Operator::kNoProperties, 0, 0, 1)
#define MACHINE_TYPE_LIST(V) \
......
......@@ -225,6 +225,7 @@ class MachineOperatorBuilder final : public ZoneObject {
// Access to the machine stack.
const Operator* LoadStackPointer();
const Operator* LoadFramePointer();
// checked-load heap, index, length
const Operator* CheckedLoad(CheckedLoadRepresentation);
......
......@@ -487,6 +487,9 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
case kArchStackPointer:
__ mov(i.OutputRegister(), sp);
break;
case kArchFramePointer:
__ mov(i.OutputRegister(), fp);
break;
case kArchTruncateDoubleToI:
__ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0));
break;
......
......@@ -487,6 +487,9 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
case kArchStackPointer:
__ mov(i.OutputRegister(), sp);
break;
case kArchFramePointer:
__ mov(i.OutputRegister(), fp);
break;
case kArchTruncateDoubleToI:
__ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0));
break;
......
......@@ -280,6 +280,7 @@
V(Float64InsertLowWord32) \
V(Float64InsertHighWord32) \
V(LoadStackPointer) \
V(LoadFramePointer) \
V(CheckedLoad) \
V(CheckedStore)
......
......@@ -687,6 +687,10 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ mr(i.OutputRegister(), sp);
DCHECK_EQ(LeaveRC, i.OutputRCBit());
break;
case kArchFramePointer:
__ mr(i.OutputRegister(), fp);
DCHECK_EQ(LeaveRC, i.OutputRCBit());
break;
case kArchTruncateDoubleToI:
// TODO(mbrandy): move slow call to stub out of line.
__ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0));
......
......@@ -441,6 +441,7 @@ class RawMachineAssembler : public GraphBuilder {
// Stack operations.
Node* LoadStackPointer() { return NewNode(machine()->LoadStackPointer()); }
Node* LoadFramePointer() { return NewNode(machine()->LoadFramePointer()); }
// Parameters.
Node* Parameter(size_t index);
......
......@@ -1038,6 +1038,7 @@ class RepresentationSelector {
case IrOpcode::kFloat64InsertHighWord32:
return VisitBinop(node, kMachFloat64, kMachInt32, kMachFloat64);
case IrOpcode::kLoadStackPointer:
case IrOpcode::kLoadFramePointer:
return VisitLeaf(node, kMachPtr);
case IrOpcode::kStateValues:
VisitStateValues(node);
......
......@@ -2312,6 +2312,11 @@ Bounds Typer::Visitor::TypeLoadStackPointer(Node* node) {
}
// The frame pointer is a raw machine-level value, typed as Internal
// (mirroring TypeLoadStackPointer above).
Bounds Typer::Visitor::TypeLoadFramePointer(Node* node) {
  Type* const frame_pointer_type = Type::Internal();
  return Bounds(frame_pointer_type);
}
// A checked load may produce any value, so no static bounds are known.
Bounds Typer::Visitor::TypeCheckedLoad(Node* node) {
  Zone* const local_zone = zone();
  return Bounds::Unbounded(local_zone);
}
......
......@@ -867,6 +867,7 @@ void Verifier::Visitor::Check(Node* node) {
case IrOpcode::kFloat64InsertLowWord32:
case IrOpcode::kFloat64InsertHighWord32:
case IrOpcode::kLoadStackPointer:
case IrOpcode::kLoadFramePointer:
case IrOpcode::kCheckedLoad:
case IrOpcode::kCheckedStore:
// TODO(rossberg): Check.
......
......@@ -626,6 +626,9 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
case kArchStackPointer:
__ movq(i.OutputRegister(), rsp);
break;
case kArchFramePointer:
__ movq(i.OutputRegister(), rbp);
break;
case kArchTruncateDoubleToI: {
auto result = i.OutputRegister();
auto input = i.InputDoubleRegister(0);
......
......@@ -416,5 +416,15 @@ RUNTIME_FUNCTION(Runtime_GetTypeFeedbackVector) {
CONVERT_ARG_CHECKED(JSFunction, function, 0);
return function->shared()->feedback_vector();
}
// Returns the JSFunction of the frame calling the current (stub) frame.
// Declared with 0 arguments (F(GetCallerJSFunction, 0, 1)); callable only
// when the immediate frame is a STUB frame whose caller is a JavaScript
// frame — anything else fails the RUNTIME_ASSERTs and throws.
RUNTIME_FUNCTION(Runtime_GetCallerJSFunction) {
  SealHandleScope shs(isolate);
  DCHECK(args.length() == 0);  // Consistent with sibling runtime functions.
  StackFrameIterator it(isolate);
  // The topmost frame must be the stub that invoked this runtime function.
  RUNTIME_ASSERT(it.frame()->type() == StackFrame::STUB);
  it.Advance();
  // Its caller must be a real (non-inlined) JavaScript frame.
  RUNTIME_ASSERT(it.frame()->type() == StackFrame::JAVA_SCRIPT);
  return JavaScriptFrame::cast(it.frame())->function();
}
} // namespace internal
} // namespace v8
......@@ -325,7 +325,8 @@ namespace internal {
F(Likely, 1, 1) \
F(Unlikely, 1, 1) \
F(HarmonyToString, 0, 1) \
F(GetTypeFeedbackVector, 1, 1)
F(GetTypeFeedbackVector, 1, 1) \
F(GetCallerJSFunction, 0, 1)
#define FOR_EACH_INTRINSIC_JSON(F) \
......
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --allow-natives-syntax --noalways-opt --nostress-opt
// Ensure that "real" js functions that call GetCallerJSFunction get an
// exception, since they are not stubs.
(function() {
var a = function() {
return %_GetCallerJSFunction();
}
assertThrows(a);
}());
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --allow-natives-syntax --turbo-filter=* --nostress-opt
// Test that for fully optimized but non inlined code, GetCallerJSFunction walks
// up a single stack frame to get the calling function. Full optimization elides
// the check in the runtime version of the intrinsic that would throw since the
// caller isn't a stub. It's a bit of a hack, but allows minimal testing of the
// intrinsic without writing a full-blown cctest.
(function() {
var a = function() {
return %_GetCallerJSFunction();
};
var b = function() {
return a();
};
%OptimizeFunctionOnNextCall(a);
assertEquals(b, b());
}());
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment