Commit 525b72a4 authored by lrn@chromium.org

X64: Implemented InvokeFunction

Review URL: http://codereview.chromium.org/122030


git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@2142 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 594a30ef
......@@ -363,7 +363,7 @@ void RelocIterator::next() {
if (SetMode(DebugInfoModeFromTag(top_tag))) return;
} else {
// Otherwise, just skip over the data.
Advance(kIntSize);
Advance(kIntptrSize);
}
} else {
AdvanceReadPC();
......
......@@ -73,7 +73,7 @@ XMMRegister xmm14 = { 14 };
XMMRegister xmm15 = { 15 };
Operand::Operand(Register base, int32_t disp) {
Operand::Operand(Register base, int32_t disp): rex_(0) {
len_ = 1;
if (base.is(rsp) || base.is(r12)) {
// SIB byte is needed to encode (rsp + offset) or (r12 + offset).
......@@ -95,7 +95,7 @@ Operand::Operand(Register base, int32_t disp) {
Operand::Operand(Register base,
Register index,
ScaleFactor scale,
int32_t disp) {
int32_t disp): rex_(0) {
ASSERT(!index.is(rsp) && !index.is(r12));
len_ = 1;
set_sib(scale, index, base);
......
......@@ -428,7 +428,6 @@ class Assembler : public Malloced {
// Load a 32-bit immediate value, zero-extended to 64 bits.
void movl(Register dst, Immediate imm32);
void movq(Register dst, int32_t imm32);
void movq(Register dst, const Operand& src);
// Sign extends immediate 32-bit value to 64 bits.
void movq(Register dst, Immediate x);
......
......@@ -39,10 +39,126 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
masm->int3(); // UNIMPLEMENTED.
}
// Builds an arguments-adaptor frame: saves the caller's frame pointer,
// pushes the adaptor sentinel, the function (rdi), and the smi-tagged
// argument count, so LeaveArgumentsAdaptorFrame can unwind it later.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
__ push(rbp);
__ movq(rbp, rsp);
// Store the arguments adaptor context sentinel.
__ push(Immediate(ArgumentsAdaptorFrame::SENTINEL));
// Push the function on the stack.
__ push(rdi);
// Preserve the number of arguments on the stack. Must preserve both
// rax and rbx because these registers are used when copying the
// arguments and the receiver.
ASSERT(kSmiTagSize == 1);
// lea computes rax * 2 + kSmiTag, i.e. the smi-tagged argument count.
__ lea(rcx, Operand(rax, rax, kTimes1, kSmiTag));
__ push(rcx);
}
// Tears down the frame built by EnterArgumentsAdaptorFrame and removes
// the caller's arguments (including the receiver) from the stack, keeping
// the return address on top.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movq(rsp, rbp);
  __ pop(rbp);

  // Remove caller arguments from the stack.
  // rbx holds a Smi (the count shifted left by kSmiTagSize), so scaling
  // it by 4 yields count * kPointerSize, as the second assert checks.
  // Fix: the original ASSERT_EQ(kSmiTagSize, 1 && kSmiTag == 0) compared
  // kSmiTagSize against the boolean expression (1 && kSmiTag == 0) due to
  // operator precedence; assert the two invariants separately.
  ASSERT_EQ(kSmiTagSize, 1);
  ASSERT_EQ(kSmiTag, 0);
  ASSERT_EQ(kPointerSize, (1 << kSmiTagSize) * 4);
  __ pop(rcx);  // Return address.
  __ lea(rsp, Operand(rsp, rbx, kTimes4, 1 * kPointerSize));  // 1 ~ receiver
  __ push(rcx);  // Return address.
}
// Adapts the actual number of arguments (rax) to the number the callee
// expects (rbx) before entering the function code at rdx. Missing
// arguments are filled with undefined; extra arguments are copied but
// left for the adaptor frame teardown to discard.
//
// Note: the stray "masm->int3(); // UNIMPLEMENTED." at the top of this
// block was a deleted-line artifact of the diff view (the old stub body);
// leaving it in would trap before the real implementation runs, so it is
// removed here.
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdx : code entry to call
  // -----------------------------------

  Label invoke, dont_adapt_arguments;
  __ IncrementCounter(&Counters::arguments_adaptors, 1);

  Label enough, too_few;
  __ cmpq(rax, rbx);
  __ j(less, &too_few);
  // Functions marked with the "don't adapt" sentinel are entered directly.
  __ cmpq(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    // rax now points at the receiver slot in the caller's frame.
    __ lea(rax, Operand(rbp, rax, kTimesPointerSize, offset));
    __ movq(rcx, Immediate(-1));  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incq(rcx);
    __ push(Operand(rax, 0));
    __ subq(rax, Immediate(kPointerSize));
    __ cmpq(rcx, rbx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rdi, Operand(rbp, rax, kTimesPointerSize, offset));
    __ movq(rcx, Immediate(-1));  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incq(rcx);
    __ push(Operand(rdi, 0));
    __ subq(rdi, Immediate(kPointerSize));
    __ cmpq(rcx, rax);
    __ j(less, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ movq(kScratchRegister,
            Factory::undefined_value(),
            RelocInfo::EMBEDDED_OBJECT);
    __ bind(&fill);
    __ incq(rcx);
    __ push(kScratchRegister);
    __ cmpq(rcx, rbx);
    __ j(less, &fill);

    // Restore function pointer.
    __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ call(rdx);

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Dont adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ jmp(rdx);
}
// Function.prototype.apply builtin — not yet implemented on x64; emits a
// breakpoint trap so any call into it fails loudly.
void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
masm->int3(); // UNIMPLEMENTED.
}
......@@ -82,7 +198,6 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// Enter an internal frame.
__ EnterInternalFrame();
// Load the function context into rsi.
__ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset));
......@@ -155,6 +270,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
__ call(kScratchRegister);
} else {
ParameterCount actual(rax);
// Function must be in rdi.
__ InvokeFunction(rdi, actual, CALL_FUNCTION);
}
......
......@@ -102,13 +102,65 @@ void MacroAssembler::Abort(const char* msg) {
}
void MacroAssembler::CallRuntime(Runtime::FunctionId id, int argc) {
UNIMPLEMENTED();
// Calls the given code stub's generated code object. The call is made
// indirectly through kScratchRegister.
void MacroAssembler::CallStub(CodeStub* stub) {
ASSERT(allow_stub_calls()); // calls are not allowed in some stubs
movq(kScratchRegister, stub->GetCode(), RelocInfo::CODE_TARGET);
call(kScratchRegister);
}
// Returns from a stub, dropping (argc - 1) arguments from the stack.
// Only valid while generating a stub.
void MacroAssembler::StubReturn(int argc) {
ASSERT(argc >= 1 && generating_stub());
ret((argc - 1) * kPointerSize);
}
// Handles a runtime call made with the wrong number of arguments: drops
// the arguments from the stack and leaves undefined in rax as the result.
void MacroAssembler::IllegalOperation(int num_arguments) {
if (num_arguments > 0) {
addq(rsp, Immediate(num_arguments * kPointerSize));
}
movq(rax, Factory::undefined_value(), RelocInfo::EMBEDDED_OBJECT);
}
void MacroAssembler::TailCallRuntime(ExternalReference const& a, int b) {
UNIMPLEMENTED();
// Convenience overload: resolves the function id and forwards to the
// Runtime::Function* overload below.
void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
CallRuntime(Runtime::FunctionForId(id), num_arguments);
}
// Calls the runtime function f with num_arguments arguments already on
// the stack, via the corresponding RuntimeStub. On an argument-count
// mismatch, bails out through IllegalOperation (undefined in rax).
void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
// If the expected number of arguments of the runtime function is
// constant, we check that the actual number of arguments match the
// expectation.
if (f->nargs >= 0 && f->nargs != num_arguments) {
IllegalOperation(num_arguments);
return;
}
Runtime::FunctionId function_id =
static_cast<Runtime::FunctionId>(f->stub_id);
RuntimeStub stub(function_id, num_arguments);
CallStub(&stub);
}
// Tail-calls a runtime routine identified by an external reference:
// places the argument count in rax and jumps to the C entry stub.
void MacroAssembler::TailCallRuntime(ExternalReference const& ext,
int num_arguments) {
// TODO(1236192): Most runtime routines don't need the number of
// arguments passed in because it is constant. At some point we
// should remove this need and make the runtime routine entry code
// smarter.
movq(rax, Immediate(num_arguments));
JumpToBuiltin(ext);
}
// Jumps to the C entry runtime stub with rbx holding the target entry
// point (the external reference of the builtin to run).
void MacroAssembler::JumpToBuiltin(const ExternalReference& ext) {
// Set the entry point and jump to the C entry runtime stub.
movq(rbx, ext);
CEntryStub ces;
movq(kScratchRegister, ces.GetCode(), RelocInfo::CODE_TARGET);
jmp(kScratchRegister);
}
......@@ -325,12 +377,121 @@ void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
#endif // ENABLE_DEBUGGER_SUPPORT
// Shared prologue for the InvokeCode/InvokeFunction family. Compares the
// expected and actual argument counts. On a known or detected match it
// falls through so the caller can invoke the code directly; on a mismatch
// it routes the invocation through the ArgumentsAdaptorTrampoline with
// rax = actual count, rbx = expected count, rdx = code entry. 'done' is a
// label the caller binds after its own invoke site, jumped to after a
// CALL_FUNCTION-style adaptor call returns.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
const ParameterCount& actual,
Handle<Code> code_constant,
Register code_register,
Label* done,
InvokeFlag flag) {
bool definitely_matches = false;
Label invoke;
if (expected.is_immediate()) {
ASSERT(actual.is_immediate());
if (expected.immediate() == actual.immediate()) {
// Both counts known at compile time and equal: no adaption needed.
definitely_matches = true;
} else {
movq(rax, Immediate(actual.immediate()));
if (expected.immediate() ==
SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
// Don't worry about adapting arguments for built-ins that
// don't want that done. Skip adaption code by making it look
// like we have a match between expected and actual number of
// arguments.
definitely_matches = true;
} else {
movq(rbx, Immediate(expected.immediate()));
}
}
} else {
if (actual.is_immediate()) {
// Expected is in register, actual is immediate. This is the
// case when we invoke function values without going through the
// IC mechanism.
cmpq(expected.reg(), Immediate(actual.immediate()));
j(equal, &invoke);
ASSERT(expected.reg().is(rbx));
movq(rax, Immediate(actual.immediate()));
} else if (!expected.reg().is(actual.reg())) {
// Both expected and actual are in (different) registers. This
// is the case when we invoke functions using call and apply.
cmpq(expected.reg(), actual.reg());
j(equal, &invoke);
ASSERT(actual.reg().is(rax));
ASSERT(expected.reg().is(rbx));
}
}
if (!definitely_matches) {
// Counts may differ: go through the arguments adaptor, with the real
// code entry in rdx (from either the constant handle or the register).
Handle<Code> adaptor =
Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
if (!code_constant.is_null()) {
movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
} else if (!code_register.is(rdx)) {
movq(rdx, code_register);
}
movq(kScratchRegister, adaptor, RelocInfo::CODE_TARGET);
if (flag == CALL_FUNCTION) {
call(kScratchRegister);
// Skip the caller's own invoke site; the adaptor already ran the code.
jmp(done);
} else {
jmp(kScratchRegister);
}
// Reached when the dynamic comparison above found a match.
bind(&invoke);
}
}
// Invokes JavaScript code held in a register, adapting arguments first if
// needed (see InvokePrologue). Calls or jumps according to 'flag'.
void MacroAssembler::InvokeCode(Register code,
const ParameterCount& expected,
const ParameterCount& actual,
InvokeFlag flag) {
Label done;
InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
if (flag == CALL_FUNCTION) {
call(code);
} else {
ASSERT(flag == JUMP_FUNCTION);
jmp(code);
}
bind(&done);
}
// Invokes JavaScript code given as a constant code handle, adapting
// arguments first if needed. The indirect call/jump goes through
// kScratchRegister with the given relocation mode.
void MacroAssembler::InvokeCode(Handle<Code> code,
const ParameterCount& expected,
const ParameterCount& actual,
RelocInfo::Mode rmode,
InvokeFlag flag) {
Label done;
// The code register is unused when a non-null code_constant is passed;
// rax merely satisfies InvokePrologue's signature here.
Register dummy = rax;
InvokePrologue(expected, actual, code, dummy, &done, flag);
movq(kScratchRegister, code, rmode);
if (flag == CALL_FUNCTION) {
call(kScratchRegister);
} else {
ASSERT(flag == JUMP_FUNCTION);
jmp(kScratchRegister);
}
bind(&done);
}
void MacroAssembler::InvokeFunction(Register fun,
void MacroAssembler::InvokeFunction(Register function,
const ParameterCount& actual,
InvokeFlag flag) {
UNIMPLEMENTED();
ASSERT(function.is(rdi));
movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
movq(rbx, FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
ParameterCount expected(rbx);
InvokeCode(rdx, expected, actual, flag);
}
......
......@@ -127,7 +127,7 @@ class MacroAssembler: public Assembler {
// JavaScript invokes
// Invoke the JavaScript function code by either calling or jumping.
void InvokeCode(const Operand& code,
void InvokeCode(Register code,
const ParameterCount& expected,
const ParameterCount& actual,
InvokeFlag flag);
......@@ -310,13 +310,13 @@ class MacroAssembler: public Assembler {
bool generating_stub_;
bool allow_stub_calls_;
Handle<Object> code_object_; // This handle will be patched with the code
// code object on installation.
// object on installation.
// Helper functions for generating invokes.
void InvokePrologue(const ParameterCount& expected,
const ParameterCount& actual,
Handle<Code> code_constant,
const Operand& code_operand,
Register code_register,
Label* done,
InvokeFlag flag);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment