Commit 0b4643b7 authored by Kanghua Yu, committed by Commit Bot

[x64] Add conditional jumps with Code targets

Change-Id: I647a1c175fb1a4f6fa764cc68c7b9c94d3b65817
Reviewed-on: https://chromium-review.googlesource.com/1053688
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Cr-Commit-Position: refs/heads/master@{#53175}
parent 0e37130b
......@@ -1127,8 +1127,7 @@ void Builtins::Generate_InterpreterPushArgsThenCallImpl(
Label stack_overflow;
// Number of values to be pushed.
__ Move(rcx, rax);
__ addp(rcx, Immediate(1)); // Add one for receiver.
__ leal(rcx, Operand(rax, 1)); // Add one for receiver.
// Add a stack check before pushing arguments.
Generate_StackOverflowCheck(masm, rcx, rdx, &stack_overflow);
......@@ -1139,7 +1138,7 @@ void Builtins::Generate_InterpreterPushArgsThenCallImpl(
// Push "undefined" as the receiver arg if we need to.
if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ subp(rcx, Immediate(1)); // Subtract one for receiver.
__ decl(rcx); // Subtract one for receiver.
}
// rbx and rdx will be modified.
......@@ -1147,7 +1146,7 @@ void Builtins::Generate_InterpreterPushArgsThenCallImpl(
if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
__ Pop(rbx); // Pass the spread in a register
__ subp(rax, Immediate(1)); // Subtract one for spread
__ decl(rax); // Subtract one for spread
}
// Call the target.
......@@ -1199,7 +1198,7 @@ void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
__ Pop(rbx); // Pass the spread in a register
__ subp(rax, Immediate(1)); // Subtract one for spread
__ decl(rax); // Subtract one for spread
// Push return address in preparation for the tail-call.
__ PushReturnAddressFrom(kScratchRegister);
......@@ -2505,37 +2504,29 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
// -----------------------------------
StackArgumentsAccessor args(rsp, rax);
// TODO(jgruber): Support conditional jumps (Assembler::j) with Code targets.
Label non_js_function, non_js_bound_function, non_callable, non_function,
non_smi;
Label non_callable;
__ JumpIfSmi(rdi, &non_callable);
__ bind(&non_smi);
__ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
__ j(not_equal, &non_js_function, Label::kNear);
__ Jump(masm->isolate()->builtins()->CallFunction(mode),
RelocInfo::CODE_TARGET);
RelocInfo::CODE_TARGET, equal);
__ bind(&non_js_function);
__ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
__ j(not_equal, &non_js_bound_function, Label::kNear);
__ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
RelocInfo::CODE_TARGET);
RelocInfo::CODE_TARGET, equal);
// Check if target has a [[Call]] internal method.
__ bind(&non_js_bound_function);
__ testb(FieldOperand(rcx, Map::kBitFieldOffset),
Immediate(Map::IsCallableBit::kMask));
__ j(zero, &non_callable, Label::kNear);
// Check if target is a proxy and call CallProxy external builtin
__ CmpInstanceType(rcx, JS_PROXY_TYPE);
__ j(not_equal, &non_function, Label::kNear);
__ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET);
__ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET,
equal);
// 2. Call to something else, which might have a [[Call]] internal method (if
// not we raise an exception).
__ bind(&non_function);
// Overwrite the original receiver with the (original) target.
__ movp(args.GetReceiverOperand(), rdi);
// Let the "call_as_function_delegate" take care of the rest.
......@@ -2567,18 +2558,13 @@ void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
// rbx to contain either an AllocationSite or undefined.
__ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
Label call_generic_stub;
// Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
__ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
__ testl(FieldOperand(rcx, SharedFunctionInfo::kFlagsOffset),
Immediate(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
__ j(zero, &call_generic_stub, Label::kNear);
__ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
RelocInfo::CODE_TARGET);
RelocInfo::CODE_TARGET, not_zero);
__ bind(&call_generic_stub);
__ Jump(masm->isolate()->builtins()->JSConstructStubGeneric(),
RelocInfo::CODE_TARGET);
}
......@@ -2622,7 +2608,7 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
StackArgumentsAccessor args(rsp, rax);
// Check if target is a Smi.
Label non_constructor, non_proxy;
Label non_constructor;
__ JumpIfSmi(rdi, &non_constructor);
// Check if target has a [[Construct]] internal method.
......@@ -2631,32 +2617,23 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
Immediate(Map::IsConstructorBit::kMask));
__ j(zero, &non_constructor);
// TODO(jgruber): Support conditional jumps (Assembler::j) with Code targets.
Label non_js_function, non_js_bound_function;
// Dispatch based on instance type.
__ CmpInstanceType(rcx, JS_FUNCTION_TYPE);
__ j(not_equal, &non_js_function);
__ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
RelocInfo::CODE_TARGET);
RelocInfo::CODE_TARGET, equal);
// Only dispatch to bound functions after checking whether they are
// constructors.
__ bind(&non_js_function);
__ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
__ j(not_equal, &non_js_bound_function);
__ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
RelocInfo::CODE_TARGET);
RelocInfo::CODE_TARGET, equal);
// Only dispatch to proxies after checking whether they are constructors.
__ bind(&non_js_bound_function);
__ CmpInstanceType(rcx, JS_PROXY_TYPE);
__ j(not_equal, &non_proxy, Label::kNear);
__ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
RelocInfo::CODE_TARGET);
__ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy), RelocInfo::CODE_TARGET,
equal);
// Called Construct on an exotic Object with a [[Construct]] internal method.
__ bind(&non_proxy);
{
// Overwrite the original receiver with the (original) target.
__ movp(args.GetReceiverOperand(), rdi);
......@@ -2973,16 +2950,16 @@ void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
__ shrl(rcx, Immediate(HeapNumber::kExponentShift));
__ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
__ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
__ j(below, &process_64_bits);
__ j(below, &process_64_bits, Label::kNear);
// Result is entirely in lower 32-bits of mantissa
int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
__ subl(rcx, Immediate(delta));
__ xorl(result_reg, result_reg);
__ cmpl(rcx, Immediate(31));
__ j(above, &done);
__ j(above, &done, Label::kNear);
__ shll_cl(scratch1);
__ jmp(&check_negative);
__ jmp(&check_negative, Label::kNear);
__ bind(&process_64_bits);
__ Cvttsd2siq(result_reg, kScratchDoubleReg);
......
......@@ -1430,6 +1430,12 @@ void Assembler::j(Condition cc, Address entry, RelocInfo::Mode rmode) {
void Assembler::j(Condition cc,
Handle<Code> target,
RelocInfo::Mode rmode) {
if (cc == always) {
jmp(target, rmode);
return;
} else if (cc == never) {
return;
}
EnsureSpace ensure_space(this);
DCHECK(is_uint4(cc));
// 0000 1111 1000 tttn #32-bit disp.
......
......@@ -1548,18 +1548,24 @@ void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
jmp(kScratchRegister);
}
void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
// TODO(X64): Inline this
// Jumps to |code_object| with relocation mode |rmode|, optionally guarded by
// condition |cc| (default: always; see the declaration's default argument).
// When |cc| is `never` this is a no-op; otherwise the jump is taken only if
// |cc| holds. Clobbers kScratchRegister on the embedded-builtins path.
void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode,
                          Condition cc) {
  // TODO(X64): Inline this
#ifdef V8_EMBEDDED_BUILTINS
  if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) {
    // Off-heap builtins cannot use a relocated Code-target jump; instead we
    // load the code object's entry address and jump through a register.
    // Conditional jumps are synthesized by branching around the unconditional
    // register jump on the negated condition.
    Label skip;
    if (cc != always) {
      if (cc == never) return;  // Condition can never hold: emit nothing.
      j(NegateCondition(cc), &skip, Label::kNear);
    }
    LookupConstant(kScratchRegister, code_object);
    // Skip the Code header to reach the first instruction.
    leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
    jmp(kScratchRegister);
    bind(&skip);
    return;
  }
#endif  // V8_EMBEDDED_BUILTINS
  // On-heap path: Assembler::j handles always/never itself and otherwise
  // emits a relocated conditional jump to the Code target.
  j(cc, code_object, rmode);
}
void MacroAssembler::JumpToInstructionStream(Address entry) {
......
......@@ -765,7 +765,8 @@ class MacroAssembler : public TurboAssembler {
void Jump(Address destination, RelocInfo::Mode rmode);
void Jump(ExternalReference ext);
void Jump(Operand op);
void Jump(Handle<Code> code_object, RelocInfo::Mode rmode);
void Jump(Handle<Code> code_object, RelocInfo::Mode rmode,
Condition cc = always);
// Generates a trampoline to jump to the off-heap instruction stream.
void JumpToInstructionStream(Address entry);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment