Commit 5dcd32be authored by whesse@chromium.org

X64 Crankshaft: Add inline one-element cache for Instanceof.

Review URL: http://codereview.chromium.org/6621071

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@7109 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 012db516
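
The change builds on the one-element instanceof cache V8 already keeps in the root array: remember the last (function, receiver map) pair and its answer, and only walk the prototype chain on a miss. This commit additionally patches that map/answer pair straight into the calling Crankshaft code. A minimal C++ sketch of the caching idea itself, using simplified stand-in types rather than V8's heap objects:

struct Map;                                   // stands in for a hidden class
struct Object { const Map* map; const Object* prototype; };
struct Function { const Object* prototype_object; };

struct InstanceofCache {                      // the "one element"
  const Function* function = nullptr;
  const Map* map = nullptr;
  bool answer = false;
};

static InstanceofCache cache;                 // V8 holds this in the root array

bool IsInstanceOf(const Object* value, const Function* fn) {
  if (cache.function == fn && cache.map == value->map) {
    return cache.answer;                      // hit: skip the prototype walk
  }
  bool result = false;
  for (const Object* p = value->prototype; p != nullptr; p = p->prototype) {
    if (p == fn->prototype_object) { result = true; break; }
  }
  cache.function = fn;                        // miss: refill the single slot
  cache.map = value->map;
  cache.answer = result;
  return result;
}
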
@@ -3657,20 +3657,39 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
void InstanceofStub::Generate(MacroAssembler* masm) {
// Implements "value instanceof function" operator.
// Expected input state:
// Expected input state with no inline cache:
// rsp[0] : return address
// rsp[1] : function pointer
// rsp[2] : value
// Expected input state with an inline one-element cache:
// rsp[0] : return address
// rsp[1] : offset from return address to location of inline cache
// rsp[2] : function pointer
// rsp[3] : value
// Returns a bitwise zero to indicate that the value
// is an instance of the function and anything else to
// indicate that the value is not an instance.
// None of the flags are supported on X64.
ASSERT(flags_ == kNoFlags);
static const int kOffsetToMapCheckValue = 5;
static const int kOffsetToResultValue = 21;
// The last 4 bytes of the instruction sequence
// movq(rdi, FieldOperand(rax, HeapObject::kMapOffset))
// Move(kScratchRegister, Factory::the_hole_value())
// in front of the hole value address.
static const unsigned int kWordBeforeMapCheckValue = 0xBA49FF78;
// The last 4 bytes of the instruction sequence
// __ j(not_equal, &cache_miss);
// __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
// before the offset of the hole value in the root array.
static const unsigned int kWordBeforeResultValue = 0x458B4909;
// Only the inline check flag is supported on X64.
ASSERT(flags_ == kNoFlags || HasCallSiteInlineCheck());
int extra_stack_space = HasCallSiteInlineCheck() ? kPointerSize : 0;
// Get the object - go slow case if it's a smi.
Label slow;
__ movq(rax, Operand(rsp, 2 * kPointerSize));
__ movq(rax, Operand(rsp, 2 * kPointerSize + extra_stack_space));
__ JumpIfSmi(rax, &slow);
// Check that the left hand is a JS object. Leave its map in rax.
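
When the call site uses the inline cache it pushes one extra word (the offset back to the patch site), so the stub's arguments sit one pointer deeper on the stack; extra_stack_space accounts for that in every Operand(rsp, ...) below. A trivial standalone check of that arithmetic, assuming kPointerSize == 8:

#include <cassert>

int main() {
  const int kPointerSize = 8;
  for (int inline_check = 0; inline_check <= 1; ++inline_check) {
    int extra_stack_space = inline_check ? kPointerSize : 0;
    // value: rsp[2] without the cache, rsp[3] with it.
    assert(2 * kPointerSize + extra_stack_space == (inline_check ? 24 : 16));
    // function pointer: rsp[1] without the cache, rsp[2] with it.
    assert(1 * kPointerSize + extra_stack_space == (inline_check ? 16 : 8));
  }
  return 0;
}
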
@@ -3680,19 +3699,23 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ j(above, &slow);
// Get the prototype of the function.
__ movq(rdx, Operand(rsp, 1 * kPointerSize));
__ movq(rdx, Operand(rsp, 1 * kPointerSize + extra_stack_space));
// rdx is function, rax is map.
// Look up the function and the map in the instanceof cache.
NearLabel miss;
__ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
__ j(not_equal, &miss);
__ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex);
__ j(not_equal, &miss);
__ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
__ ret(2 * kPointerSize);
// If there is a call site cache don't look in the global cache, but do the
// real lookup and update the call site cache.
if (!HasCallSiteInlineCheck()) {
// Look up the function and the map in the instanceof cache.
NearLabel miss;
__ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
__ j(not_equal, &miss);
__ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex);
__ j(not_equal, &miss);
__ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
__ ret(2 * kPointerSize);
__ bind(&miss);
}
__ bind(&miss);
__ TryGetFunctionPrototype(rdx, rbx, &slow);
// Check that the function prototype is a JS object.
@@ -3706,8 +3729,19 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
// rax is object map.
// rdx is function.
// rbx is function prototype.
__ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
__ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
if (!HasCallSiteInlineCheck()) {
__ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
__ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
} else {
__ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
__ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
__ movq(Operand(kScratchRegister, kOffsetToMapCheckValue), rax);
if (FLAG_debug_code) {
__ movl(rdi, Immediate(kWordBeforeMapCheckValue));
__ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
__ Assert(equal, "InstanceofStub unexpected call site cache.");
}
}
__ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset));
@@ -3726,19 +3760,56 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ jmp(&loop);
__ bind(&is_instance);
__ xorl(rax, rax);
// Store bitwise zero in the cache. This is a Smi in GC terms.
STATIC_ASSERT(kSmiTag == 0);
__ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
__ ret(2 * kPointerSize);
if (!HasCallSiteInlineCheck()) {
__ xorl(rax, rax);
// Store bitwise zero in the cache. This is a Smi in GC terms.
STATIC_ASSERT(kSmiTag == 0);
__ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
} else {
// Store offset of true in the root array at the inline check site.
ASSERT((Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias
== 0xB0 - 0x100);
__ movl(rax, Immediate(0xB0)); // TrueValue is at -10 * kPointerSize.
__ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
__ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
__ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
if (FLAG_debug_code) {
__ movl(rax, Immediate(kWordBeforeResultValue));
__ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
__ Assert(equal, "InstanceofStub unexpected call site cache.");
}
__ xorl(rax, rax);
}
__ ret(2 * kPointerSize + extra_stack_space);
__ bind(&is_not_instance);
// We have to store a non-zero value in the cache.
__ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
__ ret(2 * kPointerSize);
if (!HasCallSiteInlineCheck()) {
// We have to store a non-zero value in the cache.
__ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
} else {
// Store offset of false in the root array at the inline check site.
ASSERT((Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias
== 0xB8 - 0x100);
__ movl(rax, Immediate(0xB8)); // FalseValue is at -9 * kPointerSize.
__ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
__ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
__ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
if (FLAG_debug_code) {
__ movl(rax, Immediate(kWordBeforeResultValue));
__ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
__ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
}
}
__ ret(2 * kPointerSize + extra_stack_space);
// Slow-case: Go through the JavaScript implementation.
__ bind(&slow);
if (HasCallSiteInlineCheck()) {
// Remove extra value from the stack.
__ pop(rcx);
__ pop(rax);
__ push(rcx);
}
__ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
}
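
The magic constants in the stub are x64 machine-code bytes. 0xB0 and 0xB8 are signed 8-bit displacements off the biased root register (TrueValue at -10 * kPointerSize, FalseValue at -9 * kPointerSize, per the comments above), and the kWordBefore* words are the four bytes expected just before each patch site. A small self-checking sketch; the instruction decodings in the comments are my reading of the encodings, not something the commit spells out:

#include <cassert>
#include <cstdint>

int main() {
  const int kPointerSize = 8;
  // The byte written by movb() is a disp8 for the patched LoadRoot.
  assert(static_cast<int8_t>(0xB0) == -10 * kPointerSize);  // TrueValue
  assert(static_cast<int8_t>(0xB8) == -9 * kPointerSize);   // FalseValue
  // Read little-endian, kWordBeforeMapCheckValue is the byte sequence
  // 78 FF 49 BA: the tail of movq(rdi, FieldOperand(rax, kMapOffset))
  // followed by 49 BA, the movq(r10, imm64) opcode whose 8-byte
  // immediate is overwritten with the cached map.
  const unsigned int kWordBeforeMapCheckValue = 0xBA49FF78;
  assert((kWordBeforeMapCheckValue & 0xFFFFu) == 0xFF78u);
  assert((kWordBeforeMapCheckValue >> 16) == 0xBA49u);
  return 0;
}
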
@@ -1823,24 +1823,50 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
LInstanceOfKnownGlobal* instr)
: LDeferredCode(codegen), instr_(instr) { }
virtual void Generate() {
codegen()->DoDeferredLInstanceOfKnownGlobal(instr_);
codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
}
Label* map_check() { return &map_check_; }
private:
LInstanceOfKnownGlobal* instr_;
Label map_check_;
};
DeferredInstanceOfKnownGlobal* deferred;
deferred = new DeferredInstanceOfKnownGlobal(this, instr);
Label false_result;
Label done, false_result;
Register object = ToRegister(instr->InputAt(0));
// A Smi is not an instance of anything.
__ JumpIfSmi(object, &false_result);
// Null is not an instance of anything.
// This is the inlined call site instanceof cache. The two occurrences of the
// hole value will be patched to the last map/result pair generated by the
// instanceof stub.
NearLabel cache_miss;
// Use a temp register to avoid memory operands with variable lengths.
Register map = ToRegister(instr->TempAt(0));
__ movq(map, FieldOperand(object, HeapObject::kMapOffset));
__ bind(deferred->map_check()); // Label for calculating code patching.
__ Move(kScratchRegister, Factory::the_hole_value());
__ cmpq(map, kScratchRegister); // Patched to cached map.
__ j(not_equal, &cache_miss);
// Patched to load either true or false.
__ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
#ifdef DEBUG
// Check that the code size between patch label and patch sites is invariant.
Label end_of_patched_code;
__ bind(&end_of_patched_code);
ASSERT(true);
#endif
__ jmp(&done);
// The inlined call site cache did not match. Check for null and string
// before calling the deferred code.
__ bind(&cache_miss); // Null is not an instance of anything.
__ CompareRoot(object, Heap::kNullValueRootIndex);
__ j(equal, &false_result);
@@ -1851,17 +1877,26 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
__ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
__ bind(deferred->exit());
__ bind(&done);
}
void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check) {
__ PushSafepointRegisters();
InstanceofStub stub(InstanceofStub::kNoFlags);
InstanceofStub::Flags flags = static_cast<InstanceofStub::Flags>(
InstanceofStub::kNoFlags | InstanceofStub::kCallSiteInlineCheck);
InstanceofStub stub(flags);
__ push(ToRegister(instr->InputAt(0)));
__ Push(instr->function());
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
Register temp = ToRegister(instr->TempAt(0));
ASSERT(temp.is(rdi));
static const int kAdditionalDelta = 16;
int delta =
masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
__ movq(temp, Immediate(delta));
__ push(temp);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
__ movq(kScratchRegister, rax);
__ PopSafepointRegisters();
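
Putting the two sides together: the deferred code pushes how far the map-check label lies before the stub's return address (SizeOfCodeGeneratedSince plus the kAdditionalDelta fudge for the code emitted after the push), and the stub subtracts that delta from its return address to find the site to patch. A minimal sketch of the resulting address arithmetic in plain C++; PatchInlineCache is a hypothetical helper, not a V8 function:

#include <cstdint>
#include <cstring>

static const int kOffsetToMapCheckValue = 5;   // the 8 bytes holding the hole value
static const int kOffsetToResultValue = 21;    // the disp8 byte of the LoadRoot

void PatchInlineCache(uint8_t* return_address, intptr_t pushed_delta,
                      uint64_t cached_map, uint8_t result_disp8) {
  uint8_t* map_check = return_address - pushed_delta;
  // Overwrite the hole-value immediate with the cached map pointer...
  std::memcpy(map_check + kOffsetToMapCheckValue, &cached_map,
              sizeof(cached_map));
  // ...and retarget the LoadRoot at either TrueValue or FalseValue.
  map_check[kOffsetToResultValue] = result_disp8;
}
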
@@ -92,7 +92,8 @@ class LCodeGen BASE_EMBEDDED {
void DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr);
void DoDeferredStackCheck(LGoto* instr);
void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
void DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr);
void DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check);
// Parallel move support.
void DoParallelMove(LParallelMove* move);
@@ -1156,7 +1156,8 @@ LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
HInstanceOfKnownGlobal* instr) {
LInstanceOfKnownGlobal* result =
new LInstanceOfKnownGlobal(UseFixed(instr->value(), rax));
new LInstanceOfKnownGlobal(UseFixed(instr->value(), rax),
FixedTemp(rdi));
return MarkAsCall(DefineFixed(result, rax), instr);
}
@@ -844,10 +844,11 @@ class LInstanceOfAndBranch: public LControlInstruction<2, 0> {
};
class LInstanceOfKnownGlobal: public LTemplateInstruction<1, 1, 0> {
class LInstanceOfKnownGlobal: public LTemplateInstruction<1, 1, 1> {
public:
explicit LInstanceOfKnownGlobal(LOperand* value) {
LInstanceOfKnownGlobal(LOperand* value, LOperand* temp) {
inputs_[0] = value;
temps_[0] = temp;
}
DECLARE_CONCRETE_INSTRUCTION(InstanceOfKnownGlobal,
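
The three LTemplateInstruction parameters count results, inputs, and temps, so <1, 1, 1> reserves the one temp register (fixed to rdi above) that the deferred code needs for the patching delta. A toy sketch of the pattern, using a simplified stand-in for the real template:

struct LOperand;

// Simplified stand-in: R results, I inputs, T temps.
template <int R, int I, int T>
struct LTemplateInstruction {
  LOperand* results_[R == 0 ? 1 : R];
  LOperand* inputs_[I == 0 ? 1 : I];
  LOperand* temps_[T == 0 ? 1 : T];
};

struct LInstanceOfKnownGlobal : LTemplateInstruction<1, 1, 1> {
  LInstanceOfKnownGlobal(LOperand* value, LOperand* temp) {
    inputs_[0] = value;  // the object under test, fixed to rax
    temps_[0] = temp;    // scratch for the patching delta, fixed to rdi
  }
};
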