Convert KeyedLoad indexed interceptor case to a Handler.

Currently, KeyedLoads on objects with indexed interceptors are handled with a
special stub. Instead, use the map-and-handler mechanism for more uniform
treatment.

R=verwaest@chromium.org

Review URL: https://codereview.chromium.org/575373004

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@24042 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 9505d5b5
......@@ -1785,6 +1785,32 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
}
// ARM handler stub for KeyedLoad on a receiver with an indexed interceptor.
// Installed via the map-and-handler mechanism (keyed on the receiver map), so
// unlike the old IC stub it performs no receiver smi/map/bit-field checks
// here — only the key check remains.
void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
// Return address is in lr.
Label slow;
Register receiver = LoadDescriptor::ReceiverRegister();
Register key = LoadDescriptor::NameRegister();
// Check that the key is an array index, that is Uint32.
__ NonNegativeSmiTst(key);
__ b(ne, &slow);
// Everything is fine, call runtime.
__ Push(receiver, key); // Receiver, key.
// Perform tail call to the entry.
// Two arguments were pushed above; control does not return here.
__ TailCallExternalReference(
ExternalReference(IC_Utility(IC::kLoadElementWithInterceptor),
masm->isolate()),
2, 1);
__ bind(&slow);
// Non-smi or negative key: bail out to the KeyedLoadIC miss builtin.
PropertyAccessCompiler::TailCallBuiltin(
masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}
void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// sp[0] : number of parameters
// sp[4] : receiver displacement
......
......@@ -1961,6 +1961,29 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
}
// ARM64 handler stub for KeyedLoad on a receiver with an indexed interceptor.
// Selected by the map-and-handler mechanism, so no receiver checks are needed
// here — only the key must be validated before calling the runtime.
void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
// Return address is in lr.
Label slow;
Register receiver = LoadDescriptor::ReceiverRegister();
Register key = LoadDescriptor::NameRegister();
// Check that the key is an array index, that is Uint32.
// A non-negative smi has both the tag bit and the sign bit clear.
__ TestAndBranchIfAnySet(key, kSmiTagMask | kSmiSignMask, &slow);
// Everything is fine, call runtime.
__ Push(receiver, key);
// Tail call to the interceptor runtime entry (2 stack arguments pushed
// above); control does not return here.
__ TailCallExternalReference(
ExternalReference(IC_Utility(IC::kLoadElementWithInterceptor),
masm->isolate()),
2, 1);
__ Bind(&slow);
// Non-smi or negative key: bail out to the KeyedLoadIC miss builtin.
PropertyAccessCompiler::TailCallBuiltin(
masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}
void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Stack layout on entry.
// jssp[0]: number of parameters (tagged)
......
......@@ -1289,11 +1289,6 @@ static void Generate_KeyedLoadIC_PreMonomorphic(MacroAssembler* masm) {
}
// Builtin trampoline: forwards to the IC's indexed-interceptor generator.
static void Generate_KeyedLoadIC_IndexedInterceptor(MacroAssembler* masm) {
KeyedLoadIC::GenerateIndexedInterceptor(masm);
}
// Builtin trampoline: forwards to the IC's sloppy-arguments generator.
static void Generate_KeyedLoadIC_SloppyArguments(MacroAssembler* masm) {
KeyedLoadIC::GenerateSloppyArguments(masm);
}
......
......@@ -89,8 +89,6 @@ enum BuiltinExtraArguments {
kNoExtraICState) \
V(KeyedLoadIC_Generic, KEYED_LOAD_IC, GENERIC, kNoExtraICState) \
V(KeyedLoadIC_String, KEYED_LOAD_IC, MEGAMORPHIC, kNoExtraICState) \
V(KeyedLoadIC_IndexedInterceptor, KEYED_LOAD_IC, MONOMORPHIC, \
kNoExtraICState) \
V(KeyedLoadIC_SloppyArguments, KEYED_LOAD_IC, MONOMORPHIC, kNoExtraICState) \
\
V(StoreIC_Setter_ForDeopt, STORE_IC, MONOMORPHIC, StoreIC::kStrictModeState) \
......
......@@ -38,6 +38,7 @@ namespace internal {
V(JSEntry) \
V(KeyedLoadICTrampoline) \
V(LoadICTrampoline) \
V(LoadIndexedInterceptor) \
V(MathPow) \
V(ProfileEntryHook) \
V(RecordWrite) \
......@@ -857,6 +858,20 @@ class FunctionPrototypeStub : public PlatformCodeStub {
};
// TODO(mvstanton): Translate to hydrogen code stub.
// Platform code stub that handles a KeyedLoad on a receiver whose map has an
// indexed interceptor. Reports itself as Code::HANDLER so the IC system can
// cache it keyed on the receiver map instead of using a dedicated builtin.
class LoadIndexedInterceptorStub : public PlatformCodeStub {
public:
explicit LoadIndexedInterceptorStub(Isolate* isolate)
: PlatformCodeStub(isolate) {}
// HANDLER kind lets this stub participate in the map-and-handler caches.
virtual Code::Kind GetCodeKind() const { return Code::HANDLER; }
virtual Code::StubType GetStubType() { return Code::FAST; }
// Uses the Load descriptor: receiver/name registers per LoadDescriptor.
DEFINE_CALL_INTERFACE_DESCRIPTOR(Load);
DEFINE_PLATFORM_CODE_STUB(LoadIndexedInterceptor, PlatformCodeStub);
};
class HandlerStub : public HydrogenCodeStub {
public:
virtual Code::Kind GetCodeKind() const { return Code::HANDLER; }
......
......@@ -659,6 +659,36 @@ void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
}
// IA-32 handler stub for KeyedLoad on a receiver with an indexed interceptor.
// Map-and-handler dispatch already guarantees the receiver shape, so only the
// key is validated before tail-calling the runtime.
void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
// Return address is on the stack.
Label slow;
Register receiver = LoadDescriptor::ReceiverRegister();
Register key = LoadDescriptor::NameRegister();
Register scratch = eax;
DCHECK(!scratch.is(receiver) && !scratch.is(key));
// Check that the key is an array index, that is Uint32.
// Both the smi tag bit and the sign bit must be clear.
__ test(key, Immediate(kSmiTagMask | kSmiSignMask));
__ j(not_zero, &slow);
// Everything is fine, call runtime.
// Slot the two arguments under the return address: pop it, push args,
// push it back.
__ pop(scratch);
__ push(receiver); // receiver
__ push(key); // key
__ push(scratch); // return address
// Perform tail call to the entry.
ExternalReference ref = ExternalReference(
IC_Utility(IC::kLoadElementWithInterceptor), masm->isolate());
__ TailCallExternalReference(ref, 2, 1);
__ bind(&slow);
// Non-smi or negative key: bail out to the KeyedLoadIC miss builtin.
PropertyAccessCompiler::TailCallBuiltin(
masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}
void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
// The key is in edx and the parameter count is in eax.
DCHECK(edx.is(ArgumentsAccessReadDescriptor::index()));
......
......@@ -639,48 +639,6 @@ void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
}
// ARM pre-handler implementation of KeyedLoad for indexed interceptors.
// Because it is not map-keyed, it must validate the receiver itself: smi
// check, then a map bit-field check that the receiver has an indexed
// interceptor and needs no access checks.
void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
// Return address is in lr.
Label slow;
Register receiver = LoadDescriptor::ReceiverRegister();
Register key = LoadDescriptor::NameRegister();
Register scratch1 = r3;
Register scratch2 = r4;
DCHECK(!scratch1.is(receiver) && !scratch1.is(key));
DCHECK(!scratch2.is(receiver) && !scratch2.is(key));
// Check that the receiver isn't a smi.
__ JumpIfSmi(receiver, &slow);
// Check that the key is an array index, that is Uint32.
__ NonNegativeSmiTst(key);
__ b(ne, &slow);
// Get the map of the receiver.
__ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
// Check that it has indexed interceptor and access checks
// are not enabled for this object.
// After masking with kSlowCaseBitFieldMask, exactly the interceptor bit
// must be set (the access-check bit must be clear).
__ ldrb(scratch2, FieldMemOperand(scratch1, Map::kBitFieldOffset));
__ and_(scratch2, scratch2, Operand(kSlowCaseBitFieldMask));
__ cmp(scratch2, Operand(1 << Map::kHasIndexedInterceptor));
__ b(ne, &slow);
// Everything is fine, call runtime.
__ Push(receiver, key); // Receiver, key.
// Perform tail call to the entry.
__ TailCallExternalReference(
ExternalReference(IC_Utility(kLoadElementWithInterceptor),
masm->isolate()),
2, 1);
__ bind(&slow);
// Any failed check falls back to the generic IC miss path.
GenerateMiss(masm);
}
void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
// Push receiver, key and value for runtime call.
__ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
......
......@@ -682,46 +682,6 @@ void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
}
// ARM64 pre-handler implementation of KeyedLoad for indexed interceptors.
// Performs its own receiver validation (smi check plus map bit-field tests)
// since it is not selected via a map-keyed handler.
void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
// Return address is in lr.
Label slow;
Register receiver = LoadDescriptor::ReceiverRegister();
Register key = LoadDescriptor::NameRegister();
Register scratch1 = x3;
Register scratch2 = x4;
DCHECK(!AreAliased(scratch1, scratch2, receiver, key));
// Check that the receiver isn't a smi.
__ JumpIfSmi(receiver, &slow);
// Check that the key is an array index, that is Uint32.
// A non-negative smi has both the tag bit and the sign bit clear.
__ TestAndBranchIfAnySet(key, kSmiTagMask | kSmiSignMask, &slow);
// Get the map of the receiver.
Register map = scratch1;
__ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
// Check that it has indexed interceptor and access checks
// are not enabled for this object.
DCHECK(kSlowCaseBitFieldMask == ((1 << Map::kIsAccessCheckNeeded) |
(1 << Map::kHasIndexedInterceptor)));
// Access-check bit must be clear, interceptor bit must be set.
__ Tbnz(scratch2, Map::kIsAccessCheckNeeded, &slow);
__ Tbz(scratch2, Map::kHasIndexedInterceptor, &slow);
// Everything is fine, call runtime.
__ Push(receiver, key);
__ TailCallExternalReference(
ExternalReference(IC_Utility(kLoadElementWithInterceptor),
masm->isolate()),
2, 1);
__ Bind(&slow);
// Any failed check falls back to the generic IC miss path.
GenerateMiss(masm);
}
void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
ASM_LOCATION("KeyedStoreIC::GenerateMiss");
......
......@@ -388,10 +388,11 @@ void ElementHandlerCompiler::CompileElementHandlers(
} else {
bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
ElementsKind elements_kind = receiver_map->elements_kind();
if (IsFastElementsKind(elements_kind) ||
IsExternalArrayElementsKind(elements_kind) ||
IsFixedTypedArrayElementsKind(elements_kind)) {
if (receiver_map->has_indexed_interceptor()) {
cached_stub = LoadIndexedInterceptorStub(isolate()).GetCode();
} else if (IsFastElementsKind(elements_kind) ||
IsExternalArrayElementsKind(elements_kind) ||
IsFixedTypedArrayElementsKind(elements_kind)) {
cached_stub = LoadFastElementStub(isolate(), is_js_array, elements_kind)
.GetCode();
} else if (elements_kind == SLOPPY_ARGUMENTS_ELEMENTS) {
......
......@@ -503,48 +503,6 @@ void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
}
// IA-32 pre-handler implementation of KeyedLoad for indexed interceptors.
// Validates the receiver itself (smi check plus map bit-field check) before
// tail-calling the interceptor runtime entry.
void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
// Return address is on the stack.
Label slow;
Register receiver = LoadDescriptor::ReceiverRegister();
Register key = LoadDescriptor::NameRegister();
Register scratch = eax;
DCHECK(!scratch.is(receiver) && !scratch.is(key));
// Check that the receiver isn't a smi.
__ JumpIfSmi(receiver, &slow);
// Check that the key is an array index, that is Uint32.
// Both the smi tag bit and the sign bit must be clear.
__ test(key, Immediate(kSmiTagMask | kSmiSignMask));
__ j(not_zero, &slow);
// Get the map of the receiver.
__ mov(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
// Check that it has indexed interceptor and access checks
// are not enabled for this object.
// After masking, exactly the interceptor bit must remain set.
__ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset));
__ and_(scratch, Immediate(kSlowCaseBitFieldMask));
__ cmp(scratch, Immediate(1 << Map::kHasIndexedInterceptor));
__ j(not_zero, &slow);
// Everything is fine, call runtime.
// Slot the two arguments under the return address.
__ pop(scratch);
__ push(receiver); // receiver
__ push(key); // key
__ push(scratch); // return address
// Perform tail call to the entry.
ExternalReference ref = ExternalReference(
IC_Utility(kLoadElementWithInterceptor), masm->isolate());
__ TailCallExternalReference(ref, 2, 1);
__ bind(&slow);
// Any failed check falls back to the generic IC miss path.
GenerateMiss(masm);
}
void KeyedLoadIC::GenerateSloppyArguments(MacroAssembler* masm) {
// The return address is on the stack.
Register receiver = LoadDescriptor::ReceiverRegister();
......
......@@ -94,9 +94,11 @@ Handle<Code> PropertyICCompiler::ComputeKeyedLoadMonomorphic(
ElementsKind elements_kind = receiver_map->elements_kind();
Handle<Code> stub;
if (receiver_map->has_fast_elements() ||
receiver_map->has_external_array_elements() ||
receiver_map->has_fixed_typed_array_elements()) {
if (receiver_map->has_indexed_interceptor()) {
stub = LoadIndexedInterceptorStub(isolate).GetCode();
} else if (receiver_map->has_fast_elements() ||
receiver_map->has_external_array_elements() ||
receiver_map->has_fixed_typed_array_elements()) {
stub = LoadFastElementStub(isolate,
receiver_map->instance_type() == JS_ARRAY_TYPE,
elements_kind).GetCode();
......
......@@ -1109,14 +1109,6 @@ static Handle<Object> TryConvertKey(Handle<Object> key, Isolate* isolate) {
Handle<Code> KeyedLoadIC::LoadElementStub(Handle<JSObject> receiver) {
// Don't handle megamorphic property accesses for INTERCEPTORS or CALLBACKS
// via megamorphic stubs, since they don't have a map in their relocation info
// and so the stubs can't be harvested for the object needed for a map check.
if (target()->type() != Code::NORMAL) {
TRACE_GENERIC_IC(isolate(), "KeyedIC", "non-NORMAL target type");
return generic_stub();
}
Handle<Map> receiver_map(receiver->map(), isolate());
MapHandleList target_receiver_maps;
if (target().is_identical_to(string_stub())) {
......@@ -1192,8 +1184,6 @@ MaybeHandle<Object> KeyedLoadIC::Load(Handle<Object> object,
if (receiver->elements()->map() ==
isolate()->heap()->sloppy_arguments_elements_map()) {
stub = sloppy_arguments_stub();
} else if (receiver->HasIndexedInterceptor()) {
stub = indexed_interceptor_stub();
} else if (!Object::ToSmi(isolate(), key).is_null() &&
(!target().is_identical_to(sloppy_arguments_stub()))) {
stub = LoadElementStub(receiver);
......
......@@ -414,7 +414,6 @@ class KeyedLoadIC : public LoadIC {
}
static void GenerateGeneric(MacroAssembler* masm);
static void GenerateString(MacroAssembler* masm);
static void GenerateIndexedInterceptor(MacroAssembler* masm);
static void GenerateSloppyArguments(MacroAssembler* masm);
// Bit mask to be tested against bit field for the cases when
......@@ -435,9 +434,6 @@ class KeyedLoadIC : public LoadIC {
private:
Handle<Code> generic_stub() const { return generic_stub(isolate()); }
Handle<Code> indexed_interceptor_stub() {
return isolate()->builtins()->KeyedLoadIC_IndexedInterceptor();
}
Handle<Code> sloppy_arguments_stub() {
return isolate()->builtins()->KeyedLoadIC_SloppyArguments();
}
......
......@@ -887,46 +887,6 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
}
// MIPS pre-handler implementation of KeyedLoad for indexed interceptors.
// Validates the receiver itself (smi check plus map bit-field check) before
// tail-calling the interceptor runtime entry.
void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
// Return address is in ra.
Label slow;
Register receiver = LoadDescriptor::ReceiverRegister();
Register key = LoadDescriptor::NameRegister();
Register scratch1 = a3;
Register scratch2 = t0;
DCHECK(!scratch1.is(receiver) && !scratch1.is(key));
DCHECK(!scratch2.is(receiver) && !scratch2.is(key));
// Check that the receiver isn't a smi.
__ JumpIfSmi(receiver, &slow);
// Check that the key is an array index, that is Uint32.
// Non-negative smi: both the tag bit and the sign bit are clear.
// Note t0 (scratch2) is used as the temporary here before its later use.
__ And(t0, key, Operand(kSmiTagMask | kSmiSignMask));
__ Branch(&slow, ne, t0, Operand(zero_reg));
// Get the map of the receiver.
__ lw(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
// Check that it has indexed interceptor and access checks
// are not enabled for this object.
// After masking, exactly the interceptor bit must remain set.
__ lbu(scratch2, FieldMemOperand(scratch1, Map::kBitFieldOffset));
__ And(scratch2, scratch2, Operand(kSlowCaseBitFieldMask));
__ Branch(&slow, ne, scratch2, Operand(1 << Map::kHasIndexedInterceptor));
// Everything is fine, call runtime.
__ Push(receiver, key); // Receiver, key.
// Perform tail call to the entry.
__ TailCallExternalReference(
ExternalReference(IC_Utility(kLoadElementWithInterceptor),
masm->isolate()),
2, 1);
__ bind(&slow);
// Any failed check falls back to the generic IC miss path.
GenerateMiss(masm);
}
void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
// Push receiver, key and value for runtime call.
__ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
......
......@@ -429,49 +429,6 @@ void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
}
// x64 pre-handler implementation of KeyedLoad for indexed interceptors.
// Validates the receiver itself (smi check plus map bit-field check) before
// tail-calling the interceptor runtime entry.
void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
// Return address is on the stack.
Label slow;
Register receiver = LoadDescriptor::ReceiverRegister();
Register key = LoadDescriptor::NameRegister();
Register scratch = rax;
DCHECK(!scratch.is(receiver) && !scratch.is(key));
// Check that the receiver isn't a smi.
__ JumpIfSmi(receiver, &slow);
// Check that the key is an array index, that is Uint32.
// The smi-value-size assert guarantees a non-negative smi fits in Uint32.
STATIC_ASSERT(kSmiValueSize <= 32);
__ JumpUnlessNonNegativeSmi(key, &slow);
// Get the map of the receiver.
__ movp(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
// Check that it has indexed interceptor and access checks
// are not enabled for this object.
// After masking, exactly the interceptor bit must remain set.
__ movb(scratch, FieldOperand(scratch, Map::kBitFieldOffset));
__ andb(scratch, Immediate(kSlowCaseBitFieldMask));
__ cmpb(scratch, Immediate(1 << Map::kHasIndexedInterceptor));
__ j(not_zero, &slow);
// Everything is fine, call runtime.
// Slot the two arguments under the return address.
__ PopReturnAddressTo(scratch);
__ Push(receiver); // receiver
__ Push(key); // key
__ PushReturnAddressFrom(scratch);
// Perform tail call to the entry.
__ TailCallExternalReference(
ExternalReference(IC_Utility(kLoadElementWithInterceptor),
masm->isolate()),
2, 1);
__ bind(&slow);
// Any failed check falls back to the generic IC miss path.
GenerateMiss(masm);
}
static void KeyedStoreGenerateGenericHelper(
MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow,
KeyedStoreCheckMap check_map, KeyedStoreIncrementLength increment_length) {
......
......@@ -1896,6 +1896,32 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
}
// MIPS handler stub for KeyedLoad on a receiver with an indexed interceptor.
// Map-and-handler dispatch already guarantees the receiver shape, so only the
// key is validated before tail-calling the runtime.
void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
// Return address is in ra.
Label slow;
Register receiver = LoadDescriptor::ReceiverRegister();
Register key = LoadDescriptor::NameRegister();
// Check that the key is an array index, that is Uint32.
// Non-negative smi: both the tag bit and the sign bit are clear.
__ And(t0, key, Operand(kSmiTagMask | kSmiSignMask));
__ Branch(&slow, ne, t0, Operand(zero_reg));
// Everything is fine, call runtime.
__ Push(receiver, key); // Receiver, key.
// Perform tail call to the entry.
__ TailCallExternalReference(
ExternalReference(IC_Utility(IC::kLoadElementWithInterceptor),
masm->isolate()),
2, 1);
__ bind(&slow);
// Non-smi or negative key: bail out to the KeyedLoadIC miss builtin.
PropertyAccessCompiler::TailCallBuiltin(
masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}
void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// sp[0] : number of parameters
// sp[4] : receiver displacement
......
......@@ -834,6 +834,37 @@ void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
}
// x64 handler stub for KeyedLoad on a receiver with an indexed interceptor.
// Map-and-handler dispatch already guarantees the receiver shape, so only the
// key is validated before tail-calling the runtime.
void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
// Return address is on the stack.
Label slow;
Register receiver = LoadDescriptor::ReceiverRegister();
Register key = LoadDescriptor::NameRegister();
Register scratch = rax;
DCHECK(!scratch.is(receiver) && !scratch.is(key));
// Check that the key is an array index, that is Uint32.
// The smi-value-size assert guarantees a non-negative smi fits in Uint32.
STATIC_ASSERT(kSmiValueSize <= 32);
__ JumpUnlessNonNegativeSmi(key, &slow);
// Everything is fine, call runtime.
// Slot the two arguments under the return address.
__ PopReturnAddressTo(scratch);
__ Push(receiver); // receiver
__ Push(key); // key
__ PushReturnAddressFrom(scratch);
// Perform tail call to the entry.
__ TailCallExternalReference(
ExternalReference(IC_Utility(IC::kLoadElementWithInterceptor),
masm->isolate()),
2, 1);
__ bind(&slow);
// Non-smi or negative key: bail out to the KeyedLoadIC miss builtin.
PropertyAccessCompiler::TailCallBuiltin(
masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}
void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// rsp[0] : return address
// rsp[8] : number of parameters
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment