Commit 10605879 authored by whesse@chromium.org's avatar whesse@chromium.org

Disable inline caching on X64 separately for loads, stores, and calls, while they are being implemented and tested one-by-one.

Disable inline caching on X64 separately for loads, stores, and calls, while they are being implemented and tested one-by-one.
Review URL: http://codereview.chromium.org/149673

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@2477 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent ce37937d
......@@ -1107,12 +1107,6 @@ bool Genesis::InstallNatives() {
global_context()->set_empty_script(*script);
}
#ifdef V8_HOST_ARCH_64_BIT
// TODO(X64): Remove this when inline caches work.
FLAG_use_ic = false;
#endif // V8_HOST_ARCH_64_BIT
if (FLAG_natives_file == NULL) {
// Without natives file, install default natives.
for (int i = Natives::GetDelayCount();
......
......@@ -153,6 +153,9 @@ static void GenerateCheckNonObjectOrLoaded(MacroAssembler* masm, Label* miss,
}
// The offset from the inlined patch site to the start of the
// inlined load instruction. It is 7 bytes (test eax, imm) plus
// 6 bytes (jne slow_label).
// NOTE(review): this is the ia32 value; the x64 port defines its own
// architecture-specific value for this constant.
const int LoadIC::kOffsetToLoadInstruction = 13;
......
......@@ -365,10 +365,13 @@ Object* CallIC::LoadFunction(State state,
return TypeError("undefined_method", object, name);
}
// TODO(X64): Enable inline cache for calls.
#ifndef V8_TARGET_ARCH_X64
// Lookup is valid: Update inline cache and stub cache.
if (FLAG_use_ic && lookup.IsLoaded()) {
UpdateCaches(&lookup, state, object, name);
}
#endif
// Get the property.
PropertyAttributes attr;
......@@ -609,10 +612,13 @@ Object* LoadIC::Load(State state, Handle<Object> object, Handle<String> name) {
}
}
// TODO(X64): Enable inline cache for load.
#ifndef V8_TARGET_ARCH_X64
// Update inline cache and stub cache.
if (FLAG_use_ic && lookup.IsLoaded()) {
UpdateCaches(&lookup, state, object, name);
}
#endif
PropertyAttributes attr;
if (lookup.IsValid() && lookup.type() == INTERCEPTOR) {
......@@ -737,7 +743,9 @@ Object* KeyedLoadIC::Load(State state,
return TypeError("non_object_property_load", object, name);
}
if (FLAG_use_ic) {
// TODO(X64): Enable specialized stubs for length and prototype lookup.
#ifndef V8_TARGET_ARCH_X64
if (false && FLAG_use_ic) {
// Use specialized code for getting the length of strings.
if (object->IsString() && name->Equals(Heap::length_symbol())) {
Handle<String> string = Handle<String>::cast(object);
......@@ -747,7 +755,7 @@ Object* KeyedLoadIC::Load(State state,
set_target(Code::cast(code));
#ifdef DEBUG
TraceIC("KeyedLoadIC", name, state, target());
#endif
#endif // DEBUG
return Smi::FromInt(string->length());
}
......@@ -759,7 +767,7 @@ Object* KeyedLoadIC::Load(State state,
set_target(Code::cast(code));
#ifdef DEBUG
TraceIC("KeyedLoadIC", name, state, target());
#endif
#endif // DEBUG
return JSArray::cast(*object)->length();
}
......@@ -772,10 +780,11 @@ Object* KeyedLoadIC::Load(State state,
set_target(Code::cast(code));
#ifdef DEBUG
TraceIC("KeyedLoadIC", name, state, target());
#endif
#endif // DEBUG
return Accessors::FunctionGetPrototype(*object, 0);
}
}
#endif // !V8_TARGET_ARCH_X64
// Check if the name is trivially convertible to an index and get
// the element or char if so.
......@@ -798,10 +807,13 @@ Object* KeyedLoadIC::Load(State state,
}
}
// TODO(X64): Enable inline caching for load.
#ifndef V8_TARGET_ARCH_X64
// Update the inline cache.
if (FLAG_use_ic && lookup.IsLoaded()) {
UpdateCaches(&lookup, state, object, name);
}
#endif
PropertyAttributes attr;
if (lookup.IsValid() && lookup.type() == INTERCEPTOR) {
......@@ -972,6 +984,10 @@ Object* StoreIC::Store(State state,
return *value;
}
// TODO(X64): Enable inline cache for StoreIC.
#ifdef V8_TARGET_ARCH_X64
USE(&LookupForWrite); // The compiler complains otherwise.
#else
// Lookup the property locally in the receiver.
if (FLAG_use_ic && !receiver->IsJSGlobalProxy()) {
LookupResult lookup;
......@@ -979,6 +995,7 @@ Object* StoreIC::Store(State state,
UpdateCaches(&lookup, state, receiver, name, value);
}
}
#endif
// Set the property.
return receiver->SetProperty(*name, *value, NONE);
......@@ -1097,10 +1114,13 @@ Object* KeyedStoreIC::Store(State state,
LookupResult lookup;
receiver->LocalLookup(*name, &lookup);
// TODO(X64): Enable inline cache for KeyedStoreIC.
#ifndef V8_TARGET_ARCH_X64
// Update inline cache and stub cache.
if (FLAG_use_ic && lookup.IsLoaded()) {
UpdateCaches(&lookup, state, receiver, name, value);
}
#endif
// Set the property.
return receiver->SetProperty(*name, *value, NONE);
......
......@@ -49,7 +49,8 @@ namespace internal {
ICU(StoreInterceptorProperty)
//
// IC is the base class for LoadIC, StoreIC and CallIC.
// IC is the base class for LoadIC, StoreIC, CallIC, KeyedLoadIC,
// and KeyedStoreIC.
//
class IC {
public:
......@@ -219,8 +220,8 @@ class LoadIC: public IC {
static void GenerateFunctionPrototype(MacroAssembler* masm);
// The offset from the inlined patch site to the start of the
// inlined load instruction. It is 7 bytes (test eax, imm) plus
// 6 bytes (jne slow_label).
// inlined load instruction. It is architecture-dependent, and not
// used on ARM.
static const int kOffsetToLoadInstruction;
private:
......
......@@ -2183,48 +2183,3 @@ const int RelocInfo::kApplyMask = 1 << RelocInfo::INTERNAL_REFERENCE;
} } // namespace v8::internal
// TODO(x64): Implement and move these to their correct cc-files:
#include "ast.h"
#include "bootstrapper.h"
#include "codegen-inl.h"
#include "cpu.h"
#include "debug.h"
#include "disasm.h"
#include "disassembler.h"
#include "frames-inl.h"
#include "x64/macro-assembler-x64.h"
#include "x64/regexp-macro-assembler-x64.h"
#include "ic-inl.h"
#include "log.h"
#include "macro-assembler.h"
#include "parser.h"
#include "regexp-macro-assembler.h"
#include "regexp-stack.h"
#include "register-allocator-inl.h"
#include "register-allocator.h"
#include "runtime.h"
#include "scopes.h"
#include "serialize.h"
#include "stub-cache.h"
#include "unicode.h"
namespace v8 {
namespace internal {
// NOTE(review): removed (-) side of the diff — this commit deletes the
// stub from its temporary home (see the "move these to their correct
// cc-files" TODO above) and re-adds it in debug-x64.cc.
void BreakLocationIterator::ClearDebugBreakAtReturn() {
UNIMPLEMENTED();
}
// NOTE(review): removed (-) side of the diff; re-added in debug-x64.cc.
bool BreakLocationIterator::IsDebugBreakAtReturn() {
UNIMPLEMENTED();
// Unreachable after UNIMPLEMENTED(); satisfies the non-void return.
return false;
}
// NOTE(review): removed (-) side of the diff; re-added in debug-x64.cc.
void BreakLocationIterator::SetDebugBreakAtReturn() {
UNIMPLEMENTED();
}
} } // namespace v8::internal
......@@ -80,6 +80,21 @@ void Debug::GenerateStubNoRegistersDebugBreak(MacroAssembler* masm) {
masm->int3(); // UNIMPLEMENTED
}
// Added (+) side of the diff: the stub now lives in debug-x64.cc.
void BreakLocationIterator::ClearDebugBreakAtReturn() {
// TODO(X64): Implement this when we start setting Debug breaks.
UNIMPLEMENTED();
}
// Added (+) side of the diff: the stub now lives in debug-x64.cc.
bool BreakLocationIterator::IsDebugBreakAtReturn() {
// TODO(X64): Implement this when we start setting Debug breaks.
UNIMPLEMENTED();
// Unreachable after UNIMPLEMENTED(); satisfies the non-void return.
return false;
}
// Added (+) side of the diff: still a pure stub — aborts if reached.
void BreakLocationIterator::SetDebugBreakAtReturn() {
UNIMPLEMENTED();
}
#endif // ENABLE_DEBUGGER_SUPPORT
} } // namespace v8::internal
......@@ -43,11 +43,11 @@ namespace internal {
void KeyedLoadIC::ClearInlinedVersion(Address address) {
// NOTE(review): interleaved diff — "UNIMPLEMENTED();" is the removed (-)
// line; the commit replaces the abort with a no-op body plus this TODO,
// so calling the function on x64 no longer crashes.
UNIMPLEMENTED();
// TODO(X64): Implement this when LoadIC is enabled.
}
void KeyedStoreIC::ClearInlinedVersion(Address address) {
// NOTE(review): interleaved diff — "UNIMPLEMENTED();" is the removed (-)
// line; after this commit the function is a no-op with a TODO.
UNIMPLEMENTED();
// TODO(X64): Implement this when LoadIC is enabled.
}
void KeyedStoreIC::RestoreInlinedVersion(Address address) {
......@@ -78,22 +78,32 @@ void KeyedLoadIC::Generate(MacroAssembler* masm,
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
// NOTE(review): interleaved diff — the int3/movq "debugging aid" lines are
// the removed (-) side; the added (+) replacement tail-calls the generic
// runtime property load below.
masm->int3(); // UNIMPLEMENTED.
masm->movq(kScratchRegister, Immediate(0xC0AB)); // Debugging aid.
// ----------- S t a t e -------------
// -- rsp[0] : return address
// -- rsp[8] : name
// -- rsp[16] : receiver
// -----------------------------------
Generate(masm, ExternalReference(Runtime::kKeyedGetProperty));
}
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
// NOTE(review): interleaved diff — the int3/movq lines are the removed (-)
// side; the added (+) code forwards misses to the generic runtime lookup.
masm->int3(); // UNIMPLEMENTED.
masm->movq(kScratchRegister, Immediate(0xC1AB)); // Debugging aid.
// ----------- S t a t e -------------
// -- rsp[0] : return address
// -- rsp[8] : name
// -- rsp[16] : receiver
// -----------------------------------
Generate(masm, ExternalReference(Runtime::kKeyedGetProperty));
}
bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) {
// NOTE(review): interleaved diff — "UNIMPLEMENTED();" is the removed (-)
// line; after this commit the function deliberately reports failure.
UNIMPLEMENTED();
// Never patch the map in the map check, so the check always fails.
return false;
}
bool KeyedStoreIC::PatchInlinedStore(Address address, Object* map) {
// NOTE(review): interleaved diff — "UNIMPLEMENTED();" is the removed (-)
// line; after this commit the function deliberately reports failure.
UNIMPLEMENTED();
// Never patch the map in the map check, so the check always fails.
return false;
}
......@@ -163,13 +173,11 @@ void KeyedStoreIC::Generate(MacroAssembler* masm, ExternalReference const& f) {
}
void KeyedStoreIC::GenerateExtendStorage(MacroAssembler* masm) {
// NOTE(review): interleaved diff — int3/movq are removed (-); the stub now
// jumps to the KeyedStoreIC miss handler instead of trapping.
masm->int3(); // UNIMPLEMENTED.
masm->movq(kScratchRegister, Immediate(0xC2AB)); // Debugging aid.
Generate(masm, ExternalReference(IC_Utility(kKeyedStoreIC_Miss)));
}
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
// NOTE(review): interleaved diff — int3/movq are removed (-); the stub now
// jumps to the KeyedStoreIC miss handler instead of trapping.
masm->int3(); // UNIMPLEMENTED.
masm->movq(kScratchRegister, Immediate(0xC3AB)); // Debugging aid.
Generate(masm, ExternalReference(IC_Utility(kKeyedStoreIC_Miss)));
}
Object* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
......@@ -228,20 +236,24 @@ void CallIC::Generate(MacroAssembler* masm,
__ InvokeFunction(rdi, actual, JUMP_FUNCTION);
}
void CallIC::GenerateMegamorphic(MacroAssembler* a, int b) {
UNIMPLEMENTED();
void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
// Cache miss: Jump to runtime.
Generate(masm, argc, ExternalReference(IC_Utility(kCallIC_Miss)));
}
void CallIC::GenerateNormal(MacroAssembler* a, int b) {
UNIMPLEMENTED();
void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
// Cache miss: Jump to runtime.
Generate(masm, argc, ExternalReference(IC_Utility(kCallIC_Miss)));
}
// The offset from the inlined patch site to the start of the
// inlined load instruction.
// NOTE(review): x64-specific value (the ia32 file uses 13: 7-byte test
// plus 6-byte jne) — confirm 20 against the x64 patch-site code sequence.
const int LoadIC::kOffsetToLoadInstruction = 20;
void LoadIC::ClearInlinedVersion(Address address) {
// NOTE(review): interleaved diff — "UNIMPLEMENTED();" is the removed (-)
// line; after this commit the function is a no-op with a TODO.
UNIMPLEMENTED();
// TODO(X64): Implement this when LoadIC is enabled.
}
......@@ -266,33 +278,27 @@ void LoadIC::Generate(MacroAssembler* masm, ExternalReference const& f) {
void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
// NOTE(review): interleaved diff — int3/movq are removed (-); the stub now
// falls through to the LoadIC miss handler.
masm->int3(); // UNIMPLEMENTED.
masm->movq(kScratchRegister, Immediate(0xC4AB)); // Debugging aid.
Generate(masm, ExternalReference(IC_Utility(kLoadIC_Miss)));
}
void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
// NOTE(review): interleaved diff — int3/movq are removed (-); the stub now
// falls through to the LoadIC miss handler.
masm->int3(); // UNIMPLEMENTED.
masm->movq(kScratchRegister, Immediate(0xC5AB)); // Debugging aid.
Generate(masm, ExternalReference(IC_Utility(kLoadIC_Miss)));
}
void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
// NOTE(review): interleaved diff — int3/movq are removed (-); the stub now
// falls through to the LoadIC miss handler.
masm->int3(); // UNIMPLEMENTED.
masm->movq(kScratchRegister, Immediate(0xC6AB)); // Debugging aid.
Generate(masm, ExternalReference(IC_Utility(kLoadIC_Miss)));
}
void LoadIC::GenerateMiss(MacroAssembler* masm) {
// NOTE(review): interleaved diff — int3/movq are removed (-); misses go to
// the LoadIC runtime miss handler.
masm->int3(); // UNIMPLEMENTED.
masm->movq(kScratchRegister, Immediate(0xC7AB)); // Debugging aid.
Generate(masm, ExternalReference(IC_Utility(kLoadIC_Miss)));
}
void LoadIC::GenerateNormal(MacroAssembler* masm) {
// NOTE(review): interleaved diff — int3/movq are removed (-); the stub now
// falls through to the LoadIC miss handler.
masm->int3(); // UNIMPLEMENTED.
masm->movq(kScratchRegister, Immediate(0xC8AB)); // Debugging aid.
Generate(masm, ExternalReference(IC_Utility(kLoadIC_Miss)));
}
void LoadIC::GenerateStringLength(MacroAssembler* masm) {
// NOTE(review): interleaved diff — int3/movq are removed (-); the stub now
// falls through to the LoadIC miss handler.
masm->int3(); // UNIMPLEMENTED.
masm->movq(kScratchRegister, Immediate(0xC9AB)); // Debugging aid.
Generate(masm, ExternalReference(IC_Utility(kLoadIC_Miss)));
}
bool LoadIC::PatchInlinedLoad(Address address, Object* map, int index) {
......@@ -319,13 +325,11 @@ void StoreIC::Generate(MacroAssembler* masm, ExternalReference const& f) {
}
void StoreIC::GenerateExtendStorage(MacroAssembler* masm) {
// NOTE(review): interleaved diff — int3/movq are removed (-); the stub now
// jumps to the StoreIC miss handler instead of trapping.
masm->int3(); // UNIMPLEMENTED.
masm->movq(kScratchRegister, Immediate(0xCAAB)); // Debugging aid.
Generate(masm, ExternalReference(IC_Utility(kStoreIC_Miss)));
}
void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
// NOTE(review): interleaved diff — int3/movq are removed (-); the stub now
// jumps to the StoreIC miss handler instead of trapping.
masm->int3(); // UNIMPLEMENTED.
masm->movq(kScratchRegister, Immediate(0xCBAB)); // Debugging aid.
Generate(masm, ExternalReference(IC_Utility(kStoreIC_Miss)));
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment