Commit 094d17ff authored by ager@chromium.org

x64: Generate code for loading from, storing to and calling properties.

Review URL: http://codereview.chromium.org/141056

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@2245 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent ad970953
......@@ -141,6 +141,9 @@ static void GenerateCheckNonObjectOrLoaded(MacroAssembler* masm, Label* miss,
}
const int LoadIC::kOffsetToLoadInstruction = 13;
void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- ecx : name
......
......@@ -221,7 +221,7 @@ class LoadIC: public IC {
// The offset from the inlined patch site to the start of the
// inlined load instruction. It is 7 bytes (test eax, imm) plus
// 6 bytes (jne slow_label).
static const int kOffsetToLoadInstruction = 13;
static const int kOffsetToLoadInstruction;
private:
static void Generate(MacroAssembler* masm, const ExternalReference& f);
......
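A note on the constant being moved out of the header: the 13-byte figure above, and the 20-byte figure the x64 file defines later in this change, are just the byte lengths of the instructions between the patch site and the load. A minimal sketch of that arithmetic; the per-instruction x64 sizes are assumptions for illustration, only the totals 13 and 20 appear in the patch.

// Hypothetical size arithmetic; only the totals are taken from this change.
constexpr int kIa32MapCheck = 7;    // patched compare/test with a 32-bit immediate (per the comment above)
constexpr int kIa32Jne = 6;         // jne slow_label, rel32 form
static_assert(kIa32MapCheck + kIa32Jne == 13, "ia32 LoadIC::kOffsetToLoadInstruction");

constexpr int kX64MoveMapImm = 10;  // movq scratch, imm64 (assumed encoding)
constexpr int kX64CmpMem = 4;       // cmpq [receiver + map offset], scratch (assumed encoding)
constexpr int kX64Jne = 6;          // jne rel32
static_assert(kX64MoveMapImm + kX64CmpMem + kX64Jne == 20, "x64 LoadIC::kOffsetToLoadInstruction");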
......@@ -1875,16 +1875,4 @@ void BreakLocationIterator::SetDebugBreakAtReturn() {
UNIMPLEMENTED();
}
void CallIC::Generate(MacroAssembler* a, int b, ExternalReference const& c) {
UNIMPLEMENTED();
}
void CallIC::GenerateMegamorphic(MacroAssembler* a, int b) {
UNIMPLEMENTED();
}
void CallIC::GenerateNormal(MacroAssembler* a, int b) {
UNIMPLEMENTED();
}
} } // namespace v8::internal
......@@ -29,9 +29,12 @@
#include "v8.h"
#include "bootstrapper.h"
// #include "macro-assembler.h"
#include "codegen-inl.h"
#include "debug.h"
#include "ic-inl.h"
#include "parser.h"
#include "register-allocator-inl.h"
#include "scopes.h"
// TEST
#include "compiler.h"
......@@ -167,6 +170,15 @@ void CodeGenerator::TestCodeGenerator() {
" // return test_recursion_with_base(0, 0, 0, 47);\n"
" var x_value = 42;"
" var o = { x: x_value };"
" o.x = 43;"
" o.x;"
" var x_string = 'x';"
" o[x_string] = 44;"
" o[x_string];"
" o.f = function() { return 45; };"
" o.f();"
" var f_string = 'f';"
" o[f_string]();"
" var a = [ 1, 2, 3 ];"
" var x = true ? 42 : 32;"
" return test_if_then_else(0, 46, 47);"
......@@ -1156,12 +1168,20 @@ void CodeGenerator::VisitAssignment(Assignment* node) {
}
void CodeGenerator::VisitThrow(Throw* a) {
// UNIMPLEMENTED();
void CodeGenerator::VisitThrow(Throw* node) {
Comment cmnt(masm_, "[ Throw");
CodeForStatementPosition(node);
Load(node->exception());
Result result = frame_->CallRuntime(Runtime::kThrow, 1);
frame_->Push(&result);
}
void CodeGenerator::VisitProperty(Property* a) {
UNIMPLEMENTED();
void CodeGenerator::VisitProperty(Property* node) {
Comment cmnt(masm_, "[ Property");
Reference property(this, node);
property.GetValue(typeof_state());
}
......@@ -1244,8 +1264,6 @@ void CodeGenerator::VisitCall(Call* node) {
CallWithArguments(args, node->position());
*/
} else if (property != NULL) {
UNIMPLEMENTED();
/*
// Check if the key is a literal string.
Literal* literal = property->key()->AsLiteral();
......@@ -1293,7 +1311,7 @@ void CodeGenerator::VisitCall(Call* node) {
// Call the function.
CallWithArguments(args, node->position());
}
*/
} else {
// ----------------------------------
// JavaScript example: 'foo(1, 2, 3)' // foo is not global
......@@ -1315,10 +1333,12 @@ void CodeGenerator::VisitCallEval(CallEval* a) {
UNIMPLEMENTED();
}
void CodeGenerator::VisitCallNew(CallNew* a) {
UNIMPLEMENTED();
}
void CodeGenerator::VisitCallRuntime(CallRuntime* a) {
UNIMPLEMENTED();
}
......@@ -1632,136 +1652,6 @@ void CodeGenerator::UnloadReference(Reference* ref) {
}
void Reference::SetValue(InitState init_state) {
ASSERT(cgen_->HasValidEntryRegisters());
ASSERT(!is_illegal());
MacroAssembler* masm = cgen_->masm();
switch (type_) {
case SLOT: {
Comment cmnt(masm, "[ Store to Slot");
Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
ASSERT(slot != NULL);
cgen_->StoreToSlot(slot, init_state);
break;
}
// TODO(X64): Make cases other than SLOT work.
/*
case NAMED: {
Comment cmnt(masm, "[ Store to named Property");
cgen_->frame()->Push(GetName());
Result answer = cgen_->frame()->CallStoreIC();
cgen_->frame()->Push(&answer);
break;
}
case KEYED: {
Comment cmnt(masm, "[ Store to keyed Property");
// Generate inlined version of the keyed store if the code is in
// a loop and the key is likely to be a smi.
Property* property = expression()->AsProperty();
ASSERT(property != NULL);
SmiAnalysis* key_smi_analysis = property->key()->type();
if (cgen_->loop_nesting() > 0 && key_smi_analysis->IsLikelySmi()) {
Comment cmnt(masm, "[ Inlined store to keyed Property");
// Get the receiver, key and value into registers.
Result value = cgen_->frame()->Pop();
Result key = cgen_->frame()->Pop();
Result receiver = cgen_->frame()->Pop();
Result tmp = cgen_->allocator_->Allocate();
ASSERT(tmp.is_valid());
// Determine whether the value is a constant before putting it
// in a register.
bool value_is_constant = value.is_constant();
// Make sure that value, key and receiver are in registers.
value.ToRegister();
key.ToRegister();
receiver.ToRegister();
DeferredReferenceSetKeyedValue* deferred =
new DeferredReferenceSetKeyedValue(value.reg(),
key.reg(),
receiver.reg());
// Check that the value is a smi if it is not a constant. We
// can skip the write barrier for smis and constants.
if (!value_is_constant) {
__ test(value.reg(), Immediate(kSmiTagMask));
deferred->Branch(not_zero);
}
// Check that the key is a non-negative smi.
__ test(key.reg(), Immediate(kSmiTagMask | 0x80000000));
deferred->Branch(not_zero);
// Check that the receiver is not a smi.
__ test(receiver.reg(), Immediate(kSmiTagMask));
deferred->Branch(zero);
// Check that the receiver is a JSArray.
__ mov(tmp.reg(),
FieldOperand(receiver.reg(), HeapObject::kMapOffset));
__ movzx_b(tmp.reg(),
FieldOperand(tmp.reg(), Map::kInstanceTypeOffset));
__ cmp(tmp.reg(), JS_ARRAY_TYPE);
deferred->Branch(not_equal);
// Check that the key is within bounds. Both the key and the
// length of the JSArray are smis.
__ cmp(key.reg(),
FieldOperand(receiver.reg(), JSArray::kLengthOffset));
deferred->Branch(greater_equal);
// Get the elements array from the receiver and check that it
// is not a dictionary.
__ mov(tmp.reg(),
FieldOperand(receiver.reg(), JSObject::kElementsOffset));
// Bind the deferred code patch site to be able to locate the
// fixed array map comparison. When debugging, we patch this
// comparison to always fail so that we will hit the IC call
// in the deferred code which will allow the debugger to
// break for fast case stores.
__ bind(deferred->patch_site());
__ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
Immediate(Factory::fixed_array_map()));
deferred->Branch(not_equal);
// Store the value.
__ mov(Operand(tmp.reg(),
key.reg(),
times_2,
Array::kHeaderSize - kHeapObjectTag),
value.reg());
__ IncrementCounter(&Counters::keyed_store_inline, 1);
deferred->BindExit();
cgen_->frame()->Push(&receiver);
cgen_->frame()->Push(&key);
cgen_->frame()->Push(&value);
} else {
Result answer = cgen_->frame()->CallKeyedStoreIC();
// Make sure that we do not have a test instruction after the
// call. A test instruction after the call is used to
// indicate that we have generated an inline version of the
// keyed store.
__ nop();
cgen_->frame()->Push(&answer);
}
break;
}
*/
default:
UNREACHABLE();
}
}
Operand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
// Currently, this assertion will fail if we try to assign to
// a constant variable that is constant because it is read-only
......@@ -2057,20 +1947,241 @@ void CodeGenerator::LoadGlobalReceiver() {
frame_->Push(&temp);
}
// Emit a LoadIC call to get the value from receiver and leave it in
// dst. The receiver register is restored after the call.
class DeferredReferenceGetNamedValue: public DeferredCode {
public:
DeferredReferenceGetNamedValue(Register dst,
Register receiver,
Handle<String> name)
: dst_(dst), receiver_(receiver), name_(name) {
set_comment("[ DeferredReferenceGetNamedValue");
}
virtual void Generate();
#undef __
Label* patch_site() { return &patch_site_; }
private:
Label patch_site_;
Register dst_;
Register receiver_;
Handle<String> name_;
};
// End of CodeGenerator implementation.
// -----------------------------------------------------------------------------
// Implementation of stubs.
void DeferredReferenceGetNamedValue::Generate() {
__ push(receiver_);
__ Move(rcx, name_);
Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
__ Call(ic, RelocInfo::CODE_TARGET);
// The call must be followed by a test rax instruction to indicate
// that the inobject property case was inlined.
//
// Store the delta to the map check instruction here in the test
// instruction. Use masm_-> instead of the __ macro since the
// latter can't return a value.
int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
// Here we use masm_-> instead of the __ macro because this is the
// instruction that gets patched and coverage code gets in the way.
masm_->testq(rax, Immediate(-delta_to_patch_site));
__ IncrementCounter(&Counters::named_load_inline_miss, 1);
if (!dst_.is(rax)) __ movq(dst_, rax);
__ pop(receiver_);
}
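The testq emitted above does no useful comparison; its only purpose is to smuggle the distance back to the patch site into the instruction stream, so the IC machinery can later locate and rewrite the inlined map check. A standalone model of that encoding scheme (not V8 code; buffer layout and the opcode byte are illustrative only):

#include <cassert>
#include <cstdint>
#include <vector>

// Standalone model of the "delta in the marker immediate" trick: right after
// the IC call, one marker instruction is emitted whose 32-bit immediate holds
// the negated distance back to the patch site, so a patcher that knows the
// call's return address can find the inlined map check without a side table.
int main() {
  std::vector<uint8_t> code(64, 0x90);   // pretend code buffer filled with nops
  const int patch_site = 8;              // where the inlined map check starts
  const int marker = 30;                 // just past the IC call (its return address)

  // Emit the marker: one opcode byte, then the 32-bit immediate (-delta).
  int32_t delta = marker - patch_site;
  uint32_t imm = static_cast<uint32_t>(-delta);
  code[marker] = 0xA9;                   // illustrative opcode byte (a test-style marker)
  for (int i = 0; i < 4; ++i) code[marker + 1 + i] = (imm >> (8 * i)) & 0xFF;

  // Patcher side: read the immediate back and walk to the patch site.
  uint32_t raw = 0;
  for (int i = 0; i < 4; ++i) raw |= static_cast<uint32_t>(code[marker + 1 + i]) << (8 * i);
  int recovered = marker + static_cast<int32_t>(raw);
  assert(recovered == patch_site);
  return 0;
}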
// Stub classes have public member named masm, not masm_.
#undef __
#define __ ACCESS_MASM(masm)
Handle<String> Reference::GetName() {
ASSERT(type_ == NAMED);
Property* property = expression_->AsProperty();
if (property == NULL) {
// Global variable reference treated as a named property reference.
VariableProxy* proxy = expression_->AsVariableProxy();
ASSERT(proxy->AsVariable() != NULL);
ASSERT(proxy->AsVariable()->is_global());
return proxy->name();
} else {
Literal* raw_name = property->key()->AsLiteral();
ASSERT(raw_name != NULL);
return Handle<String>(String::cast(*raw_name->handle()));
}
}
void Reference::GetValue(TypeofState typeof_state) {
UNIMPLEMENTED();
ASSERT(!cgen_->in_spilled_code());
ASSERT(cgen_->HasValidEntryRegisters());
ASSERT(!is_illegal());
MacroAssembler* masm = cgen_->masm();
switch (type_) {
case SLOT: {
Comment cmnt(masm, "[ Load from Slot");
Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
ASSERT(slot != NULL);
cgen_->LoadFromSlot(slot, typeof_state);
break;
}
case NAMED: {
// TODO(1241834): Make sure that it is safe to ignore the
// distinction between expressions in a typeof and not in a
// typeof. If there is a chance that reference errors can be
// thrown below, we must distinguish between the two kinds of
// loads (typeof expression loads must not throw a reference
// error).
Variable* var = expression_->AsVariableProxy()->AsVariable();
bool is_global = var != NULL;
ASSERT(!is_global || var->is_global());
// Do not inline the inobject property case for loads from the global
// object. Also do not inline for unoptimized code. This saves time
// in the code generator. Unoptimized code is toplevel code or code
// that is not in a loop.
if (is_global ||
cgen_->scope()->is_global_scope() ||
cgen_->loop_nesting() == 0) {
Comment cmnt(masm, "[ Load from named Property");
cgen_->frame()->Push(GetName());
RelocInfo::Mode mode = is_global
? RelocInfo::CODE_TARGET_CONTEXT
: RelocInfo::CODE_TARGET;
Result answer = cgen_->frame()->CallLoadIC(mode);
// A test rax instruction following the call signals that the
// inobject property case was inlined. Ensure that there is not
// a test rax instruction here.
__ nop();
cgen_->frame()->Push(&answer);
} else {
// Inline the inobject property case.
Comment cmnt(masm, "[ Inlined named property load");
Result receiver = cgen_->frame()->Pop();
receiver.ToRegister();
Result value = cgen_->allocator()->Allocate();
ASSERT(value.is_valid());
DeferredReferenceGetNamedValue* deferred =
new DeferredReferenceGetNamedValue(value.reg(),
receiver.reg(),
GetName());
// Check that the receiver is a heap object.
__ testq(receiver.reg(), Immediate(kSmiTagMask));
deferred->Branch(zero);
__ bind(deferred->patch_site());
// This is the map check instruction that will be patched (so we can't
// use the double underscore macro that may insert instructions).
// Initially use an invalid map to force a failure.
masm->Move(kScratchRegister, Factory::null_value());
masm->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
kScratchRegister);
// This branch is always a forwards branch so it's always a fixed
// size which allows the assert below to succeed and patching to work.
deferred->Branch(not_equal);
// The delta from the patch label to the load offset must be
// statically known.
ASSERT(masm->SizeOfCodeGeneratedSince(deferred->patch_site()) ==
LoadIC::kOffsetToLoadInstruction);
// The initial (invalid) offset has to be large enough to force
// a 32-bit instruction encoding to allow patching with an
// arbitrary offset. Use kMaxInt (minus kHeapObjectTag).
int offset = kMaxInt;
masm->movq(value.reg(), FieldOperand(receiver.reg(), offset));
__ IncrementCounter(&Counters::named_load_inline, 1);
deferred->BindExit();
cgen_->frame()->Push(&receiver);
cgen_->frame()->Push(&value);
}
break;
}
case KEYED: {
// TODO(1241834): Make sure that it is safe to ignore the
// distinction between expressions in a typeof and not in a typeof.
Comment cmnt(masm, "[ Load from keyed Property");
Variable* var = expression_->AsVariableProxy()->AsVariable();
bool is_global = var != NULL;
ASSERT(!is_global || var->is_global());
// Inline array load code if inside of a loop. We do not know
// the receiver map yet, so we initially generate the code with
// a check against an invalid map. In the inline cache code, we
// patch the map check if appropriate.
// TODO(x64): Implement inlined loads for keyed properties.
// Comment cmnt(masm, "[ Load from keyed Property");
RelocInfo::Mode mode = is_global
? RelocInfo::CODE_TARGET_CONTEXT
: RelocInfo::CODE_TARGET;
Result answer = cgen_->frame()->CallKeyedLoadIC(mode);
// Make sure that we do not have a test instruction after the
// call. A test instruction after the call is used to
// indicate that we have generated an inline version of the
// keyed load. The explicit nop instruction is here because
// the push that follows might be peep-hole optimized away.
__ nop();
cgen_->frame()->Push(&answer);
break;
}
default:
UNREACHABLE();
}
}
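The kMaxInt dummy offset in the NAMED case above exists only to force the assembler into the 32-bit displacement form of the load, so the patcher can later write any real in-object field offset over it. A small sketch of that encoding-width concern, assuming the usual x86 rule that displacements in [-128, 127] get the one-byte form and assuming V8's heap-object tag of 1:

#include <cassert>
#include <climits>

// Sketch: a memory-operand displacement is encoded in 1 byte when it fits in
// a signed 8-bit range, otherwise in 4 bytes.  Patching needs the 4-byte
// form, so the dummy offset must be large; kMaxInt certainly is.
static bool FitsInDisp8(long long displacement) {
  return displacement >= -128 && displacement <= 127;
}

int main() {
  const int kHeapObjectTag = 1;                                  // assumed V8 pointer tag
  const long long dummy = (long long)INT_MAX - kHeapObjectTag;   // FieldOperand subtracts the tag
  assert(!FitsInDisp8(dummy));   // forces the patchable 32-bit displacement
  assert(FitsInDisp8(0));        // a zero dummy would not
  return 0;
}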
void Reference::SetValue(InitState init_state) {
ASSERT(cgen_->HasValidEntryRegisters());
ASSERT(!is_illegal());
MacroAssembler* masm = cgen_->masm();
switch (type_) {
case SLOT: {
Comment cmnt(masm, "[ Store to Slot");
Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
ASSERT(slot != NULL);
cgen_->StoreToSlot(slot, init_state);
break;
}
case NAMED: {
Comment cmnt(masm, "[ Store to named Property");
cgen_->frame()->Push(GetName());
Result answer = cgen_->frame()->CallStoreIC();
cgen_->frame()->Push(&answer);
break;
}
case KEYED: {
Comment cmnt(masm, "[ Store to keyed Property");
// Generate inlined version of the keyed store if the code is in
// a loop and the key is likely to be a smi.
Property* property = expression()->AsProperty();
ASSERT(property != NULL);
// TODO(x64): Implement inlined version of keyed stores.
Result answer = cgen_->frame()->CallKeyedStoreIC();
// Make sure that we do not have a test instruction after the
// call. A test instruction after the call is used to
// indicate that we have generated an inline version of the
// keyed store.
__ nop();
cgen_->frame()->Push(&answer);
break;
}
default:
UNREACHABLE();
}
}
......
......@@ -80,8 +80,9 @@ void ExitFrame::Iterate(ObjectVisitor* a) const {
}
byte* InternalFrame::GetCallerStackPointer() const {
UNIMPLEMENTED();
return NULL;
// Internal frames have no arguments. The stack pointer of the
// caller is at a fixed offset from the frame pointer.
return fp() + StandardFrameConstants::kCallerSPOffset;
}
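The fixed offset used here is just the standard frame shape: the caller's stack pointer sits immediately above the return address and the saved frame pointer. A sketch of that layout arithmetic; the concrete constant is an assumption about StandardFrameConstants, not something this hunk shows.

// Assumed standard-frame layout relative to the frame pointer (fp) on x64:
//   fp[0]  : caller's saved frame pointer
//   fp[8]  : return address
//   fp[16] : first slot belonging to the caller (its stack pointer at the call)
constexpr int kPointerSize = 8;
constexpr int kCallerFPOffset = 0 * kPointerSize;
constexpr int kCallerPCOffset = 1 * kPointerSize;
constexpr int kCallerSPOffset = 2 * kPointerSize;  // what GetCallerStackPointer adds to fp()
static_assert(kCallerSPOffset == 16, "caller SP is two slots above fp");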
byte* JavaScriptFrame::GetCallerStackPointer() const {
......
......@@ -54,11 +54,29 @@ void KeyedStoreIC::RestoreInlinedVersion(Address address) {
UNIMPLEMENTED();
}
void KeyedLoadIC::Generate(MacroAssembler* masm,
ExternalReference const& f) {
masm->int3(); // UNIMPLEMENTED.
// ----------- S t a t e -------------
// -- rsp[0] : return address
// -- rsp[8] : name
// -- rsp[16] : receiver
// -----------------------------------
__ movq(rax, Operand(rsp, kPointerSize));
__ movq(rcx, Operand(rsp, 2 * kPointerSize));
// Move the return address below the arguments.
__ pop(rbx);
__ push(rcx);
__ push(rax);
__ push(rbx);
// Perform tail call to the entry.
__ TailCallRuntime(f, 2);
}
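The pop/push dance above rebuilds the frame so the runtime entry finds its two arguments directly under the return address before the tail call. A standalone trace of the resulting layout (plain C++, not V8 code; the vector's back() plays the role of the stack top):

#include <cassert>
#include <string>
#include <vector>

int main() {
  // Incoming keyed-load frame: rsp[0] = ret, rsp[8] = name, rsp[16] = receiver.
  std::vector<std::string> stack = {"receiver", "name", "ret"};  // back() is the top

  std::string rax = stack[stack.size() - 2];   // movq rax, [rsp + 8]   (name)
  std::string rcx = stack[stack.size() - 3];   // movq rcx, [rsp + 16]  (receiver)
  std::string rbx = stack.back();              // pop rbx               (return address)
  stack.pop_back();
  stack.push_back(rcx);                        // push rcx
  stack.push_back(rax);                        // push rax
  stack.push_back(rbx);                        // push rbx

  // What TailCallRuntime(f, 2) now sees: return address on top, name at
  // rsp[8] and receiver at rsp[16] right below it.
  const auto top = stack.size() - 1;
  assert(stack[top] == "ret");
  assert(stack[top - 1] == "name");
  assert(stack[top - 2] == "receiver");
  return 0;
}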
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
masm->int3(); // UNIMPLEMENTED.
}
......@@ -124,7 +142,22 @@ Object* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
}
void KeyedStoreIC::Generate(MacroAssembler* masm, ExternalReference const& f) {
masm->int3(); // UNIMPLEMENTED.
// ----------- S t a t e -------------
// -- rax : value
// -- rsp[0] : return address
// -- rsp[8] : key
// -- rsp[16] : receiver
// -----------------------------------
// Move the return address below the arguments.
__ pop(rcx);
__ push(Operand(rsp, 1 * kPointerSize));
__ push(Operand(rsp, 1 * kPointerSize));
__ push(rax);
__ push(rcx);
// Do tail-call to runtime routine.
__ TailCallRuntime(f, 3);
}
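The two push(Operand(rsp, 1 * kPointerSize)) lines look like they duplicate the same slot, but each push lowers rsp before the next operand is evaluated, so the first copies the receiver and the second copies the key. A small simulation of that addressing behavior (again not V8 code; the operand is read against the pre-push stack pointer, which is the behavior this sequence relies on):

#include <cassert>
#include <string>
#include <vector>

// Model: back() is the stack top; push_mem reads the slot "words" below the
// current top (i.e. [rsp + words * 8] before the push) and pushes that value.
static void push_mem(std::vector<std::string>* stack, int words) {
  std::string value = (*stack)[stack->size() - 1 - words];
  stack->push_back(value);
}

int main() {
  // Keyed-store frame after "pop rcx": rsp[0] = key, rsp[8] = receiver.
  std::vector<std::string> stack = {"receiver", "key"};

  push_mem(&stack, 1);        // push [rsp + 8] -> copies the receiver
  push_mem(&stack, 1);        // push [rsp + 8] -> rsp has moved, so this copies the key
  stack.push_back("value");   // push rax
  stack.push_back("ret");     // push rcx (return address)

  // Layout handed to TailCallRuntime(f, 3): ret, value, key, receiver, ...
  const auto top = stack.size() - 1;
  assert(stack[top] == "ret");
  assert(stack[top - 1] == "value");
  assert(stack[top - 2] == "key");
  assert(stack[top - 3] == "receiver");
  return 0;
}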
void KeyedStoreIC::GenerateExtendStorage(MacroAssembler* masm) {
......@@ -143,14 +176,93 @@ Object* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
return NULL;
}
void CallIC::Generate(MacroAssembler* masm,
int argc,
ExternalReference const& f) {
// Get the receiver of the function from the stack; 1 ~ return address.
__ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
// Get the name of the function to call from the stack.
// 2 ~ receiver, return address.
__ movq(rbx, Operand(rsp, (argc + 2) * kPointerSize));
// Enter an internal frame.
__ EnterInternalFrame();
// Push the receiver and the name of the function.
__ push(rdx);
__ push(rbx);
// Call the entry.
CEntryStub stub;
__ movq(rax, Immediate(2));
__ movq(rbx, f);
__ CallStub(&stub);
// Move result to rdi and exit the internal frame.
__ movq(rdi, rax);
__ LeaveInternalFrame();
// Check if the receiver is a global object of some sort.
Label invoke, global;
__ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); // receiver
__ testq(rdx, Immediate(kSmiTagMask));
__ j(zero, &invoke);
__ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
__ movzxbq(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset));
__ cmpq(rcx, Immediate(static_cast<int8_t>(JS_GLOBAL_OBJECT_TYPE)));
__ j(equal, &global);
__ cmpq(rcx, Immediate(static_cast<int8_t>(JS_BUILTINS_OBJECT_TYPE)));
__ j(not_equal, &invoke);
// Patch the receiver on the stack.
__ bind(&global);
__ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
__ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
// Invoke the function.
ParameterCount actual(argc);
__ bind(&invoke);
__ InvokeFunction(rdi, actual, JUMP_FUNCTION);
}
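The (argc + 1) and (argc + 2) slot indices above follow from the call IC's frame shape: return address on top, then the argc arguments, then the receiver, then the name of the function being called. A tiny sketch of that arithmetic (hypothetical helpers, not part of the patch):

// Hypothetical slot arithmetic mirroring the loads above, with the return
// address at rsp[0] and the arguments between it and the receiver and name.
constexpr int kPointerSize = 8;  // x64

constexpr int ReceiverOffset(int argc) {
  return (argc + 1) * kPointerSize;  // 1 ~ return address
}
constexpr int NameOffset(int argc) {
  return (argc + 2) * kPointerSize;  // 2 ~ receiver, return address
}

static_assert(ReceiverOffset(0) == 8, "no-argument call: receiver right under the return address");
static_assert(NameOffset(2) == 32, "two-argument call: name four slots above rsp");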
void CallIC::GenerateMegamorphic(MacroAssembler* a, int b) {
UNIMPLEMENTED();
}
void CallIC::GenerateNormal(MacroAssembler* a, int b) {
UNIMPLEMENTED();
}
const int LoadIC::kOffsetToLoadInstruction = 20;
void LoadIC::ClearInlinedVersion(Address address) {
UNIMPLEMENTED();
}
void LoadIC::Generate(MacroAssembler* masm, ExternalReference const& f) {
masm->int3(); // UNIMPLEMENTED.
// ----------- S t a t e -------------
// -- rcx : name
// -- rsp[0] : return address
// -- rsp[8] : receiver
// -----------------------------------
__ movq(rax, Operand(rsp, kPointerSize));
// Move the return address below the arguments.
__ pop(rbx);
__ push(rax);
__ push(rcx);
__ push(rbx);
// Perform tail call to the entry.
__ TailCallRuntime(f, 2);
}
void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
masm->int3(); // UNIMPLEMENTED.
}
......
......@@ -842,6 +842,42 @@ Result VirtualFrame::CallRuntime(Runtime::FunctionId id, int arg_count) {
}
Result VirtualFrame::CallLoadIC(RelocInfo::Mode mode) {
// Name and receiver are on the top of the frame. The IC expects
// name in rcx and receiver on the stack. It does not drop the
// receiver.
Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
Result name = Pop();
PrepareForCall(1, 0); // One stack arg, not callee-dropped.
name.ToRegister(rcx);
name.Unuse();
return RawCallCodeObject(ic, mode);
}
Result VirtualFrame::CallKeyedLoadIC(RelocInfo::Mode mode) {
// Key and receiver are on top of the frame. The IC expects them on
// the stack. It does not drop them.
Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
PrepareForCall(2, 0); // Two stack args, neither callee-dropped.
return RawCallCodeObject(ic, mode);
}
Result VirtualFrame::CallKeyedStoreIC() {
// Value, key, and receiver are on the top of the frame. The IC
// expects value in rax and key and receiver on the stack. It does
// not drop the key and receiver.
Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
// TODO(1222589): Make the IC grab the values from the stack.
Result value = Pop();
PrepareForCall(2, 0); // Two stack args, neither callee-dropped.
value.ToRegister(rax);
value.Unuse();
return RawCallCodeObject(ic, RelocInfo::CODE_TARGET);
}
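Taken together, these virtual-frame helpers fix the x64 register/stack contract with the IC stubs earlier in this change. A compilable summary of what each one sets up, restricted to what this diff shows:

// Summary (from this diff only) of the x64 IC call contracts set up above.
struct ICContract {
  const char* helper;
  const char* register_arg;   // value the helper moves into a register
  const char* stack_args;     // left on the stack, not dropped by the IC
};
const ICContract kContracts[] = {
  {"CallLoadIC",       "rcx = name",  "receiver"},
  {"CallKeyedLoadIC",  "(none)",      "name, receiver"},
  {"CallKeyedStoreIC", "rax = value", "key, receiver"},
};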
Result VirtualFrame::CallCallIC(RelocInfo::Mode mode,
int arg_count,
int loop_nesting) {
......