Commit 59cd4dc5 authored by ager@chromium.org's avatar ager@chromium.org

X64: Enable lazy compilation and add code generation for simple object

literals.
Review URL: http://codereview.chromium.org/141040

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@2232 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 83ad579a
...@@ -32,10 +32,12 @@ ...@@ -32,10 +32,12 @@
namespace v8 { namespace v8 {
namespace internal { namespace internal {
#define __ ACCESS_MASM(masm_)
// Platform-specific inline functions. // Platform-specific inline functions.
void DeferredCode::Jump() { UNIMPLEMENTED(); } void DeferredCode::Jump() { __ jmp(&entry_label_); }
void DeferredCode::Branch(Condition cc) { UNIMPLEMENTED(); } void DeferredCode::Branch(Condition cc) { __ j(cc, &entry_label_); }
void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) { void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
...@@ -47,6 +49,7 @@ void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) { ...@@ -47,6 +49,7 @@ void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
GenerateFastMathOp(COS, args); GenerateFastMathOp(COS, args);
} }
#undef __
} } // namespace v8::internal } } // namespace v8::internal
......
...@@ -39,12 +39,35 @@ ...@@ -39,12 +39,35 @@
namespace v8 { namespace v8 {
namespace internal { namespace internal {
#define __ ACCESS_MASM(masm_)
// ------------------------------------------------------------------------- // -------------------------------------------------------------------------
// Platform-specific DeferredCode functions. // Platform-specific DeferredCode functions.
// Diff artifact: the old UNIMPLEMENTED() stub and the new definition were
// fused onto this line by the diff rendering.
void DeferredCode::SaveRegisters() { UNIMPLEMENTED(); } void DeferredCode::SaveRegisters() {
// Preserve allocated registers before jumping into deferred code.  Each
// slot of registers_ encodes what to do with the corresponding register.
for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
int action = registers_[i];
if (action == kPush) {
// kPush: value lives only in the register; save it on the stack.
__ push(RegisterAllocator::ToRegister(i));
} else if (action != kIgnore && (action & kSyncedFlag) == 0) {
// Otherwise action (minus flags) is a frame offset from rbp; write the
// register back to its frame slot unless it is already synced there.
__ movq(Operand(rbp, action), RegisterAllocator::ToRegister(i));
}
}
}
// Inverse of SaveRegisters(): reload registers when deferred code returns.
void DeferredCode::RestoreRegisters() {
// Restore registers in reverse order due to the stack.
for (int i = RegisterAllocator::kNumRegisters - 1; i >= 0; i--) {
int action = registers_[i];
if (action == kPush) {
// Was pushed by SaveRegisters(); pop it back (reverse order keeps the
// pops matched with the pushes).
__ pop(RegisterAllocator::ToRegister(i));
} else if (action != kIgnore) {
// Strip the synced flag to recover the frame offset, then reload the
// register from its frame slot.
action &= ~kSyncedFlag;
__ movq(RegisterAllocator::ToRegister(i), Operand(rbp, action));
}
}
}
void DeferredCode::RestoreRegisters() { UNIMPLEMENTED(); }
// ------------------------------------------------------------------------- // -------------------------------------------------------------------------
// CodeGenState implementation. // CodeGenState implementation.
...@@ -94,8 +117,6 @@ CodeGenerator::CodeGenerator(int buffer_size, ...@@ -94,8 +117,6 @@ CodeGenerator::CodeGenerator(int buffer_size,
in_spilled_code_(false) { in_spilled_code_(false) {
} }
#define __ ACCESS_MASM(masm_)
void CodeGenerator::DeclareGlobals(Handle<FixedArray> a) { void CodeGenerator::DeclareGlobals(Handle<FixedArray> a) {
UNIMPLEMENTED(); UNIMPLEMENTED();
...@@ -105,14 +126,13 @@ void CodeGenerator::TestCodeGenerator() { ...@@ -105,14 +126,13 @@ void CodeGenerator::TestCodeGenerator() {
// Compile a function from a string, and run it. // Compile a function from a string, and run it.
// Set flags appropriately for this stage of implementation. // Set flags appropriately for this stage of implementation.
// TODO(X64): Make ic and lazy compilation work, and stop disabling them. // TODO(X64): Make ic work, and stop disabling them.
// These settings stick - remove them when we don't want them anymore. // These settings stick - remove them when we don't want them anymore.
#ifdef DEBUG #ifdef DEBUG
FLAG_print_builtin_source = true; FLAG_print_builtin_source = true;
FLAG_print_builtin_ast = true; FLAG_print_builtin_ast = true;
#endif #endif
FLAG_use_ic = false; FLAG_use_ic = false;
FLAG_lazy = false;
Handle<JSFunction> test_function = Compiler::Compile( Handle<JSFunction> test_function = Compiler::Compile(
Factory::NewStringFromAscii(CStrVector( Factory::NewStringFromAscii(CStrVector(
...@@ -134,6 +154,7 @@ void CodeGenerator::TestCodeGenerator() { ...@@ -134,6 +154,7 @@ void CodeGenerator::TestCodeGenerator() {
" test_local_variables(" " test_local_variables("
" test_nesting_calls(test_local_variables(1,3), 42, 47)," " test_nesting_calls(test_local_variables(1,3), 42, 47),"
" test_local_variables(-25.3, 2));" " test_local_variables(-25.3, 2));"
" var o = { x: 42 };"
" return test_if_then_else(1, 47, 39);" " return test_if_then_else(1, 47, 39);"
"})()")), "})()")),
Factory::NewStringFromAscii(CStrVector("CodeGeneratorTestScript")), Factory::NewStringFromAscii(CStrVector("CodeGeneratorTestScript")),
...@@ -677,8 +698,134 @@ void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* a) { ...@@ -677,8 +698,134 @@ void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* a) {
UNIMPLEMENTED(); UNIMPLEMENTED();
} }
void CodeGenerator::VisitObjectLiteral(ObjectLiteral* a) {
UNIMPLEMENTED(); // Materialize the object literal 'node' in the literals array
// 'literals' of the function. Leave the object boilerplate in
// 'boilerplate'.
// Deferred code that materializes an object literal's boilerplate via the
// runtime when the literals-array entry is still undefined.  On exit the
// boilerplate object is left in 'boilerplate_' (see Generate()).
class DeferredObjectLiteral: public DeferredCode {
public:
// boilerplate: register that receives the boilerplate object.
// literals: register holding the function's literals array.
// node: the AST node for the object literal being compiled.
DeferredObjectLiteral(Register boilerplate,
Register literals,
ObjectLiteral* node)
: boilerplate_(boilerplate), literals_(literals), node_(node) {
set_comment("[ DeferredObjectLiteral");
}
void Generate();
private:
Register boilerplate_;
Register literals_;
ObjectLiteral* node_;
};
void DeferredObjectLiteral::Generate() {
// Since the entry is undefined we call the runtime system to
// compute the literal.
// The runtime call takes three arguments, pushed left to right:
// Literal array (0).
__ push(literals_);
// Literal index (1).
__ push(Immediate(Smi::FromInt(node_->literal_index())));
// Constant properties (2).  Embedded-object constants go through the
// scratch register on x64 (no 64-bit push-immediate).
__ movq(kScratchRegister,
node_->constant_properties(),
RelocInfo::EMBEDDED_OBJECT);
__ push(kScratchRegister);
__ CallRuntime(Runtime::kCreateObjectLiteralBoilerplate, 3);
// The runtime result arrives in rax; move it to the register the
// non-deferred code expects, if different.
if (!boilerplate_.is(rax)) __ movq(boilerplate_, rax);
}
// Emit code for an object literal: fetch (or lazily materialize) the
// boilerplate object from the function's literals array, clone it, and
// then install any non-constant properties on the clone.  Leaves the
// resulting object on top of the virtual frame.
void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  // Retrieve the literals array and check the allocated entry.  Begin
  // with a writable copy of the function of this activation in a
  // register.
  frame_->PushFunction();
  Result literals = frame_->Pop();
  literals.ToRegister();
  frame_->Spill(literals.reg());

  // Load the literals array of the function.
  __ movq(literals.reg(),
          FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));

  // Load the literal at the ast saved index.
  Result boilerplate = allocator_->Allocate();
  ASSERT(boilerplate.is_valid());
  int literal_offset =
      FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
  __ movq(boilerplate.reg(), FieldOperand(literals.reg(), literal_offset));

  // Check whether we need to materialize the object literal boilerplate.
  // If so, jump to the deferred code passing the literals array.
  DeferredObjectLiteral* deferred =
      new DeferredObjectLiteral(boilerplate.reg(), literals.reg(), node);
  // Comparison against an embedded object goes through kScratchRegister
  // on x64 (no 64-bit immediate compare).
  __ movq(kScratchRegister,
          Factory::undefined_value(),
          RelocInfo::EMBEDDED_OBJECT);
  __ cmpq(boilerplate.reg(), kScratchRegister);
  deferred->Branch(equal);
  deferred->BindExit();
  literals.Unuse();

  // Push the boilerplate object.
  frame_->Push(&boilerplate);

  // Clone the boilerplate object; a shallow clone suffices for literals
  // with no nested literals (depth 1).
  Runtime::FunctionId clone_function_id = Runtime::kCloneLiteralBoilerplate;
  if (node->depth() == 1) {
    clone_function_id = Runtime::kCloneShallowLiteralBoilerplate;
  }
  Result clone = frame_->CallRuntime(clone_function_id, 1);
  // Push the newly cloned literal object as the result.
  frame_->Push(&clone);

  // Install properties that are not already part of the boilerplate.
  for (int i = 0; i < node->properties()->length(); i++) {
    ObjectLiteral::Property* property = node->properties()->at(i);
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        // Already baked into the boilerplate; nothing to do.
        break;
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
        // else fall through.
      case ObjectLiteral::Property::COMPUTED: {
        // TODO(X64): Implement setting of computed values in object literals.
        UNIMPLEMENTED();
        // Fix: the original code fell through into the PROTOTYPE case here.
        // UNIMPLEMENTED() aborts so the fall-through was unreachable, but
        // the missing break would become a real bug once this case is
        // implemented.
        break;
      }
      case ObjectLiteral::Property::PROTOTYPE: {
        // Duplicate the object as an argument to the runtime call.
        frame_->Dup();
        Load(property->key());
        Load(property->value());
        Result ignored = frame_->CallRuntime(Runtime::kSetProperty, 3);
        // Ignore the result.
        break;
      }
      case ObjectLiteral::Property::SETTER: {
        // Duplicate the object as an argument to the runtime call.
        frame_->Dup();
        Load(property->key());
        // Flag 1 selects the setter in kDefineAccessor.
        frame_->Push(Smi::FromInt(1));
        Load(property->value());
        Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
        // Ignore the result.
        break;
      }
      case ObjectLiteral::Property::GETTER: {
        // Duplicate the object as an argument to the runtime call.
        frame_->Dup();
        Load(property->key());
        // Flag 0 selects the getter in kDefineAccessor.
        frame_->Push(Smi::FromInt(0));
        Load(property->value());
        Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
        // Ignore the result.
        break;
      }
      default: UNREACHABLE();
    }
  }
}
void CodeGenerator::VisitArrayLiteral(ArrayLiteral* a) { void CodeGenerator::VisitArrayLiteral(ArrayLiteral* a) {
...@@ -1632,6 +1779,7 @@ void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) { ...@@ -1632,6 +1779,7 @@ void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
} else if (slot->type() == Slot::LOCAL) { } else if (slot->type() == Slot::LOCAL) {
frame_->StoreToLocalAt(slot->index()); frame_->StoreToLocalAt(slot->index());
} else { } else {
UNIMPLEMENTED();
// The other slot types (LOOKUP and GLOBAL) cannot reach here. // The other slot types (LOOKUP and GLOBAL) cannot reach here.
// //
// The use of SlotOperand below is safe for an unspilled frame // The use of SlotOperand below is safe for an unspilled frame
......
...@@ -36,6 +36,9 @@ namespace v8 { ...@@ -36,6 +36,9 @@ namespace v8 {
namespace internal { namespace internal {
#define __ ACCESS_MASM(masm())
Object* CallStubCompiler::CompileCallConstant(Object* a, Object* CallStubCompiler::CompileCallConstant(Object* a,
JSObject* b, JSObject* b,
JSFunction* c, JSFunction* c,
...@@ -122,10 +125,31 @@ Object* StoreStubCompiler::CompileStoreInterceptor(JSObject* a, String* b) { ...@@ -122,10 +125,31 @@ Object* StoreStubCompiler::CompileStoreInterceptor(JSObject* a, String* b) {
} }
// Diff artifact: the next three lines fuse the old UNIMPLEMENTED() stub with
// the new definition's leading comment and header.
Object* StubCompiler::CompileLazyCompile(Code::Flags a) { // TODO(1241006): Avoid having lazy compile stubs specialized by the
UNIMPLEMENTED(); // number of arguments. It is not needed anymore.
return NULL; Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {
// Builds the stub that compiles a function on first call, then tail-calls
// the freshly compiled code.  rdi is expected to hold the function
// (presumably the JS calling convention on x64 — confirm against callers).
// Enter an internal frame.
__ EnterInternalFrame();
// Push a copy of the function onto the stack.
__ push(rdi);
__ push(rdi); // function is also the parameter to the runtime call
__ CallRuntime(Runtime::kLazyCompile, 1);
// Restore the saved copy of the function; the runtime result (the
// compiled Code object) is left in rax.
__ pop(rdi);
// Tear down temporary frame.
__ LeaveInternalFrame();
// Do a tail-call of the compiled function.
__ lea(rcx, FieldOperand(rax, Code::kHeaderSize));
__ jmp(rcx);
return GetCodeWithFlags(flags, "LazyCompileStub");
} }
#undef __
} } // namespace v8::internal } } // namespace v8::internal
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment