Commit bac8b21f authored by ager@chromium.org

Port FastNewContextStub to x64 and arm.

BUG=551
Review URL: http://codereview.chromium.org/541027

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@3592 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 0d01d17b
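
For orientation: both ports build the same object. A function context is laid out like a FixedArray whose first Context::MIN_CONTEXT_SLOTS entries are the fixed slots written by the stub below (closure, fcontext, previous, extension, global) and whose remaining entries hold the function's context-allocated locals. A minimal sketch of the size arithmetic, derived from the hunks below; the helper name is illustrative and not part of this commit:

    // Illustrative only (not part of this commit): total allocation size for
    // a context with the given number of function-local heap slots.
    static int NewContextSizeInBytes(int heap_slots) {
      int length = heap_slots + Context::MIN_CONTEXT_SLOTS;    // fixed + locals
      return FixedArray::kHeaderSize + length * kPointerSize;  // header + slots
    }
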
@@ -187,12 +187,18 @@ void CodeGenerator::GenCode(FunctionLiteral* fun) {
function_return_is_shadowed_ = false;
VirtualFrame::SpilledScope spilled_scope;
if (scope_->num_heap_slots() > 0) {
int heap_slots = scope_->num_heap_slots();
if (heap_slots > 0) {
// Allocate local context.
// Get outer context and create a new context based on it.
__ ldr(r0, frame_->Function());
frame_->EmitPush(r0);
frame_->CallRuntime(Runtime::kNewContext, 1); // r0 holds the result
if (heap_slots <= FastNewContextStub::kMaximumSlots) {
  FastNewContextStub stub(heap_slots);
  frame_->CallStub(&stub, 1);
} else {
  frame_->CallRuntime(Runtime::kNewContext, 1);
}
#ifdef DEBUG
JumpTarget verified_true;
@@ -4444,6 +4450,55 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
}

void FastNewContextStub::Generate(MacroAssembler* masm) {
// Try to allocate the context in new space.
Label gc;
int length = slots_ + Context::MIN_CONTEXT_SLOTS;
// Pop the function from the stack.
__ pop(r3);
// Attempt to allocate the context in new space.
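// The size expression counts the Context::MIN_CONTEXT_SLOTS fixed entries plus
// the function's own slots, in words (the x64 hunk below computes the same
// size in bytes).  r0 receives the tagged object, r1 and r2 are scratch, and a
// failed allocation branches to the &gc bailout below.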
__ AllocateInNewSpace(length + (FixedArray::kHeaderSize / kPointerSize),
                      r0,
                      r1,
                      r2,
                      &gc,
                      TAG_OBJECT);
// Setup the object header.
__ LoadRoot(r2, Heap::kContextMapRootIndex);
__ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
__ mov(r2, Operand(length));
__ str(r2, FieldMemOperand(r0, Array::kLengthOffset));
// Setup the fixed slots.
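// CLOSURE gets the function popped into r3 above, FCONTEXT points back at the
// new context itself, and PREVIOUS/EXTENSION start out empty (Smi 0, which the
// x64 hunk below writes as NULL).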
__ mov(r1, Operand(Smi::FromInt(0)));
__ str(r3, MemOperand(r0, Context::SlotOffset(Context::CLOSURE_INDEX)));
__ str(r0, MemOperand(r0, Context::SlotOffset(Context::FCONTEXT_INDEX)));
__ str(r1, MemOperand(r0, Context::SlotOffset(Context::PREVIOUS_INDEX)));
__ str(r1, MemOperand(r0, Context::SlotOffset(Context::EXTENSION_INDEX)));
// Copy the global object from the surrounding context.
__ ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ str(r1, MemOperand(r0, Context::SlotOffset(Context::GLOBAL_INDEX)));
// Initialize the rest of the slots to undefined.
__ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
  __ str(r1, MemOperand(r0, Context::SlotOffset(i)));
}
// Return. The on-stack parameter has already been popped.
__ mov(cp, r0);
__ Ret();
// Need to collect. Call into runtime system.
__ bind(&gc);
__ TailCallRuntime(ExternalReference(Runtime::kNewContext), 1, 1);
}
// Count leading zeros in a 32 bit word. On ARM5 and later it uses the clz
// instruction. On pre-ARM5 hardware this routine gives the wrong answer for 0
// (31 instead of 32).
......
@@ -326,12 +326,19 @@ void CodeGenerator::GenCode(FunctionLiteral* function) {
function_return_is_shadowed_ = false;
// Allocate the local context if needed.
if (scope_->num_heap_slots() > 0) {
int heap_slots = scope_->num_heap_slots();
if (heap_slots > 0) {
Comment cmnt(masm_, "[ allocate local context");
// Allocate local context.
// Get outer context and create a new context based on it.
frame_->PushFunction();
Result context = frame_->CallRuntime(Runtime::kNewContext, 1);
Result context;
if (heap_slots <= FastNewContextStub::kMaximumSlots) {
  FastNewContextStub stub(heap_slots);
  context = frame_->CallStub(&stub, 1);
} else {
  context = frame_->CallRuntime(Runtime::kNewContext, 1);
}
// Update context local.
frame_->SaveContextRegister();
@@ -6116,6 +6123,48 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
}

void FastNewContextStub::Generate(MacroAssembler* masm) {
// Try to allocate the context in new space.
Label gc;
int length = slots_ + Context::MIN_CONTEXT_SLOTS;
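// Same allocation as in the ARM hunk above, with the size computed in bytes
// here; rax receives the tagged object, rbx and rcx are scratch, and a failed
// allocation branches to &gc.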
__ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
                      rax, rbx, rcx, &gc, TAG_OBJECT);
// Get the function from the stack.
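// Unlike the ARM version, the function is not popped here; it stays on the
// stack and is dropped by the ret(1 * kPointerSize) at the end.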
__ movq(rcx, Operand(rsp, 1 * kPointerSize));
// Setup the object header.
__ LoadRoot(kScratchRegister, Heap::kContextMapRootIndex);
__ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
__ movl(FieldOperand(rax, Array::kLengthOffset), Immediate(length));
// Setup the fixed slots.
__ xor_(rbx, rbx); // Set to NULL.
__ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx);
__ movq(Operand(rax, Context::SlotOffset(Context::FCONTEXT_INDEX)), rax);
__ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rbx);
__ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx);
// Copy the global object from the surrounding context.
__ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_INDEX)), rbx);
// Initialize the rest of the slots to undefined.
__ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
  __ movq(Operand(rax, Context::SlotOffset(i)), rbx);
}
// Return and remove the on-stack parameter.
__ movq(rsi, rax);
__ ret(1 * kPointerSize);
// Need to collect. Call into runtime system.
__ bind(&gc);
__ TailCallRuntime(ExternalReference(Runtime::kNewContext), 1, 1);
}
void ToBooleanStub::Generate(MacroAssembler* masm) {
Label false_result, true_result, not_string;
__ movq(rax, Operand(rsp, 1 * kPointerSize));
......