More cleanup of slot handling in the nonoptimizing code generator.

Rename CreateSlotOperand so that it's clear it can emit code.  Use it
where possible.

Review URL: http://codereview.chromium.org/523052

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@3534 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 15fe7a8a
......@@ -281,32 +281,29 @@ void FastCodeGenerator::MoveTOS(Expression::Context context) {
}
template <>
MemOperand FastCodeGenerator::CreateSlotOperand<MemOperand>(
Slot* source,
Register scratch) {
switch (source->type()) {
MemOperand FastCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
switch (slot->type()) {
case Slot::PARAMETER:
case Slot::LOCAL:
return MemOperand(fp, SlotOffset(source));
return MemOperand(fp, SlotOffset(slot));
case Slot::CONTEXT: {
int context_chain_length =
function_->scope()->ContextChainLength(source->var()->scope());
function_->scope()->ContextChainLength(slot->var()->scope());
__ LoadContext(scratch, context_chain_length);
return CodeGenerator::ContextOperand(scratch, source->index());
return CodeGenerator::ContextOperand(scratch, slot->index());
}
case Slot::LOOKUP:
UNIMPLEMENTED();
UNREACHABLE();
}
UNREACHABLE();
return MemOperand(r0, 0);
}
void FastCodeGenerator::Move(Register dst, Slot* source) {
// Use dst as scratch.
MemOperand location = CreateSlotOperand<MemOperand>(source, dst);
__ ldr(dst, location);
void FastCodeGenerator::Move(Register destination, Slot* source) {
// Use destination as scratch.
MemOperand location = EmitSlotSearch(source, destination);
__ ldr(destination, location);
}
......@@ -351,23 +348,14 @@ void FastCodeGenerator::Move(Slot* dst,
Register src,
Register scratch1,
Register scratch2) {
switch (dst->type()) {
case Slot::PARAMETER:
case Slot::LOCAL:
__ str(src, MemOperand(fp, SlotOffset(dst)));
break;
case Slot::CONTEXT: {
int context_chain_length =
function_->scope()->ContextChainLength(dst->var()->scope());
__ LoadContext(scratch1, context_chain_length);
int index = Context::SlotOffset(dst->index());
__ mov(scratch2, Operand(index));
__ str(src, MemOperand(scratch1, index));
__ RecordWrite(scratch1, scratch2, src);
break;
}
case Slot::LOOKUP:
UNIMPLEMENTED();
ASSERT(dst->type() != Slot::LOOKUP); // Not yet implemented.
ASSERT(!scratch1.is(src) && !scratch2.is(src));
MemOperand location = EmitSlotSearch(dst, scratch1);
__ str(src, location);
// Emit the write barrier code if the location is in the heap.
if (dst->type() == Slot::CONTEXT) {
__ mov(scratch2, Operand(Context::SlotOffset(dst->index())));
__ RecordWrite(scratch1, scratch2, src);
}
}
......@@ -451,15 +439,18 @@ void FastCodeGenerator::VisitDeclaration(Declaration* decl) {
case Slot::LOCAL:
if (decl->mode() == Variable::CONST) {
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
__ str(ip, MemOperand(fp, SlotOffset(var->slot())));
__ str(ip, MemOperand(fp, SlotOffset(slot)));
} else if (decl->fun() != NULL) {
Visit(decl->fun());
__ pop(ip);
__ str(ip, MemOperand(fp, SlotOffset(var->slot())));
__ str(ip, MemOperand(fp, SlotOffset(slot)));
}
break;
case Slot::CONTEXT:
// We bypass the general EmitSlotSearch because we know more about
// this specific context.
// The variable in the decl always resides in the current context.
ASSERT_EQ(0, function_->scope()->ContextChainLength(var->scope()));
if (FLAG_debug_code) {
......@@ -912,35 +903,35 @@ void FastCodeGenerator::EmitVariableAssignment(Variable* var,
// Overwrite the global object on the stack with the result if needed.
DropAndMove(context, r0);
} else if (var->slot()) {
} else if (var->slot() != NULL) {
Slot* slot = var->slot();
ASSERT_NOT_NULL(slot); // Variables rewritten as properties not handled.
switch (slot->type()) {
case Slot::LOCAL:
case Slot::PARAMETER: {
MemOperand target = MemOperand(fp, SlotOffset(slot));
switch (context) {
case Expression::kUninitialized:
UNREACHABLE();
case Expression::kEffect:
// Perform assignment and discard value.
__ pop(r0);
__ str(r0, MemOperand(fp, SlotOffset(var->slot())));
__ str(r0, target);
break;
case Expression::kValue:
// Perform assignment and preserve value.
__ ldr(r0, MemOperand(sp));
__ str(r0, MemOperand(fp, SlotOffset(var->slot())));
__ str(r0, target);
break;
case Expression::kTest:
// Perform assignment and test (and discard) value.
__ pop(r0);
__ str(r0, MemOperand(fp, SlotOffset(var->slot())));
__ str(r0, target);
TestAndBranch(r0, true_label_, false_label_);
break;
case Expression::kValueTest: {
Label discard;
__ ldr(r0, MemOperand(sp));
__ str(r0, MemOperand(fp, SlotOffset(var->slot())));
__ str(r0, target);
TestAndBranch(r0, true_label_, &discard);
__ bind(&discard);
__ pop();
......@@ -950,7 +941,7 @@ void FastCodeGenerator::EmitVariableAssignment(Variable* var,
case Expression::kTestValue: {
Label discard;
__ ldr(r0, MemOperand(sp));
__ str(r0, MemOperand(fp, SlotOffset(var->slot())));
__ str(r0, target);
TestAndBranch(r0, &discard, false_label_);
__ bind(&discard);
__ pop();
......@@ -962,31 +953,15 @@ void FastCodeGenerator::EmitVariableAssignment(Variable* var,
}
case Slot::CONTEXT: {
int chain_length =
function_->scope()->ContextChainLength(slot->var()->scope());
if (chain_length > 0) {
// Move up the chain of contexts to the context containing the slot.
__ ldr(r0, CodeGenerator::ContextOperand(cp, Context::CLOSURE_INDEX));
// Load the function context (which is the incoming, outer context).
__ ldr(r0, FieldMemOperand(r0, JSFunction::kContextOffset));
for (int i = 1; i < chain_length; i++) {
__ ldr(r0,
CodeGenerator::ContextOperand(r0, Context::CLOSURE_INDEX));
__ ldr(r0, FieldMemOperand(r0, JSFunction::kContextOffset));
}
} else { // Slot is in the current context. Generate optimized code.
__ mov(r0, cp);
}
// The context may be an intermediate context, not a function context.
__ ldr(r0, CodeGenerator::ContextOperand(r0, Context::FCONTEXT_INDEX));
__ pop(r1);
__ str(r1, CodeGenerator::ContextOperand(r0, slot->index()));
MemOperand target = EmitSlotSearch(slot, r1);
__ pop(r0);
__ str(r0, target);
// RecordWrite may destroy all its register arguments.
if (context == Expression::kValue) {
__ push(r1);
__ push(r0);
} else if (context != Expression::kEffect) {
__ mov(r3, r1);
__ mov(r3, r0);
}
int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
......@@ -994,14 +969,13 @@ void FastCodeGenerator::EmitVariableAssignment(Variable* var,
// register. Skip the write barrier if the value written (r0) is a smi.
// The smi test is part of RecordWrite on other platforms, not on arm.
Label exit;
__ tst(r1, Operand(kSmiTagMask));
__ tst(r0, Operand(kSmiTagMask));
__ b(eq, &exit);
__ mov(r2, Operand(offset));
__ RecordWrite(r0, r2, r1);
__ RecordWrite(r1, r2, r0);
__ bind(&exit);
if (context != Expression::kEffect &&
context != Expression::kValue) {
if (context != Expression::kEffect && context != Expression::kValue) {
Move(context, r3);
}
break;
......@@ -1011,6 +985,10 @@ void FastCodeGenerator::EmitVariableAssignment(Variable* var,
UNREACHABLE();
break;
}
} else {
// Variables rewritten as properties are not treated as variables in
// assignments.
UNREACHABLE();
}
}
......
......@@ -219,9 +219,10 @@ class FastCodeGenerator: public AstVisitor {
void Move(Slot* dst, Register source, Register scratch1, Register scratch2);
void Move(Register dst, Slot* source);
// Templated to allow for Operand on intel and MemOperand on ARM.
template <typename MemoryLocation>
MemoryLocation CreateSlotOperand(Slot* slot, Register scratch);
// Return an operand used to read/write to a known (ie, non-LOOKUP) slot.
// May emit code to traverse the context chain, destroying the scratch
// register.
MemOperand EmitSlotSearch(Slot* slot, Register scratch);
// Drop the TOS, and store source to destination.
// If destination is TOS, just overwrite TOS with source.
......
......@@ -261,30 +261,28 @@ void FastCodeGenerator::MoveTOS(Expression::Context context) {
}
template <>
Operand FastCodeGenerator::CreateSlotOperand<Operand>(Slot* source,
Register scratch) {
switch (source->type()) {
MemOperand FastCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
switch (slot->type()) {
case Slot::PARAMETER:
case Slot::LOCAL:
return Operand(ebp, SlotOffset(source));
return Operand(ebp, SlotOffset(slot));
case Slot::CONTEXT: {
int context_chain_length =
function_->scope()->ContextChainLength(source->var()->scope());
function_->scope()->ContextChainLength(slot->var()->scope());
__ LoadContext(scratch, context_chain_length);
return CodeGenerator::ContextOperand(scratch, source->index());
return CodeGenerator::ContextOperand(scratch, slot->index());
}
case Slot::LOOKUP:
UNIMPLEMENTED();
UNREACHABLE();
}
UNREACHABLE();
return Operand(eax, 0);
}
void FastCodeGenerator::Move(Register dst, Slot* source) {
Operand location = CreateSlotOperand<Operand>(source, dst);
__ mov(dst, location);
void FastCodeGenerator::Move(Register destination, Slot* source) {
MemOperand location = EmitSlotSearch(source, destination);
__ mov(destination, location);
}
......@@ -297,7 +295,7 @@ void FastCodeGenerator::Move(Expression::Context context,
case Expression::kEffect:
break;
case Expression::kValue: {
Operand location = CreateSlotOperand<Operand>(source, scratch);
MemOperand location = EmitSlotSearch(source, scratch);
__ push(location);
break;
}
......@@ -334,25 +332,14 @@ void FastCodeGenerator::Move(Slot* dst,
Register src,
Register scratch1,
Register scratch2) {
switch (dst->type()) {
case Slot::PARAMETER:
case Slot::LOCAL:
__ mov(Operand(ebp, SlotOffset(dst)), src);
break;
case Slot::CONTEXT: {
ASSERT(!src.is(scratch1));
ASSERT(!src.is(scratch2));
ASSERT(!scratch1.is(scratch2));
int context_chain_length =
function_->scope()->ContextChainLength(dst->var()->scope());
__ LoadContext(scratch1, context_chain_length);
__ mov(Operand(scratch1, Context::SlotOffset(dst->index())), src);
int offset = FixedArray::kHeaderSize + dst->index() * kPointerSize;
__ RecordWrite(scratch1, offset, src, scratch2);
break;
}
case Slot::LOOKUP:
UNIMPLEMENTED();
ASSERT(dst->type() != Slot::LOOKUP); // Not yet implemented.
ASSERT(!scratch1.is(src) && !scratch2.is(src));
MemOperand location = EmitSlotSearch(dst, scratch1);
__ mov(location, src);
// Emit the write barrier code if the location is in the heap.
if (dst->type() == Slot::CONTEXT) {
int offset = FixedArray::kHeaderSize + dst->index() * kPointerSize;
__ RecordWrite(scratch1, offset, src, scratch2);
}
}
......@@ -448,15 +435,18 @@ void FastCodeGenerator::VisitDeclaration(Declaration* decl) {
case Slot::PARAMETER:
case Slot::LOCAL:
if (decl->mode() == Variable::CONST) {
__ mov(Operand(ebp, SlotOffset(var->slot())),
__ mov(Operand(ebp, SlotOffset(slot)),
Immediate(Factory::the_hole_value()));
} else if (decl->fun() != NULL) {
Visit(decl->fun());
__ pop(Operand(ebp, SlotOffset(var->slot())));
__ pop(Operand(ebp, SlotOffset(slot)));
}
break;
case Slot::CONTEXT:
// We bypass the general EmitSlotSearch because we know more about
// this specific context.
// The variable in the decl always resides in the current context.
ASSERT_EQ(0, function_->scope()->ContextChainLength(var->scope()));
if (FLAG_debug_code) {
......@@ -904,7 +894,7 @@ void FastCodeGenerator::EmitVariableAssignment(Variable* var,
switch (slot->type()) {
case Slot::LOCAL:
case Slot::PARAMETER: {
Operand target = Operand(ebp, SlotOffset(var->slot()));
Operand target = Operand(ebp, SlotOffset(slot));
switch (context) {
case Expression::kUninitialized:
UNREACHABLE();
......@@ -948,38 +938,18 @@ void FastCodeGenerator::EmitVariableAssignment(Variable* var,
}
case Slot::CONTEXT: {
int chain_length =
function_->scope()->ContextChainLength(slot->var()->scope());
if (chain_length > 0) {
// Move up the context chain to the context containing the slot.
__ mov(eax,
Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX)));
// Load the function context (which is the incoming, outer context).
__ mov(eax, FieldOperand(eax, JSFunction::kContextOffset));
for (int i = 1; i < chain_length; i++) {
__ mov(eax,
Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)));
__ mov(eax, FieldOperand(eax, JSFunction::kContextOffset));
}
} else { // Slot is in the current context. Generate optimized code.
__ mov(eax, esi); // RecordWrite destroys the object register.
}
if (FLAG_debug_code) {
__ cmp(eax,
Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX)));
__ Check(equal, "Context Slot chain length wrong.");
}
__ pop(ecx);
__ mov(Operand(eax, Context::SlotOffset(slot->index())), ecx);
MemOperand target = EmitSlotSearch(slot, ecx);
__ pop(eax);
__ mov(target, eax);
// RecordWrite may destroy all its register arguments.
if (context == Expression::kValue) {
__ push(ecx);
__ push(eax);
} else if (context != Expression::kEffect) {
__ mov(edx, ecx);
__ mov(edx, eax);
}
int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
__ RecordWrite(eax, offset, ecx, ebx);
__ RecordWrite(ecx, offset, eax, ebx);
if (context != Expression::kEffect &&
context != Expression::kValue) {
Move(context, edx);
......@@ -991,6 +961,10 @@ void FastCodeGenerator::EmitVariableAssignment(Variable* var,
UNREACHABLE();
break;
}
} else {
// Variables rewritten as properties are not treated as variables in
// assignments.
UNREACHABLE();
}
}
......
......@@ -33,10 +33,13 @@
namespace v8 {
namespace internal {
// Convenience for platform-independent signatures. We do not normally
// distinguish memory operands from other operands on ia32.
typedef Operand MemOperand;
// Forward declaration.
class JumpTarget;
// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
public:
......
......@@ -270,30 +270,28 @@ void FastCodeGenerator::MoveTOS(Expression::Context context) {
}
template <>
Operand FastCodeGenerator::CreateSlotOperand<Operand>(Slot* source,
Register scratch) {
switch (source->type()) {
MemOperand FastCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
switch (slot->type()) {
case Slot::PARAMETER:
case Slot::LOCAL:
return Operand(rbp, SlotOffset(source));
return Operand(rbp, SlotOffset(slot));
case Slot::CONTEXT: {
int context_chain_length =
function_->scope()->ContextChainLength(source->var()->scope());
function_->scope()->ContextChainLength(slot->var()->scope());
__ LoadContext(scratch, context_chain_length);
return CodeGenerator::ContextOperand(scratch, source->index());
return CodeGenerator::ContextOperand(scratch, slot->index());
}
case Slot::LOOKUP:
UNIMPLEMENTED();
UNREACHABLE();
}
UNREACHABLE();
return Operand(rax, 0);
}
void FastCodeGenerator::Move(Register dst, Slot* source) {
Operand location = CreateSlotOperand<Operand>(source, dst);
__ movq(dst, location);
void FastCodeGenerator::Move(Register destination, Slot* source) {
MemOperand location = EmitSlotSearch(source, destination);
__ movq(destination, location);
}
......@@ -306,7 +304,7 @@ void FastCodeGenerator::Move(Expression::Context context,
case Expression::kEffect:
break;
case Expression::kValue: {
Operand location = CreateSlotOperand<Operand>(source, scratch);
MemOperand location = EmitSlotSearch(source, scratch);
__ push(location);
break;
}
......@@ -343,25 +341,14 @@ void FastCodeGenerator::Move(Slot* dst,
Register src,
Register scratch1,
Register scratch2) {
switch (dst->type()) {
case Slot::PARAMETER:
case Slot::LOCAL:
__ movq(Operand(rbp, SlotOffset(dst)), src);
break;
case Slot::CONTEXT: {
ASSERT(!src.is(scratch1));
ASSERT(!src.is(scratch2));
ASSERT(!scratch1.is(scratch2));
int context_chain_length =
function_->scope()->ContextChainLength(dst->var()->scope());
__ LoadContext(scratch1, context_chain_length);
__ movq(Operand(scratch1, Context::SlotOffset(dst->index())), src);
int offset = FixedArray::kHeaderSize + dst->index() * kPointerSize;
__ RecordWrite(scratch1, offset, src, scratch2);
break;
}
case Slot::LOOKUP:
UNIMPLEMENTED();
ASSERT(dst->type() != Slot::LOOKUP); // Not yet implemented.
ASSERT(!scratch1.is(src) && !scratch2.is(src));
MemOperand location = EmitSlotSearch(dst, scratch1);
__ movq(location, src);
// Emit the write barrier code if the location is in the heap.
if (dst->type() == Slot::CONTEXT) {
int offset = FixedArray::kHeaderSize + dst->index() * kPointerSize;
__ RecordWrite(scratch1, offset, src, scratch2);
}
}
......@@ -457,14 +444,17 @@ void FastCodeGenerator::VisitDeclaration(Declaration* decl) {
case Slot::LOCAL:
if (decl->mode() == Variable::CONST) {
__ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
__ movq(Operand(rbp, SlotOffset(var->slot())), kScratchRegister);
__ movq(Operand(rbp, SlotOffset(slot)), kScratchRegister);
} else if (decl->fun() != NULL) {
Visit(decl->fun());
__ pop(Operand(rbp, SlotOffset(var->slot())));
__ pop(Operand(rbp, SlotOffset(slot)));
}
break;
case Slot::CONTEXT:
// We bypass the general EmitSlotSearch because we know more about
// this specific context.
// The variable in the decl always resides in the current context.
ASSERT_EQ(0, function_->scope()->ContextChainLength(var->scope()));
if (FLAG_debug_code) {
......@@ -908,34 +898,34 @@ void FastCodeGenerator::EmitVariableAssignment(Variable* var,
// Overwrite the global object on the stack with the result if needed.
DropAndMove(context, rax);
} else if (var->slot()) {
} else if (var->slot() != NULL) {
Slot* slot = var->slot();
ASSERT_NOT_NULL(slot); // Variables rewritten as properties not handled.
switch (slot->type()) {
case Slot::LOCAL:
case Slot::PARAMETER: {
Operand target = Operand(rbp, SlotOffset(slot));
switch (context) {
case Expression::kUninitialized:
UNREACHABLE();
case Expression::kEffect:
// Perform assignment and discard value.
__ pop(Operand(rbp, SlotOffset(var->slot())));
__ pop(target);
break;
case Expression::kValue:
// Perform assignment and preserve value.
__ movq(rax, Operand(rsp, 0));
__ movq(Operand(rbp, SlotOffset(var->slot())), rax);
__ movq(target, rax);
break;
case Expression::kTest:
// Perform assignment and test (and discard) value.
__ pop(rax);
__ movq(Operand(rbp, SlotOffset(var->slot())), rax);
__ movq(target, rax);
TestAndBranch(rax, true_label_, false_label_);
break;
case Expression::kValueTest: {
Label discard;
__ movq(rax, Operand(rsp, 0));
__ movq(Operand(rbp, SlotOffset(var->slot())), rax);
__ movq(target, rax);
TestAndBranch(rax, true_label_, &discard);
__ bind(&discard);
__ addq(rsp, Immediate(kPointerSize));
......@@ -945,7 +935,7 @@ void FastCodeGenerator::EmitVariableAssignment(Variable* var,
case Expression::kTestValue: {
Label discard;
__ movq(rax, Operand(rsp, 0));
__ movq(Operand(rbp, SlotOffset(var->slot())), rax);
__ movq(target, rax);
TestAndBranch(rax, &discard, false_label_);
__ bind(&discard);
__ addq(rsp, Immediate(kPointerSize));
......@@ -957,38 +947,18 @@ void FastCodeGenerator::EmitVariableAssignment(Variable* var,
}
case Slot::CONTEXT: {
int chain_length =
function_->scope()->ContextChainLength(slot->var()->scope());
if (chain_length > 0) {
// Move up the context chain to the context containing the slot.
__ movq(rax,
Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
// Load the function context (which is the incoming, outer context).
__ movq(rax, FieldOperand(rax, JSFunction::kContextOffset));
for (int i = 1; i < chain_length; i++) {
__ movq(rax,
Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)));
__ movq(rax, FieldOperand(rax, JSFunction::kContextOffset));
}
} else { // Slot is in the current context. Generate optimized code.
__ movq(rax, rsi); // RecordWrite destroys the object register.
}
if (FLAG_debug_code) {
__ cmpq(rax,
Operand(rax, Context::SlotOffset(Context::FCONTEXT_INDEX)));
__ Check(equal, "Context Slot chain length wrong.");
}
__ pop(rcx);
__ movq(Operand(rax, Context::SlotOffset(slot->index())), rcx);
MemOperand target = EmitSlotSearch(slot, rcx);
__ pop(rax);
__ movq(target, rax);
// RecordWrite may destroy all its register arguments.
if (context == Expression::kValue) {
__ push(rcx);
__ push(rax);
} else if (context != Expression::kEffect) {
__ movq(rdx, rcx);
__ movq(rdx, rax);
}
int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
__ RecordWrite(rax, offset, rcx, rbx);
__ RecordWrite(rcx, offset, rax, rbx);
if (context != Expression::kEffect &&
context != Expression::kValue) {
Move(context, rdx);
......@@ -1000,6 +970,10 @@ void FastCodeGenerator::EmitVariableAssignment(Variable* var,
UNREACHABLE();
break;
}
} else {
// Variables rewritten as properties are not treated as variables in
// assignments.
UNREACHABLE();
}
}
......
......@@ -38,6 +38,9 @@ namespace internal {
// function calling convention.
static const Register kScratchRegister = r10;
// Convenience for platform-independent signatures.
typedef Operand MemOperand;
// Forward declaration.
class JumpTarget;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment