Commit 7d23b48a authored by whesse@chromium.org

Inline part of RawSyncElementAt, split the rest into two functions.

Improve PrepareForCall, SyncRange, and SyncElementAt.

Review URL: http://codereview.chromium.org/49029

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@1618 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent dedd8118
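
For orientation before the per-file hunks, here is a small self-contained C++ sketch of the control flow this patch introduces: SyncElementAt and SyncRange now dispatch to SyncElementBelowStackPointer (write into an already-allocated stack slot) or SyncElementByPushing (allocate the slot with a push). ToyFrame, its boolean synced flags, and the printed output are illustrative stand-ins rather than V8 types; only the branch structure is taken from the ia32 hunks below.

```cpp
// Toy model (not V8 code): mirrors how the patched SyncElementAt and
// SyncRange choose between the two new helpers.  "Syncing" is reduced to
// setting a flag; the real code emits stores or pushes.
#include <cassert>
#include <cstdio>
#include <vector>

struct ToyFrame {
  std::vector<bool> synced;   // one dirty/clean flag per frame element
  int stack_pointer_ = 1;     // index of the highest element backed by memory

  void SyncElementBelowStackPointer(int index) {
    assert(index <= stack_pointer_);
    synced[index] = true;     // real code stores to the existing stack slot
  }

  void SyncElementByPushing(int index) {
    assert(index == stack_pointer_ + 1);
    stack_pointer_++;
    synced[index] = true;     // real code pushes, allocating and syncing at once
  }

  // Mirrors the new SyncElementAt: at or below the stack pointer, sync in
  // place; above it, push every element up to and including `index`.
  void SyncElementAt(int index) {
    if (index <= stack_pointer_) {
      if (!synced[index]) SyncElementBelowStackPointer(index);
    } else {
      for (int i = stack_pointer_ + 1; i <= index; i++) SyncElementByPushing(i);
    }
  }

  // Mirrors the new SyncRange: handle the all-below and all-above cases
  // directly, otherwise split at stack_pointer_ + 1 and recurse.
  void SyncRange(int begin, int end) {
    assert(begin >= 0 && end <= static_cast<int>(synced.size()));
    if (begin > stack_pointer_) {
      for (int i = stack_pointer_ + 1; i < end; i++) SyncElementByPushing(i);
    } else if (end <= stack_pointer_ + 1) {
      for (int i = begin; i < end; i++) {
        if (!synced[i]) SyncElementBelowStackPointer(i);
      }
    } else {
      SyncRange(begin, stack_pointer_ + 1);
      SyncRange(stack_pointer_ + 1, end);
    }
  }
};

int main() {
  ToyFrame frame;
  frame.synced.assign(6, false);
  frame.SyncRange(0, 6);  // mixed range: splits at stack_pointer_ + 1
  for (bool s : frame.synced) std::printf("%d ", s ? 1 : 0);
  std::printf("\n");      // prints: 1 1 1 1 1 1
  return 0;
}
```
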
@@ -58,98 +58,13 @@ VirtualFrame::VirtualFrame(CodeGenerator* cgen)
 }
 
-// Clear the dirty bit for the element at a given index if it is a
-// valid element.  The stack address corresponding to the element must
-// be allocated on the physical stack, or the first element above the
-// stack pointer so it can be allocated by a single push instruction.
-void VirtualFrame::RawSyncElementAt(int index) {
-  FrameElement element = elements_[index];
-  if (!element.is_valid() || element.is_synced()) return;
-  if (index <= stack_pointer_) {
-    // Emit code to write elements below the stack pointer to their
-    // (already allocated) stack address.
-    switch (element.type()) {
-      case FrameElement::INVALID:  // Fall through.
-      case FrameElement::MEMORY:
-        // There was an early bailout for invalid and synced elements
-        // (memory elements are always synced).
-        UNREACHABLE();
-        break;
-      case FrameElement::REGISTER:
-        __ str(element.reg(), MemOperand(fp, fp_relative(index)));
-        break;
-      case FrameElement::CONSTANT: {
-        Result temp = cgen_->allocator()->Allocate();
-        ASSERT(temp.is_valid());
-        __ mov(temp.reg(), Operand(element.handle()));
-        __ str(temp.reg(), MemOperand(fp, fp_relative(index)));
-        break;
-      }
-      case FrameElement::COPY: {
-        int backing_index = element.index();
-        FrameElement backing_element = elements_[backing_index];
-        if (backing_element.is_memory()) {
-          Result temp = cgen_->allocator()->Allocate();
-          ASSERT(temp.is_valid());
-          __ ldr(temp.reg(), MemOperand(fp, fp_relative(backing_index)));
-          __ str(temp.reg(), MemOperand(fp, fp_relative(index)));
-        } else {
-          ASSERT(backing_element.is_register());
-          __ str(backing_element.reg(), MemOperand(fp, fp_relative(index)));
-        }
-        break;
-      }
-    }
-  } else {
-    // Push elements above the stack pointer to allocate space and
-    // sync them.  Space should have already been allocated in the
-    // actual frame for all the elements below this one.
-    ASSERT(index == stack_pointer_ + 1);
-    stack_pointer_++;
-    switch (element.type()) {
-      case FrameElement::INVALID:  // Fall through.
-      case FrameElement::MEMORY:
-        // There was an early bailout for invalid and synced elements
-        // (memory elements are always synced).
-        UNREACHABLE();
-        break;
-      case FrameElement::REGISTER:
-        __ push(element.reg());
-        break;
-      case FrameElement::CONSTANT: {
-        Result temp = cgen_->allocator()->Allocate();
-        ASSERT(temp.is_valid());
-        __ mov(temp.reg(), Operand(element.handle()));
-        __ push(temp.reg());
-        break;
-      }
-      case FrameElement::COPY: {
-        int backing_index = element.index();
-        FrameElement backing = elements_[backing_index];
-        ASSERT(backing.is_memory() || backing.is_register());
-        if (backing.is_memory()) {
-          Result temp = cgen_->allocator()->Allocate();
-          ASSERT(temp.is_valid());
-          __ ldr(temp.reg(), MemOperand(fp, fp_relative(backing_index)));
-          __ push(temp.reg());
-        } else {
-          __ push(backing.reg());
-        }
-        break;
-      }
-    }
-  }
-  elements_[index].set_sync();
-}
+void VirtualFrame::SyncElementBelowStackPointer(int index) {
+  UNREACHABLE();
+}
+
+
+void VirtualFrame::SyncElementByPushing(int index) {
+  UNREACHABLE();
+}
......
@@ -407,9 +407,11 @@ class VirtualFrame : public Malloced {
   // Sync the range of elements in [begin, end).
   void SyncRange(int begin, int end);
 
-  // Sync a single element, assuming that its index is less than
-  // or equal to stack pointer + 1.
-  void RawSyncElementAt(int index);
+  // Sync a single unsynced element that lies beneath or at the stack pointer.
+  void SyncElementBelowStackPointer(int index);
+
+  // Sync a single unsynced element that lies just above the stack pointer.
+  void SyncElementByPushing(int index);
 
   // Push a copy of a frame slot (typically a local or parameter) on top of
   // the frame.
......
@@ -57,101 +57,101 @@ VirtualFrame::VirtualFrame(CodeGenerator* cgen)
 }
 
-// Clear the dirty bit for the element at a given index if it is a
-// valid element.  The stack address corresponding to the element must
-// be allocated on the physical stack, or the first element above the
-// stack pointer so it can be allocated by a single push instruction.
-void VirtualFrame::RawSyncElementAt(int index) {
-  FrameElement element = elements_[index];
-  if (!element.is_valid() || element.is_synced()) return;
-  if (index <= stack_pointer_) {
-    // Emit code to write elements below the stack pointer to their
-    // (already allocated) stack address.
-    switch (element.type()) {
-      case FrameElement::INVALID:  // Fall through.
-      case FrameElement::MEMORY:
-        // There was an early bailout for invalid and synced elements
-        // (memory elements are always synced).
-        UNREACHABLE();
-        break;
-      case FrameElement::REGISTER:
-        __ mov(Operand(ebp, fp_relative(index)), element.reg());
-        break;
-      case FrameElement::CONSTANT:
-        if (cgen_->IsUnsafeSmi(element.handle())) {
-          Result temp = cgen_->allocator()->Allocate();
-          ASSERT(temp.is_valid());
-          cgen_->LoadUnsafeSmi(temp.reg(), element.handle());
-          __ mov(Operand(ebp, fp_relative(index)), temp.reg());
-        } else {
-          __ Set(Operand(ebp, fp_relative(index)),
-                 Immediate(element.handle()));
-        }
-        break;
-      case FrameElement::COPY: {
-        int backing_index = element.index();
-        FrameElement backing_element = elements_[backing_index];
-        if (backing_element.is_memory()) {
-          Result temp = cgen_->allocator()->Allocate();
-          ASSERT(temp.is_valid());
-          __ mov(temp.reg(), Operand(ebp, fp_relative(backing_index)));
-          __ mov(Operand(ebp, fp_relative(index)), temp.reg());
-        } else {
-          ASSERT(backing_element.is_register());
-          __ mov(Operand(ebp, fp_relative(index)), backing_element.reg());
-        }
-        break;
-      }
-    }
-  } else {
-    // Push elements above the stack pointer to allocate space and
-    // sync them.  Space should have already been allocated in the
-    // actual frame for all the elements below this one.
-    ASSERT(index == stack_pointer_ + 1);
-    stack_pointer_++;
-    switch (element.type()) {
-      case FrameElement::INVALID:  // Fall through.
-      case FrameElement::MEMORY:
-        // There was an early bailout for invalid and synced elements
-        // (memory elements are always synced).
-        UNREACHABLE();
-        break;
-      case FrameElement::REGISTER:
-        __ push(element.reg());
-        break;
-      case FrameElement::CONSTANT:
-        if (cgen_->IsUnsafeSmi(element.handle())) {
-          Result temp = cgen_->allocator()->Allocate();
-          ASSERT(temp.is_valid());
-          cgen_->LoadUnsafeSmi(temp.reg(), element.handle());
-          __ push(temp.reg());
-        } else {
-          __ push(Immediate(element.handle()));
-        }
-        break;
-      case FrameElement::COPY: {
-        int backing_index = element.index();
-        FrameElement backing = elements_[backing_index];
-        ASSERT(backing.is_memory() || backing.is_register());
-        if (backing.is_memory()) {
-          __ push(Operand(ebp, fp_relative(backing_index)));
-        } else {
-          __ push(backing.reg());
-        }
-        break;
-      }
-    }
-  }
-  elements_[index].set_sync();
-}
+void VirtualFrame::SyncElementBelowStackPointer(int index) {
+  // Emit code to write elements below the stack pointer to their
+  // (already allocated) stack address.
+  ASSERT(index <= stack_pointer_);
+  FrameElement element = elements_[index];
+  ASSERT(!element.is_synced());
+  switch (element.type()) {
+    case FrameElement::INVALID:
+      break;
+    case FrameElement::MEMORY:
+      // This function should not be called with synced elements.
+      // (memory elements are always synced).
+      UNREACHABLE();
+      break;
+    case FrameElement::REGISTER:
+      __ mov(Operand(ebp, fp_relative(index)), element.reg());
+      break;
+    case FrameElement::CONSTANT:
+      if (cgen_->IsUnsafeSmi(element.handle())) {
+        Result temp = cgen_->allocator()->Allocate();
+        ASSERT(temp.is_valid());
+        cgen_->LoadUnsafeSmi(temp.reg(), element.handle());
+        __ mov(Operand(ebp, fp_relative(index)), temp.reg());
+      } else {
+        __ Set(Operand(ebp, fp_relative(index)),
+               Immediate(element.handle()));
+      }
+      break;
+    case FrameElement::COPY: {
+      int backing_index = element.index();
+      FrameElement backing_element = elements_[backing_index];
+      if (backing_element.is_memory()) {
+        Result temp = cgen_->allocator()->Allocate();
+        ASSERT(temp.is_valid());
+        __ mov(temp.reg(), Operand(ebp, fp_relative(backing_index)));
+        __ mov(Operand(ebp, fp_relative(index)), temp.reg());
+      } else {
+        ASSERT(backing_element.is_register());
+        __ mov(Operand(ebp, fp_relative(index)), backing_element.reg());
+      }
+      break;
+    }
+  }
+  elements_[index].set_sync();
+}
+
+
+void VirtualFrame::SyncElementByPushing(int index) {
+  // Sync an element of the frame that is just above the stack pointer
+  // by pushing it.
+  ASSERT(index == stack_pointer_ + 1);
+  stack_pointer_++;
+  FrameElement element = elements_[index];
+
+  switch (element.type()) {
+    case FrameElement::INVALID:
+      __ push(Immediate(Smi::FromInt(0)));
+      break;
+    case FrameElement::MEMORY:
+      // No memory elements exist above the stack pointer.
+      UNREACHABLE();
+      break;
+    case FrameElement::REGISTER:
+      __ push(element.reg());
+      break;
+    case FrameElement::CONSTANT:
+      if (cgen_->IsUnsafeSmi(element.handle())) {
+        Result temp = cgen_->allocator()->Allocate();
+        ASSERT(temp.is_valid());
+        cgen_->LoadUnsafeSmi(temp.reg(), element.handle());
+        __ push(temp.reg());
+      } else {
+        __ push(Immediate(element.handle()));
+      }
+      break;
+    case FrameElement::COPY: {
+      int backing_index = element.index();
+      FrameElement backing = elements_[backing_index];
+      ASSERT(backing.is_memory() || backing.is_register());
+      if (backing.is_memory()) {
+        __ push(Operand(ebp, fp_relative(backing_index)));
+      } else {
+        __ push(backing.reg());
+      }
+      break;
+    }
+  }
+  elements_[index].set_sync();
+}
......
@@ -414,9 +414,11 @@ class VirtualFrame : public Malloced {
   // Sync the range of elements in [begin, end).
   void SyncRange(int begin, int end);
 
-  // Sync a single element, assuming that its index is less than
-  // or equal to stack pointer + 1.
-  void RawSyncElementAt(int index);
+  // Sync a single unsynced element that lies beneath or at the stack pointer.
+  void SyncElementBelowStackPointer(int index);
+
+  // Sync a single unsynced element that lies just above the stack pointer.
+  void SyncElementByPushing(int index);
 
   // Push a copy of a frame slot (typically a local or parameter) on top of
   // the frame.
......
@@ -212,22 +212,41 @@ void VirtualFrame::SpillElementAt(int index) {
 }
 
 
-// Clear the dirty bits for the range of elements in [begin, end).
+// Clear the dirty bits for the range of elements in
+// [min(stack_pointer_ + 1,begin), end).
 void VirtualFrame::SyncRange(int begin, int end) {
   ASSERT(begin >= 0);
   ASSERT(end <= elements_.length());
-  for (int i = begin; i < end; i++) {
-    RawSyncElementAt(i);
+  if (begin > stack_pointer_) {
+    // Elements between stack_pointer_ + 1 and begin must also be synced.
+    for (int i = stack_pointer_ + 1; i < end; i++) {
+      SyncElementByPushing(i);
+    }
+  } else if (end <= stack_pointer_ + 1) {
+    for (int i = begin; i < end; i++) {
+      if (!elements_[i].is_synced()) {
+        SyncElementBelowStackPointer(i);
+      }
+    }
+  } else {
+    // Split into two ranges that each satisfy a condition above.
+    SyncRange(begin, stack_pointer_ + 1);
+    SyncRange(stack_pointer_ + 1, end);
   }
 }
 
 
 // Clear the dirty bit for the element at a given index.
 void VirtualFrame::SyncElementAt(int index) {
-  if (index > stack_pointer_ + 1) {
-    SyncRange(stack_pointer_ + 1, index);
+  if (index <= stack_pointer_) {
+    if (!elements_[index].is_synced()) {
+      SyncElementBelowStackPointer(index);
+    }
+  } else {
+    for (int i = stack_pointer_ + 1; i <= index; i++) {
+      SyncElementByPushing(i);
+    }
   }
-  RawSyncElementAt(index);
 }
@@ -286,24 +305,18 @@ void VirtualFrame::PrepareForCall(int spilled_args, int dropped_args) {
   ASSERT(height() >= spilled_args);
   ASSERT(dropped_args <= spilled_args);
 
-  int arg_base_index = elements_.length() - spilled_args;
-  // Spill the arguments.  We spill from the top down so that the
-  // backing stores of register copies will be spilled only after all
-  // the copies are spilled---it is better to spill via a
-  // register-to-memory move than a memory-to-memory move.
-  for (int i = elements_.length() - 1; i >= arg_base_index; i--) {
-    SpillElementAt(i);
-  }
-
-  // Below the arguments, spill registers and sync everything else.
-  // Syncing is necessary for the locals and parameters to give the
-  // debugger a consistent view of the frame.
-  for (int i = arg_base_index - 1; i >= 0; i--) {
-    FrameElement element = elements_[i];
-    if (element.is_register()) {
-      SpillElementAt(i);
-    } else if (element.is_valid()) {
-      SyncElementAt(i);
-    }
-  }
+  SyncRange(0, elements_.length());
+
+  // Spill registers.
+  for (int i = 0; i < kNumRegisters; i++) {
+    if (is_used(i)) {
+      SpillElementAt(register_locations_[i]);
+    }
+  }
+
+  // Spill the arguments.
+  for (int i = elements_.length() - spilled_args; i < elements_.length(); i++) {
+    if (!elements_[i].is_memory()) {
+      SpillElementAt(i);
+    }
+  }
......
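
The PrepareForCall hunk above reorders the work before a call: sync the entire frame, then spill every frame element that currently lives in a register (found through register_locations_), then spill the argument slots. Below is a minimal runnable sketch of that ordering; ToyCallFrame, Kind, and the printed trace are simplified stand-ins for the real frame bookkeeping, not V8 code.

```cpp
// Toy sketch (not V8 code) of the post-patch PrepareForCall ordering:
// 1) sync every element, 2) spill elements held in registers,
// 3) spill the argument slots so they end up as plain memory elements.
#include <cstdio>
#include <vector>

enum class Kind { kMemory, kRegister, kConstant };

struct ToyCallFrame {
  std::vector<Kind> elements;            // simplified frame elements
  std::vector<int> register_locations_;  // register -> element index, -1 if unused

  bool is_used(int reg) const { return register_locations_[reg] != -1; }

  void SyncRange(int begin, int end) {
    std::printf("sync [%d, %d)\n", begin, end);
  }

  void SpillElementAt(int index) {
    // Spilling turns the element into a memory element; the real code also
    // releases any register that was backing it.
    elements[index] = Kind::kMemory;
    std::printf("spill %d\n", index);
  }

  void PrepareForCall(int spilled_args) {
    SyncRange(0, static_cast<int>(elements.size()));
    // Spill registers.
    for (int reg = 0; reg < static_cast<int>(register_locations_.size()); reg++) {
      if (is_used(reg)) SpillElementAt(register_locations_[reg]);
    }
    // Spill the arguments.
    for (int i = static_cast<int>(elements.size()) - spilled_args;
         i < static_cast<int>(elements.size()); i++) {
      if (elements[i] != Kind::kMemory) SpillElementAt(i);
    }
  }
};

int main() {
  ToyCallFrame frame;
  frame.elements = {Kind::kMemory, Kind::kRegister, Kind::kConstant};
  frame.register_locations_ = {1, -1};  // register 0 backs element 1
  frame.PrepareForCall(2);              // prints: sync [0, 3), spill 1, spill 2
  return 0;
}
```
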