Change OSR stack check patching to use the stack check table.

Change OSR stack check patching to use the stack check table to iterate over the calls to stack guards in a platform-independent way. Introduce Deoptimizer::PatchStackCheckAt, implemented by each platform, to perform the platform-specific patch at a given pc.

BUG=none
TEST=none

Review URL: http://codereview.chromium.org/6392027

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@6551 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
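
As context for the diffs below, here is a minimal standalone sketch (not V8 code) of the stack check table layout that the new Deoptimizer::PatchStackCheckCode loop assumes: a uint32 entry count followed by two-word entries whose second word is the pc offset of the instruction after the stack check call. The loop never reads the first word of an entry, so its meaning (presumably an id for the stack check) is an assumption here, and the helper name ForEachStackCheck is hypothetical.

#include <cstdint>
#include <cstring>

typedef unsigned char byte;

// Table layout implied by the diff:
//   [ uint32 length | (uint32 id, uint32 pc_offset) * length ]
// Mirrors the new loop: read the length, then for each entry read the
// pc offset from the entry's second word (cursor + kIntSize in the
// real code), visit instruction_start + pc_offset, and advance the
// cursor by 2 * kIntSize.
template <typename Visitor>
void ForEachStackCheck(byte* instruction_start,
                       uint32_t table_offset,
                       Visitor visit) {
  byte* cursor = instruction_start + table_offset;
  uint32_t length;
  std::memcpy(&length, cursor, sizeof(length));
  cursor += sizeof(uint32_t);
  for (uint32_t i = 0; i < length; ++i) {
    uint32_t pc_offset;
    std::memcpy(&pc_offset, cursor + sizeof(uint32_t), sizeof(pc_offset));
    visit(instruction_start + pc_offset);  // pc_after for PatchStackCheckAt
    cursor += 2 * sizeof(uint32_t);
  }
}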
@@ -112,9 +112,9 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
 }
 
-void Deoptimizer::PatchStackCheckCode(Code* unoptimized_code,
-                                      Code* check_code,
-                                      Code* replacement_code) {
+void Deoptimizer::PatchStackCheckAt(Address pc_after,
+                                    Code* check_code,
+                                    Code* replacement_code) {
   UNIMPLEMENTED();
 }
...
@@ -810,6 +810,25 @@ bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
 }
 
+void Deoptimizer::PatchStackCheckCode(Code* unoptimized_code,
+                                      Code* check_code,
+                                      Code* replacement_code) {
+  // Iterate over the stack check table and patch every stack check
+  // call to an unconditional call to the replacement code.
+  ASSERT(unoptimized_code->kind() == Code::FUNCTION);
+  Address stack_check_cursor = unoptimized_code->instruction_start() +
+      unoptimized_code->stack_check_table_start();
+  uint32_t table_length = Memory::uint32_at(stack_check_cursor);
+  stack_check_cursor += kIntSize;
+  for (uint32_t i = 0; i < table_length; ++i) {
+    uint32_t pc_offset = Memory::uint32_at(stack_check_cursor + kIntSize);
+    Address pc_after = unoptimized_code->instruction_start() + pc_offset;
+    PatchStackCheckAt(pc_after, check_code, replacement_code);
+    stack_check_cursor += 2 * kIntSize;
+  }
+}
+
 unsigned Deoptimizer::ComputeInputFrameSize() const {
   unsigned fixed_size = ComputeFixedSize(function_);
   // The fp-to-sp delta already takes the context and the function
...
@@ -134,6 +134,12 @@ class Deoptimizer : public Malloced {
                                  Code* check_code,
                                  Code* replacement_code);
 
+  // Patch stack guard check at instruction before pc_after in
+  // the unoptimized code to unconditionally call replacement_code.
+  static void PatchStackCheckAt(Address pc_after,
+                                Code* check_code,
+                                Code* replacement_code);
+
   // Change all patched stack guard checks in the unoptimized code
   // back to a normal stack guard check.
   static void RevertStackCheckCode(Code* unoptimized_code,
...
@@ -106,48 +106,35 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
 }
 
-void Deoptimizer::PatchStackCheckCode(Code* unoptimized_code,
-                                      Code* check_code,
-                                      Code* replacement_code) {
-  // Iterate the unoptimized code and patch every stack check except at
-  // the function entry.  This code assumes the function entry stack
-  // check appears first i.e., is not deferred or otherwise reordered.
-  ASSERT(unoptimized_code->kind() == Code::FUNCTION);
-  bool first = true;
-  for (RelocIterator it(unoptimized_code, RelocInfo::kCodeTargetMask);
-       !it.done();
-       it.next()) {
-    RelocInfo* rinfo = it.rinfo();
-    if (rinfo->target_address() == Code::cast(check_code)->entry()) {
-      if (first) {
-        first = false;
-      } else {
-        // The stack check code matches the pattern:
-        //
-        //     cmp esp, <limit>
-        //     jae ok
-        //     call <stack guard>
-        //     test eax, <loop nesting depth>
-        // ok: ...
-        //
-        // We will patch away the branch so the code is:
-        //
-        //     cmp esp, <limit>  ;; Not changed
-        //     nop
-        //     nop
-        //     call <on-stack replacement>
-        //     test eax, <loop nesting depth>
-        // ok:
-        Address call_target_address = rinfo->pc();
-        ASSERT(*(call_target_address - 3) == 0x73 &&  // jae
-               *(call_target_address - 2) == 0x07 &&  // offset
-               *(call_target_address - 1) == 0xe8);   // call
-        *(call_target_address - 3) = 0x90;  // nop
-        *(call_target_address - 2) = 0x90;  // nop
-        rinfo->set_target_address(replacement_code->entry());
-      }
-    }
-  }
+void Deoptimizer::PatchStackCheckAt(Address pc_after,
+                                    Code* check_code,
+                                    Code* replacement_code) {
+  Address call_target_address = pc_after - kPointerSize;
+  ASSERT(check_code->entry() ==
+         Assembler::target_address_at(call_target_address));
+  // The stack check code matches the pattern:
+  //
+  //     cmp esp, <limit>
+  //     jae ok
+  //     call <stack guard>
+  //     test eax, <loop nesting depth>
+  // ok: ...
+  //
+  // We will patch away the branch so the code is:
+  //
+  //     cmp esp, <limit>  ;; Not changed
+  //     nop
+  //     nop
+  //     call <on-stack replacement>
+  //     test eax, <loop nesting depth>
+  // ok:
+  ASSERT(*(call_target_address - 3) == 0x73 &&  // jae
+         *(call_target_address - 2) == 0x07 &&  // offset
+         *(call_target_address - 1) == 0xe8);   // call
+  *(call_target_address - 3) = 0x90;  // nop
+  *(call_target_address - 2) = 0x90;  // nop
+  Assembler::set_target_address_at(call_target_address,
+                                   replacement_code->entry());
 }
...
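
For illustration only: the effect of the ia32 PatchStackCheckAt above at the byte level, as a self-contained sketch operating on a copy of the instruction bytes. The opcode values (0x73 0x07 = jae +7, 0xe8 = call rel32, 0x90 = nop) come straight from the asserts in the diff; the function name PatchJaeToNops is made up for this example.

#include <cassert>

typedef unsigned char byte;

// Overwrite the jae that skips the stack check call with two nops,
// making the call unconditional. call_target_address points at the
// rel32 operand of the call (pc_after - kPointerSize in the diff).
void PatchJaeToNops(byte* call_target_address) {
  assert(call_target_address[-3] == 0x73 &&  // jae
         call_target_address[-2] == 0x07 &&  // jump offset (over the call)
         call_target_address[-1] == 0xe8);   // call rel32 opcode
  call_target_address[-3] = 0x90;  // nop
  call_target_address[-2] = 0x90;  // nop
  // The real code then retargets the rel32 operand at the on-stack
  // replacement builtin via Assembler::set_target_address_at.
}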
@@ -107,9 +107,9 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
 }
 
-void Deoptimizer::PatchStackCheckCode(Code* unoptimized_code,
-                                      Code* check_code,
-                                      Code* replacement_code) {
+void Deoptimizer::PatchStackCheckAt(Address pc_after,
+                                    Code* check_code,
+                                    Code* replacement_code) {
   UNIMPLEMENTED();
 }
...