Commit 4c8fbaff authored by ager@chromium.org

Move stack check patching to the architecture-dependent deoptimizer

files. On ARM it is not enough to iterate the relocation information,
because that only gives us access to the constant pool and not to the
place in the instruction stream where the target in the constant pool
is called.

Review URL: http://codereview.chromium.org/6343005

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@6444 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 9c2d52eb
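
For orientation before the diffs: on ia32 the stack check call site is inline in the instruction stream, so the bytes around the relocation target can be rewritten directly, whereas on ARM (per the commit message) the relocation entry leads only to the constant-pool slot holding the stub address, not to the call instruction itself. Below is a minimal standalone sketch of the byte rewriting that the cmp esp / jae / call hunk further down performs; the function names and the raw-pointer interface are invented here for illustration and are not the commit's code.

#include <cassert>
#include <cstdint>

// call_operand plays the role of rinfo->pc() in the diff: it points at
// the 32-bit operand of the call instruction, one byte past the 0xe8
// opcode.
void PatchStackCheckSketch(uint8_t* call_operand) {
  assert(call_operand[-3] == 0x73 &&  // jae ok
         call_operand[-2] == 0x07 &&  // branch offset
         call_operand[-1] == 0xe8);   // call opcode
  call_operand[-3] = 0x90;  // nop: the conditional branch is gone...
  call_operand[-2] = 0x90;  // nop: ...so the call is always taken.
  // The real code then retargets the call, as the diff does with
  // rinfo->set_target_address(replacement_code->entry()).
}

void RevertStackCheckSketch(uint8_t* call_operand) {
  assert(call_operand[-3] == 0x90 &&  // nop
         call_operand[-2] == 0x90 &&  // nop
         call_operand[-1] == 0xe8);   // call opcode
  call_operand[-3] = 0x73;  // restore jae
  call_operand[-2] = 0x07;  // restore its offset
}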
@@ -112,13 +112,16 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
 }
 
-void Deoptimizer::PatchStackCheckCode(RelocInfo* rinfo,
+void Deoptimizer::PatchStackCheckCode(Code* unoptimized_code,
+                                      Code* check_code,
                                       Code* replacement_code) {
   UNIMPLEMENTED();
 }
 
-void Deoptimizer::RevertStackCheckCode(RelocInfo* rinfo, Code* check_code) {
+void Deoptimizer::RevertStackCheckCode(Code* unoptimized_code,
+                                       Code* check_code,
+                                       Code* replacement_code) {
   UNIMPLEMENTED();
 }

@@ -128,14 +128,17 @@ class Deoptimizer : public Malloced {
   static void VisitAllOptimizedFunctions(OptimizedFunctionVisitor* visitor);
 
-  // Given the relocation info of a call to the stack check stub, patch the
-  // code so as to go unconditionally to the on-stack replacement builtin
-  // instead.
-  static void PatchStackCheckCode(RelocInfo* rinfo, Code* replacement_code);
-
-  // Given the relocation info of a call to the on-stack replacement
-  // builtin, patch the code back to the original stack check code.
-  static void RevertStackCheckCode(RelocInfo* rinfo, Code* check_code);
+  // Patch all stack guard checks in the unoptimized code to
+  // unconditionally call replacement_code.
+  static void PatchStackCheckCode(Code* unoptimized_code,
+                                  Code* check_code,
+                                  Code* replacement_code);
+
+  // Change all patched stack guard checks in the unoptimized code
+  // back to a normal stack guard check.
+  static void RevertStackCheckCode(Code* unoptimized_code,
+                                   Code* check_code,
+                                   Code* replacement_code);
 
   ~Deoptimizer();

@@ -106,44 +106,71 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
 }
 
-void Deoptimizer::PatchStackCheckCode(RelocInfo* rinfo,
+void Deoptimizer::PatchStackCheckCode(Code* unoptimized_code,
+                                      Code* check_code,
                                       Code* replacement_code) {
-  // The stack check code matches the pattern:
-  //
-  //     cmp esp, <limit>
-  //     jae ok
-  //     call <stack guard>
-  //     test eax, <loop nesting depth>
-  // ok: ...
-  //
-  // We will patch away the branch so the code is:
-  //
-  //     cmp esp, <limit>  ;; Not changed
-  //     nop
-  //     nop
-  //     call <on-stack replacement>
-  //     test eax, <loop nesting depth>
-  // ok:
-  Address call_target_address = rinfo->pc();
-  ASSERT(*(call_target_address - 3) == 0x73 &&  // jae
-         *(call_target_address - 2) == 0x07 &&  // offset
-         *(call_target_address - 1) == 0xe8);   // call
-  *(call_target_address - 3) = 0x90;  // nop
-  *(call_target_address - 2) = 0x90;  // nop
-  rinfo->set_target_address(replacement_code->entry());
+  // Iterate the unoptimized code and patch every stack check except at
+  // the function entry.  This code assumes the function entry stack
+  // check appears first, i.e., is not deferred or otherwise reordered.
+  ASSERT(unoptimized_code->kind() == Code::FUNCTION);
+  bool first = true;
+  for (RelocIterator it(unoptimized_code, RelocInfo::kCodeTargetMask);
+       !it.done();
+       it.next()) {
+    RelocInfo* rinfo = it.rinfo();
+    if (rinfo->target_address() == Code::cast(check_code)->entry()) {
+      if (first) {
+        first = false;
+      } else {
+        // The stack check code matches the pattern:
+        //
+        //     cmp esp, <limit>
+        //     jae ok
+        //     call <stack guard>
+        //     test eax, <loop nesting depth>
+        // ok: ...
+        //
+        // We will patch away the branch so the code is:
+        //
+        //     cmp esp, <limit>  ;; Not changed
+        //     nop
+        //     nop
+        //     call <on-stack replacement>
+        //     test eax, <loop nesting depth>
+        // ok:
+        Address call_target_address = rinfo->pc();
+        ASSERT(*(call_target_address - 3) == 0x73 &&  // jae
+               *(call_target_address - 2) == 0x07 &&  // offset
+               *(call_target_address - 1) == 0xe8);   // call
+        *(call_target_address - 3) = 0x90;  // nop
+        *(call_target_address - 2) = 0x90;  // nop
+        rinfo->set_target_address(replacement_code->entry());
+      }
+    }
+  }
 }
 
-void Deoptimizer::RevertStackCheckCode(RelocInfo* rinfo, Code* check_code) {
-  // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to
-  // restore the conditional branch.
-  Address call_target_address = rinfo->pc();
-  ASSERT(*(call_target_address - 3) == 0x90 &&  // nop
-         *(call_target_address - 2) == 0x90 &&  // nop
-         *(call_target_address - 1) == 0xe8);   // call
-  *(call_target_address - 3) = 0x73;  // jae
-  *(call_target_address - 2) = 0x07;  // offset
-  rinfo->set_target_address(check_code->entry());
+void Deoptimizer::RevertStackCheckCode(Code* unoptimized_code,
+                                       Code* check_code,
+                                       Code* replacement_code) {
+  // Iterate the unoptimized code and revert all the patched stack checks.
+  for (RelocIterator it(unoptimized_code, RelocInfo::kCodeTargetMask);
+       !it.done();
+       it.next()) {
+    RelocInfo* rinfo = it.rinfo();
+    if (rinfo->target_address() == replacement_code->entry()) {
+      // Replace the nops from patching (Deoptimizer::PatchStackCheckCode)
+      // to restore the conditional branch.
+      Address call_target_address = rinfo->pc();
+      ASSERT(*(call_target_address - 3) == 0x90 &&  // nop
+             *(call_target_address - 2) == 0x90 &&  // nop
+             *(call_target_address - 1) == 0xe8);   // call
+      *(call_target_address - 3) = 0x73;  // jae
+      *(call_target_address - 2) = 0x07;  // offset
+      rinfo->set_target_address(check_code->entry());
+    }
+  }
 }

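Both new functions above share the same scanning skeleton. The following distillation is hypothetical (ForEachCallSite and its callback parameter are invented names); RelocIterator, RelocInfo::kCodeTargetMask, and the target_address() comparison are used exactly as in the hunk above, so this only compiles inside V8's source tree. Note also the `first` flag in PatchStackCheckCode, which skips the function-entry stack check, presumably because on-stack replacement is only meaningful at loop back edges.

// Hypothetical helper: visit every CODE_TARGET relocation entry whose
// current target is old_target and let a callback patch that call site.
template <typename PatchFn>
static void ForEachCallSite(Code* code, Code* old_target, PatchFn patch) {
  for (RelocIterator it(code, RelocInfo::kCodeTargetMask);
       !it.done();
       it.next()) {
    RelocInfo* rinfo = it.rinfo();
    if (rinfo->target_address() == old_target->entry()) {
      patch(rinfo);  // e.g. rewrite the branch bytes and retarget the call
    }
  }
}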
@@ -193,22 +193,9 @@ static void AttemptOnStackReplacement(JSFunction* function) {
   if (maybe_check_code->ToObject(&check_code)) {
     Code* replacement_code = Builtins::builtin(Builtins::OnStackReplacement);
     Code* unoptimized_code = shared->code();
-    // Iterate the unoptimized code and patch every stack check except at
-    // the function entry. This code assumes the function entry stack
-    // check appears first i.e., is not deferred or otherwise reordered.
-    bool first = true;
-    for (RelocIterator it(unoptimized_code, RelocInfo::kCodeTargetMask);
-         !it.done();
-         it.next()) {
-      RelocInfo* rinfo = it.rinfo();
-      if (rinfo->target_address() == Code::cast(check_code)->entry()) {
-        if (first) {
-          first = false;
-        } else {
-          Deoptimizer::PatchStackCheckCode(rinfo, replacement_code);
-        }
-      }
-    }
+    Deoptimizer::PatchStackCheckCode(unoptimized_code,
+                                     Code::cast(check_code),
+                                     replacement_code);
   }
 }

@@ -6944,15 +6944,9 @@ static MaybeObject* Runtime_CompileForOnStackReplacement(Arguments args) {
     Handle<Code> check_code = check_stub.GetCode();
     Handle<Code> replacement_code(
         Builtins::builtin(Builtins::OnStackReplacement));
-    // Iterate the unoptimized code and revert all the patched stack checks.
-    for (RelocIterator it(*unoptimized, RelocInfo::kCodeTargetMask);
-         !it.done();
-         it.next()) {
-      RelocInfo* rinfo = it.rinfo();
-      if (rinfo->target_address() == replacement_code->entry()) {
-        Deoptimizer::RevertStackCheckCode(rinfo, *check_code);
-      }
-    }
+    Deoptimizer::RevertStackCheckCode(*unoptimized,
+                                      *check_code,
+                                      *replacement_code);
 
     // Allow OSR only at nesting level zero again.
     unoptimized->set_allow_osr_at_loop_nesting_level(0);

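Taken together, the two runtime hunks (in AttemptOnStackReplacement and Runtime_CompileForOnStackReplacement) now read as a simple arm/disarm pair around the new Deoptimizer interface. A condensed sketch, with the wrapper functions invented here; only the two Deoptimizer calls and set_allow_osr_at_loop_nesting_level(0) come from the diffs above:

// Arm OSR: redirect every loop stack check in the unoptimized code to
// the OnStackReplacement builtin (as AttemptOnStackReplacement does).
void ArmOsrSketch(Code* unoptimized, Code* check, Code* replacement) {
  Deoptimizer::PatchStackCheckCode(unoptimized, check, replacement);
}

// Disarm OSR: restore the ordinary stack checks and allow OSR only at
// nesting level zero again (as Runtime_CompileForOnStackReplacement does).
void DisarmOsrSketch(Handle<Code> unoptimized,
                     Handle<Code> check,
                     Handle<Code> replacement) {
  Deoptimizer::RevertStackCheckCode(*unoptimized, *check, *replacement);
  unoptimized->set_allow_osr_at_loop_nesting_level(0);
}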
@@ -46,13 +46,16 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
 }
 
-void Deoptimizer::PatchStackCheckCode(RelocInfo* rinfo,
+void Deoptimizer::PatchStackCheckCode(Code* unoptimized_code,
+                                      Code* check_code,
                                       Code* replacement_code) {
   UNIMPLEMENTED();
 }
 
-void Deoptimizer::RevertStackCheckCode(RelocInfo* rinfo, Code* check_code) {
+void Deoptimizer::RevertStackCheckCode(Code* unoptimized_code,
+                                       Code* check_code,
+                                       Code* replacement_code) {
   UNIMPLEMENTED();
 }