Commit 29914b6b authored by whesse@chromium.org

Remove --check-stack flag from V8.

Review URL: http://codereview.chromium.org/338017

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@3149 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 299a491d
@@ -1122,22 +1122,20 @@ void CodeGenerator::Branch(bool if_true, JumpTarget* target) {
 void CodeGenerator::CheckStack() {
   VirtualFrame::SpilledScope spilled_scope;
-  if (FLAG_check_stack) {
-    Comment cmnt(masm_, "[ check stack");
-    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
-    // Put the lr setup instruction in the delay slot.  kInstrSize is added to
-    // the implicit 8 byte offset that always applies to operations with pc and
-    // gives a return address 12 bytes down.
-    masm_->add(lr, pc, Operand(Assembler::kInstrSize));
-    masm_->cmp(sp, Operand(ip));
-    StackCheckStub stub;
-    // Call the stub if lower.
-    masm_->mov(pc,
-               Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
-                       RelocInfo::CODE_TARGET),
-               LeaveCC,
-               lo);
-  }
+  Comment cmnt(masm_, "[ check stack");
+  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
+  // Put the lr setup instruction in the delay slot.  kInstrSize is added to
+  // the implicit 8 byte offset that always applies to operations with pc and
+  // gives a return address 12 bytes down.
+  masm_->add(lr, pc, Operand(Assembler::kInstrSize));
+  masm_->cmp(sp, Operand(ip));
+  StackCheckStub stub;
+  // Call the stub if lower.
+  masm_->mov(pc,
+             Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
+                     RelocInfo::CODE_TARGET),
+             LeaveCC,
+             lo);
 }
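The "return address 12 bytes down" comment above can be verified with simple address arithmetic: reading pc on ARM yields the address of the current instruction plus 8, and kInstrSize adds another 4. A minimal standalone sketch of that arithmetic (constants assumed from the ARM architecture, not taken from this diff):

    #include <cstdio>

    int main() {
      const int kInstrSize = 4;    // every ARM instruction is 4 bytes
      const int kPcReadAhead = 8;  // reading pc yields the current address + 8
      int add_at = 0;              // offset of "add lr, pc, #kInstrSize"
      int lr = add_at + kPcReadAhead + kInstrSize;  // = 12
      // Layout: add at +0, cmp at +4, mov-to-pc at +8, next instruction at +12.
      // lr therefore points at the instruction after the branch into the stub,
      // which is exactly where StackCheckStub should return to.
      std::printf("return address: +%d bytes\n", lr);
      return 0;
    }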
@@ -63,28 +63,25 @@ void FastCodeGenerator::Generate(FunctionLiteral* fun) {
     if (locals_count > 0) {
       __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
     }
-    if (FLAG_check_stack) {
-      __ LoadRoot(r2, Heap::kStackLimitRootIndex);
-    }
+    __ LoadRoot(r2, Heap::kStackLimitRootIndex);
     for (int i = 0; i < locals_count; i++) {
       __ push(ip);
     }
   }
 
-  if (FLAG_check_stack) {
-    // Put the lr setup instruction in the delay slot.  The kInstrSize is
-    // added to the implicit 8 byte offset that always applies to operations
-    // with pc and gives a return address 12 bytes down.
-    Comment cmnt(masm_, "[ Stack check");
-    __ add(lr, pc, Operand(Assembler::kInstrSize));
-    __ cmp(sp, Operand(r2));
-    StackCheckStub stub;
-    __ mov(pc,
-           Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
-                   RelocInfo::CODE_TARGET),
-           LeaveCC,
-           lo);
-  }
+  // Check the stack for overflow or break request.
+  // Put the lr setup instruction in the delay slot.  The kInstrSize is
+  // added to the implicit 8 byte offset that always applies to operations
+  // with pc and gives a return address 12 bytes down.
+  Comment cmnt(masm_, "[ Stack check");
+  __ add(lr, pc, Operand(Assembler::kInstrSize));
+  __ cmp(sp, Operand(r2));
+  StackCheckStub stub;
+  __ mov(pc,
+         Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
+                 RelocInfo::CODE_TARGET),
+         LeaveCC,
+         lo);
 
   { Comment cmnt(masm_, "[ Declarations");
     VisitDeclarations(fun->scope()->declarations());
@@ -1100,14 +1100,12 @@ void RegExpMacroAssemblerARM::CheckPreemption() {
 void RegExpMacroAssemblerARM::CheckStackLimit() {
-  if (FLAG_check_stack) {
-    ExternalReference stack_limit =
-        ExternalReference::address_of_regexp_stack_limit();
-    __ mov(r0, Operand(stack_limit));
-    __ ldr(r0, MemOperand(r0));
-    __ cmp(backtrack_stackpointer(), Operand(r0));
-    SafeCall(&stack_overflow_label_, ls);
-  }
+  ExternalReference stack_limit =
+      ExternalReference::address_of_regexp_stack_limit();
+  __ mov(r0, Operand(stack_limit));
+  __ ldr(r0, MemOperand(r0));
+  __ cmp(backtrack_stackpointer(), Operand(r0));
+  SafeCall(&stack_overflow_label_, ls);
 }
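The check the regexp assembler emits above amounts to the following, shown here as a hypothetical C++ sketch (the function names and the overflow handler are stand-ins, not V8 API). The backtrack stack grows downward, and the ls condition is unsigned lower-or-same, so the handler runs once the pointer reaches or crosses the limit:

    #include <cstdint>
    #include <cstdio>

    // Stand-in for SafeCall(&stack_overflow_label_): grow the backtrack
    // stack or fail the match.
    void HandleStackOverflow() { std::printf("grow or abort regexp stack\n"); }

    // Mirrors "cmp backtrack_stackpointer(), r0; SafeCall(..., ls)".
    void CheckStackLimit(uintptr_t backtrack_sp, uintptr_t stack_limit) {
      if (backtrack_sp <= stack_limit) {  // ls: unsigned lower or same
        HandleStackOverflow();
      }
    }

    int main() {
      CheckStackLimit(0x1000, 0x2000);  // at/under the limit: handler fires
      CheckStackLimit(0x3000, 0x2000);  // headroom left: nothing happens
      return 0;
    }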
@@ -146,29 +146,27 @@ void VirtualFrame::AllocateStackSlots() {
     // Initialize stack slots with 'undefined' value.
     __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
   }
-  if (FLAG_check_stack) {
-    __ LoadRoot(r2, Heap::kStackLimitRootIndex);
-  }
+  __ LoadRoot(r2, Heap::kStackLimitRootIndex);
   for (int i = 0; i < count; i++) {
     __ push(ip);
   }
-  if (FLAG_check_stack) {
-    // Put the lr setup instruction in the delay slot.  The kInstrSize is added
-    // to the implicit 8 byte offset that always applies to operations with pc
-    // and gives a return address 12 bytes down.
-    masm()->add(lr, pc, Operand(Assembler::kInstrSize));
-    masm()->cmp(sp, Operand(r2));
-    StackCheckStub stub;
-    // Call the stub if lower.
-    masm()->mov(pc,
-                Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
-                        RelocInfo::CODE_TARGET),
-                LeaveCC,
-                lo);
-  }
+  // Check the stack for overflow or a break request.
+  // Put the lr setup instruction in the delay slot.  The kInstrSize is added
+  // to the implicit 8 byte offset that always applies to operations with pc
+  // and gives a return address 12 bytes down.
+  masm()->add(lr, pc, Operand(Assembler::kInstrSize));
+  masm()->cmp(sp, Operand(r2));
+  StackCheckStub stub;
+  // Call the stub if lower.
+  masm()->mov(pc,
+              Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
+                      RelocInfo::CODE_TARGET),
+              LeaveCC,
+              lo);
 }
 
 void VirtualFrame::SaveContextRegister() {
   UNIMPLEMENTED();
 }
@@ -132,8 +132,6 @@ DEFINE_bool(stack_trace_on_abort, true,
 // codegen-ia32.cc / codegen-arm.cc
 DEFINE_bool(trace, false, "trace function calls")
 DEFINE_bool(defer_negation, true, "defer negation operation")
-DEFINE_bool(check_stack, true,
-            "check stack for overflow, interrupt, breakpoint")
 
 // codegen.cc
 DEFINE_bool(lazy, true, "use lazy compilation")
@@ -520,48 +520,48 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
     __ push(Operand(ebp, 2 * kPointerSize));  // push arguments
     __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
 
-    if (FLAG_check_stack) {
-      // We need to catch preemptions right here, otherwise an unlucky preemption
-      // could show up as a failed apply.
-      ExternalReference stack_guard_limit =
-          ExternalReference::address_of_stack_guard_limit();
-      Label retry_preemption;
-      Label no_preemption;
-      __ bind(&retry_preemption);
-      __ mov(edi, Operand::StaticVariable(stack_guard_limit));
-      __ cmp(esp, Operand(edi));
-      __ j(above, &no_preemption, taken);
-      // Preemption!
-      // Because builtins always remove the receiver from the stack, we
-      // have to fake one to avoid underflowing the stack.
-      __ push(eax);
-      __ push(Immediate(Smi::FromInt(0)));
+    // Check the stack for overflow or a break request.
+    // We need to catch preemptions right here, otherwise an unlucky preemption
+    // could show up as a failed apply.
+    ExternalReference stack_guard_limit =
+        ExternalReference::address_of_stack_guard_limit();
+    Label retry_preemption;
+    Label no_preemption;
+    __ bind(&retry_preemption);
+    __ mov(edi, Operand::StaticVariable(stack_guard_limit));
+    __ cmp(esp, Operand(edi));
+    __ j(above, &no_preemption, taken);
+    // Preemption!
+    // Because builtins always remove the receiver from the stack, we
+    // have to fake one to avoid underflowing the stack.
+    __ push(eax);
+    __ push(Immediate(Smi::FromInt(0)));
 
-      // Do call to runtime routine.
-      __ CallRuntime(Runtime::kStackGuard, 1);
-      __ pop(eax);
-      __ jmp(&retry_preemption);
-      __ bind(&no_preemption);
-      Label okay;
-      // Make ecx the space we have left.
-      __ mov(ecx, Operand(esp));
-      __ sub(ecx, Operand(edi));
-      // Make edx the space we need for the array when it is unrolled onto the
-      // stack.
-      __ mov(edx, Operand(eax));
-      __ shl(edx, kPointerSizeLog2 - kSmiTagSize);
-      __ cmp(ecx, Operand(edx));
-      __ j(greater, &okay, taken);
-      // Too bad: Out of stack space.
-      __ push(Operand(ebp, 4 * kPointerSize));  // push this
-      __ push(eax);
-      __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
-      __ bind(&okay);
-    }
+    // Do call to runtime routine.
+    __ CallRuntime(Runtime::kStackGuard, 1);
+    __ pop(eax);
+    __ jmp(&retry_preemption);
+    __ bind(&no_preemption);
+    Label okay;
+    // Make ecx the space we have left.
+    __ mov(ecx, Operand(esp));
+    __ sub(ecx, Operand(edi));
+    // Make edx the space we need for the array when it is unrolled onto the
+    // stack.
+    __ mov(edx, Operand(eax));
+    __ shl(edx, kPointerSizeLog2 - kSmiTagSize);
+    __ cmp(ecx, Operand(edx));
+    __ j(greater, &okay, taken);
+    // Too bad: Out of stack space.
+    __ push(Operand(ebp, 4 * kPointerSize));  // push this
+    __ push(eax);
+    __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
+    __ bind(&okay);
+    // End of stack check.
 
     // Push current index and limit.
     const int kLimitOffset =
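The shl in the hunk above turns a Smi-tagged argument count directly into a byte count: on ia32 a Smi is the value shifted left by kSmiTagSize, so a further shift by kPointerSizeLog2 - kSmiTagSize multiplies the untagged count by the pointer size. A small sketch of the arithmetic (illustration only, nothing here is from the diff):

    #include <cstdint>
    #include <cstdio>

    int main() {
      const int kSmiTagSize = 1;       // ia32 Smis: value shifted left by 1
      const int kPointerSizeLog2 = 2;  // 4-byte pointers
      int32_t n = 5;                   // untagged argument count
      int32_t eax = n << kSmiTagSize;  // Smi-tagged count, as held in eax
      int32_t edx = eax << (kPointerSizeLog2 - kSmiTagSize);  // bytes needed
      std::printf("%d args -> %d bytes\n", n, edx);  // "5 args -> 20 bytes"
      return 0;
    }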
@@ -2203,14 +2203,12 @@ void DeferredStackCheck::Generate() {
 void CodeGenerator::CheckStack() {
-  if (FLAG_check_stack) {
-    DeferredStackCheck* deferred = new DeferredStackCheck;
-    ExternalReference stack_guard_limit =
-        ExternalReference::address_of_stack_guard_limit();
-    __ cmp(esp, Operand::StaticVariable(stack_guard_limit));
-    deferred->Branch(below);
-    deferred->BindExit();
-  }
+  DeferredStackCheck* deferred = new DeferredStackCheck;
+  ExternalReference stack_guard_limit =
+      ExternalReference::address_of_stack_guard_limit();
+  __ cmp(esp, Operand::StaticVariable(stack_guard_limit));
+  deferred->Branch(below);
+  deferred->BindExit();
 }
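The DeferredStackCheck used above keeps the fast path to one compare and one conditional branch; the guard call is generated out of line, and both paths rejoin at BindExit. A rough C++ sketch of that control flow (names and structure assumed for illustration, not V8 API):

    #include <cstdint>
    #include <cstdio>

    // Stand-in for the out-of-line code DeferredStackCheck::Generate() emits.
    void StackGuardSlowPath() { std::printf("handle interrupt or overflow\n"); }

    void CheckStack(uintptr_t esp, uintptr_t stack_guard_limit) {
      // Fast path: cmp esp, [stack_guard_limit]; deferred->Branch(below).
      if (esp < stack_guard_limit) {
        StackGuardSlowPath();  // taken rarely; lives outside the hot code
      }
      // deferred->BindExit(): both paths rejoin here.
    }

    int main() {
      CheckStack(0x1000, 0x2000);  // below the limit: slow path fires
      CheckStack(0x3000, 0x2000);  // above the limit: falls through
      return 0;
    }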
@@ -1093,17 +1093,15 @@ void RegExpMacroAssemblerIA32::CheckPreemption() {
 void RegExpMacroAssemblerIA32::CheckStackLimit() {
-  if (FLAG_check_stack) {
-    Label no_stack_overflow;
-    ExternalReference stack_limit =
-        ExternalReference::address_of_regexp_stack_limit();
-    __ cmp(backtrack_stackpointer(), Operand::StaticVariable(stack_limit));
-    __ j(above, &no_stack_overflow);
+  Label no_stack_overflow;
+  ExternalReference stack_limit =
+      ExternalReference::address_of_regexp_stack_limit();
+  __ cmp(backtrack_stackpointer(), Operand::StaticVariable(stack_limit));
+  __ j(above, &no_stack_overflow);
 
-    SafeCall(&stack_overflow_label_);
+  SafeCall(&stack_overflow_label_);
 
-    __ bind(&no_stack_overflow);
-  }
+  __ bind(&no_stack_overflow);
 }
@@ -318,47 +318,47 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
     __ push(Operand(rbp, kArgumentsOffset));
     __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
 
-    if (FLAG_check_stack) {
-      // We need to catch preemptions right here, otherwise an unlucky preemption
-      // could show up as a failed apply.
-      Label retry_preemption;
-      Label no_preemption;
-      __ bind(&retry_preemption);
-      ExternalReference stack_guard_limit =
-          ExternalReference::address_of_stack_guard_limit();
-      __ movq(kScratchRegister, stack_guard_limit);
-      __ movq(rcx, rsp);
-      __ subq(rcx, Operand(kScratchRegister, 0));
-      // rcx contains the difference between the stack limit and the stack top.
-      // We use it below to check that there is enough room for the arguments.
-      __ j(above, &no_preemption);
-      // Preemption!
-      // Because runtime functions always remove the receiver from the stack, we
-      // have to fake one to avoid underflowing the stack.
-      __ push(rax);
-      __ Push(Smi::FromInt(0));
+    // Check the stack for overflow or a break request.
+    // We need to catch preemptions right here, otherwise an unlucky preemption
+    // could show up as a failed apply.
+    Label retry_preemption;
+    Label no_preemption;
+    __ bind(&retry_preemption);
+    ExternalReference stack_guard_limit =
+        ExternalReference::address_of_stack_guard_limit();
+    __ movq(kScratchRegister, stack_guard_limit);
+    __ movq(rcx, rsp);
+    __ subq(rcx, Operand(kScratchRegister, 0));
+    // rcx contains the difference between the stack limit and the stack top.
+    // We use it below to check that there is enough room for the arguments.
+    __ j(above, &no_preemption);
+    // Preemption!
+    // Because runtime functions always remove the receiver from the stack, we
+    // have to fake one to avoid underflowing the stack.
+    __ push(rax);
+    __ Push(Smi::FromInt(0));
 
-      // Do call to runtime routine.
-      __ CallRuntime(Runtime::kStackGuard, 1);
-      __ pop(rax);
-      __ jmp(&retry_preemption);
+    // Do call to runtime routine.
+    __ CallRuntime(Runtime::kStackGuard, 1);
+    __ pop(rax);
+    __ jmp(&retry_preemption);
 
-      __ bind(&no_preemption);
+    __ bind(&no_preemption);
 
-      Label okay;
-      // Make rdx the space we need for the array when it is unrolled onto the
-      // stack.
-      __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
-      __ cmpq(rcx, rdx);
-      __ j(greater, &okay);
+    Label okay;
+    // Make rdx the space we need for the array when it is unrolled onto the
+    // stack.
+    __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
+    __ cmpq(rcx, rdx);
+    __ j(greater, &okay);
 
-      // Too bad: Out of stack space.
-      __ push(Operand(rbp, kFunctionOffset));
-      __ push(rax);
-      __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
-      __ bind(&okay);
-    }
+    // Too bad: Out of stack space.
+    __ push(Operand(rbp, kFunctionOffset));
+    __ push(rax);
+    __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
+    __ bind(&okay);
+    // End of stack check.
 
     // Push current index and limit.
    const int kLimitOffset =
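In the x64 hunk above, one subq does double duty: it sets the flags consumed by j(above) for the preemption check and leaves the headroom (rsp minus the guard limit) in rcx for the later space check. A hypothetical standalone sketch of that arithmetic (all values assumed, and the Smi-to-bytes step simplified to a plain multiply):

    #include <cstdint>
    #include <cstdio>

    int main() {
      uint64_t rsp = 0x7fff0000;    // assumed stack top
      uint64_t limit = 0x7ffe0000;  // assumed stack guard limit
      // subq rcx, [limit]: one instruction yields both the flags for the
      // preemption branch and the remaining headroom.
      uint64_t rcx = rsp - limit;
      bool no_preemption = rsp > limit;  // j(above, ...) reads these flags
      uint64_t args = 100;               // argument count (a Smi in rax)
      uint64_t rdx = args * 8;           // bytes the unrolled array needs,
                                         // as the SmiTimesPowerOfTwo macro
                                         // computes from the tagged count
      if (no_preemption && rcx > rdx) {
        std::printf("room to unroll: %llu bytes free, %llu needed\n",
                    (unsigned long long)rcx, (unsigned long long)rdx);
      }
      return 0;
    }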
@@ -852,12 +852,10 @@ void DeferredStackCheck::Generate() {
 void CodeGenerator::CheckStack() {
-  if (FLAG_check_stack) {
-    DeferredStackCheck* deferred = new DeferredStackCheck;
-    __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
-    deferred->Branch(below);
-    deferred->BindExit();
-  }
+  DeferredStackCheck* deferred = new DeferredStackCheck;
+  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
+  deferred->Branch(below);
+  deferred->BindExit();
 }
@@ -1209,18 +1209,16 @@ void RegExpMacroAssemblerX64::CheckPreemption() {
 void RegExpMacroAssemblerX64::CheckStackLimit() {
-  if (FLAG_check_stack) {
-    Label no_stack_overflow;
-    ExternalReference stack_limit =
-        ExternalReference::address_of_regexp_stack_limit();
-    __ load_rax(stack_limit);
-    __ cmpq(backtrack_stackpointer(), rax);
-    __ j(above, &no_stack_overflow);
+  Label no_stack_overflow;
+  ExternalReference stack_limit =
+      ExternalReference::address_of_regexp_stack_limit();
+  __ load_rax(stack_limit);
+  __ cmpq(backtrack_stackpointer(), rax);
+  __ j(above, &no_stack_overflow);
 
-    SafeCall(&stack_overflow_label_);
+  SafeCall(&stack_overflow_label_);
 
-    __ bind(&no_stack_overflow);
-  }
+  __ bind(&no_stack_overflow);
 }