Commit 6cfc2d36 authored by Toon Verwaest, committed by Commit Bot

[builtins] Remove smi-tagged argc support in CheckStackOverflow

Bug: v8:6921
Change-Id: I370b1c3f8fbf3f5478ac7779205083e05710ea15
Reviewed-on: https://chromium-review.googlesource.com/726081
Commit-Queue: Toon Verwaest <verwaest@chromium.org>
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/master@{#48695}
parent 9db8f821
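
Note: every call site in this CL passes an untagged (raw integer) argument count, so the smi-tagged branch of Generate_CheckStackOverflow had no remaining users; the change drops it together with the IsTagged enum and the x64 smi-scaling helper. As a rough guide to the logic the per-architecture hunks below emit, here is a minimal plain-C++ sketch of the check after the change. The function name, the stand-in sp/limit parameters, and the hard-coded kPointerSizeLog2 value are illustrative assumptions, not code from this CL.

#include <cstdint>

// Sketch only: mirrors the logic Generate_CheckStackOverflow generates, not
// the V8 implementation. kPointerSizeLog2 is 3 on a 64-bit target.
constexpr int kPointerSizeLog2 = 3;

// sp and real_stack_limit stand in for the stack pointer register and the
// isolate's "real stack limit" that the builtin loads.
bool HasEnoughStackSpace(uintptr_t sp, uintptr_t real_stack_limit, int argc) {
  // Space left above the real limit; this may already be negative if the
  // stack has overflowed, which the signed comparison below handles.
  intptr_t remaining = static_cast<intptr_t>(sp - real_stack_limit);
  // Bytes needed to push argc pointer-sized arguments. The removed smi-tagged
  // path shifted by kPointerSizeLog2 - kSmiTagSize instead, because a 32-bit
  // smi already carries the count shifted left by its one-bit tag.
  intptr_t needed = static_cast<intptr_t>(argc) << kPointerSizeLog2;
  return remaining > needed;  // Branch to the "okay" label in the generated code.
}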
@@ -585,11 +585,8 @@ void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
   __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
 }
 
-enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
-
 // Clobbers r2; preserves all other registers.
-static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
-                                        IsTagged argc_is_tagged) {
+static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc) {
   // Check the stack for overflow. We are not trying to catch
   // interruptions (e.g. debug break and preemption) here, so the "real stack
   // limit" is checked.
@@ -599,12 +596,7 @@ static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
   // here which will cause r2 to become negative.
   __ sub(r2, sp, r2);
   // Check if the arguments will overflow the stack.
-  if (argc_is_tagged == kArgcIsSmiTagged) {
-    __ cmp(r2, Operand::PointerOffsetFromSmiKey(argc));
-  } else {
-    DCHECK_EQ(argc_is_tagged, kArgcIsUntaggedInt);
-    __ cmp(r2, Operand(argc, LSL, kPointerSizeLog2));
-  }
+  __ cmp(r2, Operand(argc, LSL, kPointerSizeLog2));
   __ b(gt, &okay);  // Signed comparison.
 
   // Out of stack space.
@@ -641,7 +633,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
     // Check if we have enough stack space to push all arguments.
     // Clobbers r2.
-    Generate_CheckStackOverflow(masm, r3, kArgcIsUntaggedInt);
+    Generate_CheckStackOverflow(masm, r3);
 
     // Remember new.target.
     __ mov(r5, r0);
......
@@ -585,11 +585,8 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
   __ B(&stepping_prepared);
 }
 
-enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
-
 // Clobbers x10, x15; preserves all other registers.
-static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
-                                        IsTagged argc_is_tagged) {
+static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc) {
   // Check the stack for overflow.
   // We are not trying to catch interruptions (e.g. debug break and
   // preemption) here, so the "real stack limit" is checked.
@@ -600,12 +597,7 @@ static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
   // TODO(jbramley): Check that the stack usage here is safe.
   __ Sub(x10, jssp, x10);
   // Check if the arguments will overflow the stack.
-  if (argc_is_tagged == kArgcIsSmiTagged) {
-    __ Cmp(x10, Operand::UntagSmiAndScale(argc, kPointerSizeLog2));
-  } else {
-    DCHECK_EQ(argc_is_tagged, kArgcIsUntaggedInt);
-    __ Cmp(x10, Operand(argc, LSL, kPointerSizeLog2));
-  }
+  __ Cmp(x10, Operand(argc, LSL, kPointerSizeLog2));
   __ B(gt, &enough_stack_space);
 
   __ CallRuntime(Runtime::kThrowStackOverflow);
   // We should never return from the APPLY_OVERFLOW builtin.
@@ -651,7 +643,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
     __ Push(function, receiver);
 
     // Check if we have enough stack space to push all arguments.
-    Generate_CheckStackOverflow(masm, argc, kArgcIsUntaggedInt);
+    Generate_CheckStackOverflow(masm, argc);
 
     // Copy arguments to the stack in a loop, in reverse order.
     // x3: argc.
......
@@ -396,11 +396,8 @@ void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
   __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
 }
 
-enum IsTagged { kEaxIsSmiTagged, kEaxIsUntaggedInt };
-
 // Clobbers ecx, edx, edi; preserves all other registers.
-static void Generate_CheckStackOverflow(MacroAssembler* masm,
-                                        IsTagged eax_is_tagged) {
+static void Generate_CheckStackOverflow(MacroAssembler* masm) {
   // eax : the number of items to be pushed to the stack
   //
   // Check the stack for overflow. We are not trying to catch
@@ -417,8 +414,7 @@ static void Generate_CheckStackOverflow(MacroAssembler* masm,
   // Make edx the space we need for the array when it is unrolled onto the
   // stack.
   __ mov(edx, eax);
-  int smi_tag = eax_is_tagged == kEaxIsSmiTagged ? kSmiTagSize : 0;
-  __ shl(edx, kPointerSizeLog2 - smi_tag);
+  __ shl(edx, kPointerSizeLog2);
   // Check if the arguments will overflow the stack.
   __ cmp(ecx, edx);
   __ j(greater, &okay);  // Signed comparison.
@@ -454,7 +450,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
     // Check if we have enough stack space to push all arguments.
     // Expects argument count in eax. Clobbers ecx, edx, edi.
-    Generate_CheckStackOverflow(masm, kEaxIsUntaggedInt);
+    Generate_CheckStackOverflow(masm);
 
     // Copy arguments to the stack in a loop.
     Label loop, entry;
......
@@ -454,11 +454,8 @@ void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
   __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
 }
 
-enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
-
 // Clobbers a2; preserves all other registers.
-static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
-                                        IsTagged argc_is_tagged) {
+static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc) {
   // Check the stack for overflow. We are not trying to catch
   // interruptions (e.g. debug break and preemption) here, so the "real stack
   // limit" is checked.
@@ -468,12 +465,7 @@ static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
   // here which will cause a2 to become negative.
   __ Subu(a2, sp, a2);
   // Check if the arguments will overflow the stack.
-  if (argc_is_tagged == kArgcIsSmiTagged) {
-    __ sll(t3, argc, kPointerSizeLog2 - kSmiTagSize);
-  } else {
-    DCHECK_EQ(argc_is_tagged, kArgcIsUntaggedInt);
-    __ sll(t3, argc, kPointerSizeLog2);
-  }
+  __ sll(t3, argc, kPointerSizeLog2);
   // Signed comparison.
   __ Branch(&okay, gt, a2, Operand(t3));
@@ -511,7 +503,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
     // Check if we have enough stack space to push all arguments.
     // Clobbers a2.
-    Generate_CheckStackOverflow(masm, a3, kArgcIsUntaggedInt);
+    Generate_CheckStackOverflow(masm, a3);
 
     // Remember new.target.
     __ mov(t1, a0);
......
@@ -570,11 +570,8 @@ void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
   __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
 }
 
-enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
-
 // Clobbers a2; preserves all other registers.
-static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
-                                        IsTagged argc_is_tagged) {
+static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc) {
   // Check the stack for overflow. We are not trying to catch
   // interruptions (e.g. debug break and preemption) here, so the "real stack
   // limit" is checked.
@@ -584,12 +581,7 @@ static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
   // here which will cause r2 to become negative.
   __ dsubu(a2, sp, a2);
   // Check if the arguments will overflow the stack.
-  if (argc_is_tagged == kArgcIsSmiTagged) {
-    __ SmiScale(a7, v0, kPointerSizeLog2);
-  } else {
-    DCHECK_EQ(argc_is_tagged, kArgcIsUntaggedInt);
-    __ dsll(a7, argc, kPointerSizeLog2);
-  }
+  __ dsll(a7, argc, kPointerSizeLog2);
   __ Branch(&okay, gt, a2, Operand(a7));  // Signed comparison.
 
   // Out of stack space.
@@ -626,7 +618,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
     // Check if we have enough stack space to push all arguments.
    // Clobbers a2.
-    Generate_CheckStackOverflow(masm, a3, kArgcIsUntaggedInt);
+    Generate_CheckStackOverflow(masm, a3);
 
     // Remember new.target.
     __ mov(a5, a0);
......
@@ -586,11 +586,8 @@ void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
   __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
 }
 
-enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
-
 // Clobbers r5; preserves all other registers.
-static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
-                                        IsTagged argc_is_tagged) {
+static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc) {
   // Check the stack for overflow. We are not trying to catch
   // interruptions (e.g. debug break and preemption) here, so the "real stack
   // limit" is checked.
@@ -600,12 +597,7 @@ static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
   // here which will cause r5 to become negative.
   __ sub(r5, sp, r5);
   // Check if the arguments will overflow the stack.
-  if (argc_is_tagged == kArgcIsSmiTagged) {
-    __ SmiToPtrArrayOffset(r0, argc);
-  } else {
-    DCHECK_EQ(argc_is_tagged, kArgcIsUntaggedInt);
-    __ ShiftLeftImm(r0, argc, Operand(kPointerSizeLog2));
-  }
+  __ ShiftLeftImm(r0, argc, Operand(kPointerSizeLog2));
   __ cmp(r5, r0);
   __ bgt(&okay);  // Signed comparison.
@@ -643,7 +635,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
     // Check if we have enough stack space to push all arguments.
     // Clobbers r5.
-    Generate_CheckStackOverflow(masm, r6, kArgcIsUntaggedInt);
+    Generate_CheckStackOverflow(masm, r6);
 
     // Copy arguments to the stack in a loop.
     // r4: function
......
@@ -581,11 +581,8 @@ void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
   __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
 }
 
-enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
-
 // Clobbers r4; preserves all other registers.
-static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
-                                        IsTagged argc_is_tagged) {
+static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc) {
   // Check the stack for overflow. We are not trying to catch
   // interruptions (e.g. debug break and preemption) here, so the "real stack
   // limit" is checked.
@@ -595,12 +592,7 @@ static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
   // here which will cause r4 to become negative.
   __ SubP(r4, sp, r4);
   // Check if the arguments will overflow the stack.
-  if (argc_is_tagged == kArgcIsSmiTagged) {
-    __ SmiToPtrArrayOffset(r0, argc);
-  } else {
-    DCHECK_EQ(argc_is_tagged, kArgcIsUntaggedInt);
-    __ ShiftLeftP(r0, argc, Operand(kPointerSizeLog2));
-  }
+  __ ShiftLeftP(r0, argc, Operand(kPointerSizeLog2));
   __ CmpP(r4, r0);
   __ bgt(&okay);  // Signed comparison.
@@ -639,7 +631,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
     // Check if we have enough stack space to push all arguments.
     // Clobbers r4.
-    Generate_CheckStackOverflow(masm, r5, kArgcIsUntaggedInt);
+    Generate_CheckStackOverflow(masm, r5);
 
     // Copy arguments to the stack in a loop from argv to sp.
     // The arguments are actually placed in reverse order on sp
......
@@ -401,11 +401,8 @@ void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
   __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
 }
 
-enum IsTagged { kRaxIsSmiTagged, kRaxIsUntaggedInt };
-
 // Clobbers rcx, r11, kScratchRegister; preserves all other registers.
-static void Generate_CheckStackOverflow(MacroAssembler* masm,
-                                        IsTagged rax_is_tagged) {
+static void Generate_CheckStackOverflow(MacroAssembler* masm) {
   // rax : the number of items to be pushed to the stack
   //
   // Check the stack for overflow. We are not trying to catch
@@ -419,13 +416,8 @@ static void Generate_CheckStackOverflow(MacroAssembler* masm,
   __ subp(rcx, kScratchRegister);
   // Make r11 the space we need for the array when it is unrolled onto the
   // stack.
-  if (rax_is_tagged == kRaxIsSmiTagged) {
-    __ PositiveSmiTimesPowerOfTwoToInteger64(r11, rax, kPointerSizeLog2);
-  } else {
-    DCHECK_EQ(rax_is_tagged, kRaxIsUntaggedInt);
-    __ movp(r11, rax);
-    __ shlq(r11, Immediate(kPointerSizeLog2));
-  }
+  __ movp(r11, rax);
+  __ shlq(r11, Immediate(kPointerSizeLog2));
   // Check if the arguments will overflow the stack.
   __ cmpp(rcx, r11);
   __ j(greater, &okay);  // Signed comparison.
@@ -534,7 +526,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
     // Check if we have enough stack space to push all arguments.
    // Expects argument count in rax. Clobbers rcx, r11.
-    Generate_CheckStackOverflow(masm, kRaxIsUntaggedInt);
+    Generate_CheckStackOverflow(masm);
 
     // Copy arguments to the stack in a loop.
     // Register rbx points to array of pointers to handle locations.
......
@@ -960,18 +960,6 @@ void MacroAssembler::SmiToInteger64(Register dst, Register src) {
   }
 }
 
-void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
-  if (SmiValuesAre32Bits()) {
-    movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
-  } else {
-    DCHECK(SmiValuesAre31Bits());
-    movp(dst, src);
-    SmiToInteger64(dst, dst);
-  }
-}
-
 void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
   AssertSmi(smi1);
   AssertSmi(smi2);
@@ -1029,25 +1017,6 @@ void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
 }
 
-void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
-                                                           Register src,
-                                                           int power) {
-  DCHECK_GE(power, 0);
-  DCHECK_LT(power, 64);
-  if (power == 0) {
-    SmiToInteger64(dst, src);
-    return;
-  }
-  if (dst != src) {
-    movp(dst, src);
-  }
-  if (power < kSmiShift) {
-    sarp(dst, Immediate(kSmiShift - power));
-  } else if (power > kSmiShift) {
-    shlp(dst, Immediate(power - kSmiShift));
-  }
-}
-
 Condition TurboAssembler::CheckSmi(Register src) {
   STATIC_ASSERT(kSmiTag == 0);
   testb(src, Immediate(kSmiTagMask));
......
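
The deleted helper folded smi untagging and power-of-two scaling into a single shift; with its only caller (the smi-tagged branch above) gone, its declaration and test are removed below as well. A minimal sketch of the arithmetic it performed, assuming the 64-bit smi layout with the payload in the upper 32 bits (kSmiShift == 32); the plain-integer function here is illustrative, not V8 API:

#include <cstdint>

constexpr int kSmiShift = 32;  // Assumed layout: smi payload in the upper 32 bits.

// A smi holding n is represented as n << kSmiShift, so n * 2^power can be
// produced with one shift of the tagged value: right by (kSmiShift - power)
// when power < kSmiShift, left by (power - kSmiShift) when it is larger.
int64_t PositiveSmiTimesPowerOfTwo(int64_t tagged_smi, int power) {
  if (power < kSmiShift) return tagged_smi >> (kSmiShift - power);
  if (power > kSmiShift) return tagged_smi << (power - kSmiShift);
  return tagged_smi;  // power == kSmiShift: the tagged value already is n << power.
}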
@@ -643,13 +643,6 @@ class MacroAssembler : public TurboAssembler {
   // Convert smi to 64-bit integer (sign extended if necessary).
   void SmiToInteger64(Register dst, Register src);
-  void SmiToInteger64(Register dst, const Operand& src);
-
-  // Multiply a positive smi's integer value by a power of two.
-  // Provides result as 64-bit integer value.
-  void PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
-                                             Register src,
-                                             int power);
 
   // Simple comparison of smis. Both sides must be known smis to use these,
   // otherwise use Cmp.
......
@@ -465,70 +465,6 @@ TEST(SmiIndex) {
   CHECK_EQ(0, result);
 }
 
-void TestPositiveSmiPowerUp(MacroAssembler* masm, Label* exit, int id, int x) {
-  CHECK_GE(x, 0);
-  int powers[] = { 0, 1, 2, 3, 8, 16, 24, 31 };
-  int power_count = 8;
-  __ movl(rax, Immediate(id));
-  for (int i = 0; i < power_count; i++) {
-    int power = powers[i];
-    intptr_t result = static_cast<intptr_t>(x) << power;
-    __ Set(r8, result);
-    __ Move(rcx, Smi::FromInt(x));
-    __ movq(r11, rcx);
-    __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rcx, power);
-    __ cmpq(rdx, r8);
-    __ j(not_equal, exit);
-    __ incq(rax);
-    __ cmpq(r11, rcx);  // rcx unchanged.
-    __ j(not_equal, exit);
-    __ incq(rax);
-    __ PositiveSmiTimesPowerOfTwoToInteger64(rcx, rcx, power);
-    __ cmpq(rdx, r8);
-    __ j(not_equal, exit);
-    __ incq(rax);
-  }
-}
-
-TEST(PositiveSmiTimesPowerOfTwoToInteger64) {
-  // Allocate an executable page of memory.
-  size_t actual_size;
-  byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
-      Assembler::kMinimalBufferSize * 4, &actual_size, true));
-  CHECK(buffer);
-  Isolate* isolate = CcTest::i_isolate();
-  HandleScope handles(isolate);
-  MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size),
-                           v8::internal::CodeObjectRequired::kYes);
-
-  MacroAssembler* masm = &assembler;
-  EntryCode(masm);
-  Label exit;
-
-  TestPositiveSmiPowerUp(masm, &exit, 0x20, 0);
-  TestPositiveSmiPowerUp(masm, &exit, 0x40, 1);
-  TestPositiveSmiPowerUp(masm, &exit, 0x60, 127);
-  TestPositiveSmiPowerUp(masm, &exit, 0x80, 128);
-  TestPositiveSmiPowerUp(masm, &exit, 0xA0, 255);
-  TestPositiveSmiPowerUp(masm, &exit, 0xC0, 256);
-  TestPositiveSmiPowerUp(masm, &exit, 0x100, 65535);
-  TestPositiveSmiPowerUp(masm, &exit, 0x120, 65536);
-  TestPositiveSmiPowerUp(masm, &exit, 0x140, Smi::kMaxValue);
-  __ xorq(rax, rax);  // Success.
-  __ bind(&exit);
-  ExitCode(masm);
-  __ ret(0);
-
-  CodeDesc desc;
-  masm->GetCode(isolate, &desc);
-  // Call the function from C++.
-  int result = FUNCTION_CAST<F0>(buffer)();
-  CHECK_EQ(0, result);
-}
-
 TEST(OperandOffset) {
   uint32_t data[256];
   for (uint32_t i = 0; i < 256; i++) { data[i] = i * 0x01010101; }
......