Commit 247bc200 authored by Martyn Capewell, committed by Commit Bot

Reland "[arm64] Add slot copier to masm and use in builtins"

This is a reland of 7c80f9ce with a fixed restore of the
system stack pointer in the tests.

Original change's description:
> Abstract some stack slot copies through a macro assembler function. This
> eliminates some non-paired stack operations.
>
> This is a reland of 1cc93be0 with
> additional tests, originally reviewed on
> https://chromium-review.googlesource.com/685238 and reverted due to an
> unrelated intermittent x64 failure.
>
> Bug: v8:6644
> Change-Id: If22b359dbda4bab1cb83cd8c44a2af5801012c37
> Reviewed-on: https://chromium-review.googlesource.com/707247
> Reviewed-by: Benedikt Meurer <bmeurer@chromium.org>
> Commit-Queue: Martyn Capewell <martyn.capewell@arm.com>
> Cr-Commit-Position: refs/heads/master@{#48419}

Bug: v8:6644
Change-Id: Ie8b45c73acc13df36c978a9ae4bee77082cb7c8d
Reviewed-on: https://chromium-review.googlesource.com/709515
Reviewed-by: Benedikt Meurer <bmeurer@chromium.org>
Commit-Queue: Martyn Capewell <martyn.capewell@arm.com>
Cr-Commit-Position: refs/heads/master@{#48449}
parent 9941c1e3
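The pattern this change applies is easiest to see in the Generate_PushBoundArguments hunk further down: an explicit Peek/Poke loop that relocates one stack slot per iteration, issuing two unpaired stack accesses each time, is replaced by a single CopySlots call whose implementation copies two double words per iteration with ldp/stp. Condensed from the diff below:

// Before: relocate the arguments down the stack one slot at a time.
Label loop, done_loop;
__ Mov(x5, 0);
__ Bind(&loop);
__ Cmp(x5, x0);
__ B(gt, &done_loop);
__ Peek(x10, Operand(x4, LSL, kPointerSizeLog2));
__ Poke(x10, Operand(x5, LSL, kPointerSizeLog2));
__ Add(x4, x4, 1);
__ Add(x5, x5, 1);
__ B(&loop);
__ Bind(&done_loop);

// After: a single helper call; the copy is done with paired ldp/stp accesses.
UseScratchRegisterScope temps(masm);
Register argc = temps.AcquireX();
__ Mov(argc, x0);
__ CopySlots(0, x4, argc);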
@@ -1482,6 +1482,62 @@ void TurboAssembler::AssertCspAligned() {
}
}
void TurboAssembler::CopySlots(int dst, Register src, Register slot_count) {
DCHECK(!src.IsZero());
UseScratchRegisterScope scope(this);
Register dst_reg = scope.AcquireX();
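// Turn the slot offsets into addresses based on the current stack pointer.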
Add(dst_reg, StackPointer(), dst << kPointerSizeLog2);
Add(src, StackPointer(), Operand(src, LSL, kPointerSizeLog2));
CopyDoubleWords(dst_reg, src, slot_count);
}
void TurboAssembler::CopySlots(Register dst, Register src,
Register slot_count) {
DCHECK(!dst.IsZero() && !src.IsZero());
Add(dst, StackPointer(), Operand(dst, LSL, kPointerSizeLog2));
Add(src, StackPointer(), Operand(src, LSL, kPointerSizeLog2));
CopyDoubleWords(dst, src, slot_count);
}
void TurboAssembler::CopyDoubleWords(Register dst, Register src,
Register count) {
if (emit_debug_code()) {
// Copy requires dst < src || (dst - src) >= count.
Label dst_below_src;
Subs(dst, dst, src);
B(lt, &dst_below_src);
Cmp(dst, count);
Check(ge, kOffsetOutOfRange);
Bind(&dst_below_src);
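// Restore dst, which was clobbered by the Subs above.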
Add(dst, dst, src);
}
static_assert(kPointerSize == kDRegSize,
"pointers must be the same size as doubles");
UseScratchRegisterScope scope(this);
VRegister temp0 = scope.AcquireD();
VRegister temp1 = scope.AcquireD();
Label pairs, done;
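// If the count is odd, copy one double word first so that an even number remains.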
Tbz(count, 0, &pairs);
Ldr(temp0, MemOperand(src, kPointerSize, PostIndex));
Sub(count, count, 1);
Str(temp0, MemOperand(dst, kPointerSize, PostIndex));
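// Copy the remaining double words two at a time with ldp/stp.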
Bind(&pairs);
Cbz(count, &done);
Ldp(temp0, temp1, MemOperand(src, 2 * kPointerSize, PostIndex));
Sub(count, count, 2);
Stp(temp0, temp1, MemOperand(dst, 2 * kPointerSize, PostIndex));
B(&pairs);
// TODO(all): large copies may benefit from using temporary Q registers
// to copy four double words per iteration.
Bind(&done);
}
void TurboAssembler::AssertFPCRState(Register fpcr) {
if (emit_debug_code()) {
Label unexpected_mode, done;
......
@@ -659,6 +659,20 @@ class TurboAssembler : public Assembler {
// Emits a runtime assert that the CSP is aligned.
void AssertCspAligned();
// Copy slot_count stack slots from the stack offset specified by src to
// the stack offset specified by dst. The offsets and count are expressed in
// slot-sized units. Offset dst must be less than src, or the gap between
// them must be greater than or equal to slot_count, otherwise the result is
// unpredictable. The function may corrupt its register arguments.
void CopySlots(int dst, Register src, Register slot_count);
void CopySlots(Register dst, Register src, Register slot_count);
// Copy count double words from the address in register src to the address
// in register dst. Address dst must be less than src, or the gap between
// them must be greater than or equal to count double words, otherwise the
// result is unpredictable. The function may corrupt its register arguments.
void CopyDoubleWords(Register dst, Register src, Register count);
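To make the overlap rule above concrete, here is a minimal stand-alone C++ model of the CopyDoubleWords contract, assuming plain uint64_t arrays stand in for stack slots; CopyDoubleWordsModel and the other names in the sketch are illustrative and not part of V8. It copies forwards with the same odd-element-then-pairs structure as the assembler helper, and asserts the same precondition (dst below src, or a gap of at least count).

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstdio>

// Copies `count` 64-bit words from `src` to `dst`, walking forwards.
// Mirrors the precondition checked by the assembler helper: the copy is only
// well-defined if dst < src or the regions do not overlap (dst - src >= count).
void CopyDoubleWordsModel(uint64_t* dst, uint64_t* src, size_t count) {
  assert(dst < src || static_cast<size_t>(dst - src) >= count);
  if (count & 1) {
    // Odd count: copy a single word so an even number of words remains.
    *dst++ = *src++;
    --count;
  }
  while (count > 0) {
    // The real helper uses ldp/stp here to keep the accesses paired.
    dst[0] = src[0];
    dst[1] = src[1];
    dst += 2;
    src += 2;
    count -= 2;
  }
}

int main() {
  uint64_t slots[6] = {1, 2, 3, 4, 5, 6};
  // Shift five words down by one, analogous to CopySlots with a dst offset of
  // 0, a src offset of 1 and a count of 5.
  CopyDoubleWordsModel(&slots[0], &slots[1], 5);
  for (uint64_t s : slots) std::printf("%llu ", static_cast<unsigned long long>(s));
  std::printf("\n");  // Prints: 2 3 4 5 6 6
  return 0;
}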
// Load a literal from the inline constant pool.
inline void Ldr(const CPURegister& rt, const Operand& imm);
// Helper function for double immediate.
......
@@ -208,37 +208,26 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
__ SmiTag(x11, x0);
__ Push(cp, x11, x10);
// Set up pointer to last argument.
__ Add(x2, fp, StandardFrameConstants::kCallerSPOffset);
// Copy arguments and receiver to the expression stack.
// Copy 2 values every loop to use ldp/stp.
// Compute pointer behind the first argument.
__ Add(x4, x2, Operand(x0, LSL, kPointerSizeLog2));
Label loop, entry, done_copying_arguments;
// ----------- S t a t e -------------
// -- x0: number of arguments (untagged)
// -- x1: constructor function
// -- x3: new target
// -- x2: pointer to last argument (caller sp)
// -- x4: pointer to argument last copied
// -- sp[0*kPointerSize]: the hole (receiver)
// -- sp[1*kPointerSize]: number of arguments (tagged)
// -- sp[2*kPointerSize]: context
// -----------------------------------
__ B(&entry);
__ Bind(&loop);
__ Ldp(x10, x11, MemOperand(x4, -2 * kPointerSize, PreIndex));
__ Push(x11, x10);
__ Bind(&entry);
__ Cmp(x4, x2);
__ B(gt, &loop);
// Because we copied values 2 by 2 we may have copied one extra value.
// Drop it if that is the case.
__ B(eq, &done_copying_arguments);
__ Drop(1);
__ Bind(&done_copying_arguments);
// Copy arguments to the expression stack.
__ Claim(x0);
{
Register count = x2;
Register dst = x10;
Register src = x11;
__ Mov(count, x0);
__ Mov(dst, __ StackPointer());
__ Add(src, fp, StandardFrameConstants::kCallerSPOffset);
__ CopyDoubleWords(dst, src, count);
}
// Call the function.
// x0: number of arguments
@@ -292,8 +281,8 @@ void Generate_JSConstructStubGeneric(MacroAssembler* masm,
__ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
__ Ldr(w4, FieldMemOperand(x4, SharedFunctionInfo::kCompilerHintsOffset));
__ tst(w4, Operand(SharedFunctionInfo::kDerivedConstructorMask));
__ B(ne, &not_create_implicit_receiver);
__ TestAndBranchIfAnySet(w4, SharedFunctionInfo::kDerivedConstructorMask,
&not_create_implicit_receiver);
// If not derived class constructor: Allocate the new receiver object.
__ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1,
@@ -303,7 +292,7 @@ void Generate_JSConstructStubGeneric(MacroAssembler* masm,
__ B(&post_instantiation_deopt_entry);
// Else: use TheHoleValue as receiver for constructor call
__ bind(&not_create_implicit_receiver);
__ Bind(&not_create_implicit_receiver);
__ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
// ----------- S t a t e -------------
@@ -316,7 +305,7 @@ void Generate_JSConstructStubGeneric(MacroAssembler* masm,
// Deoptimizer enters here.
masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
masm->pc_offset());
__ bind(&post_instantiation_deopt_entry);
__ Bind(&post_instantiation_deopt_entry);
// Restore new target.
__ Pop(x3);
@@ -325,52 +314,32 @@ void Generate_JSConstructStubGeneric(MacroAssembler* masm,
// conventions dictate that the called function pops the receiver.
__ Push(x0, x0);
// ----------- S t a t e -------------
// -- x3: new target
// -- sp[0*kPointerSize]: implicit receiver
// -- sp[1*kPointerSize]: implicit receiver
// -- sp[2*kPointerSize]: constructor function
// -- sp[3*kPointerSize]: number of arguments (tagged)
// -- sp[4*kPointerSize]: context
// -----------------------------------
// Restore constructor function and argument count.
__ Ldr(x1, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));
__ Ldr(x0, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
__ SmiUntag(x0);
// Set up pointer to last argument.
__ Add(x2, fp, StandardFrameConstants::kCallerSPOffset);
__ Ldrsw(x0,
UntagSmiMemOperand(fp, ConstructFrameConstants::kLengthOffset));
// Copy arguments and receiver to the expression stack.
// Copy 2 values every loop to use ldp/stp.
// Compute pointer behind the first argument.
__ Add(x4, x2, Operand(x0, LSL, kPointerSizeLog2));
Label loop, entry, done_copying_arguments;
// ----------- S t a t e -------------
// -- x0: number of arguments (untagged)
// -- x3: new target
// -- x2: pointer to last argument (caller sp)
// -- x4: pointer to argument last copied
// -- sp[0*kPointerSize]: implicit receiver
// -- sp[1*kPointerSize]: implicit receiver
// -- x1 and sp[2*kPointerSize]: constructor function
// -- sp[3*kPointerSize]: number of arguments (tagged)
// -- sp[4*kPointerSize]: context
// -----------------------------------
__ B(&entry);
__ Bind(&loop);
__ Ldp(x10, x11, MemOperand(x4, -2 * kPointerSize, PreIndex));
__ Push(x11, x10);
__ Bind(&entry);
__ Cmp(x4, x2);
__ B(gt, &loop);
// Because we copied values 2 by 2 we may have copied one extra value.
// Drop it if that is the case.
__ B(eq, &done_copying_arguments);
__ Drop(1);
__ Bind(&done_copying_arguments);
// Copy arguments to the expression stack.
__ Claim(x0);
{
Register count = x2;
Register dst = x10;
Register src = x11;
__ Mov(count, x0);
__ Mov(dst, __ StackPointer());
__ Add(src, fp, StandardFrameConstants::kCallerSPOffset);
__ CopyDoubleWords(dst, src, count);
}
// Call the function.
ParameterCount actual(x0);
@@ -416,13 +385,14 @@ void Generate_JSConstructStubGeneric(MacroAssembler* masm,
__ Ldr(x4, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));
__ Ldr(x4, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
__ Ldr(w4, FieldMemOperand(x4, SharedFunctionInfo::kCompilerHintsOffset));
__ tst(w4, Operand(SharedFunctionInfo::kClassConstructorMask));
if (restrict_constructor_return) {
// Throw if constructor function is a class constructor
__ B(eq, &use_receiver);
__ TestAndBranchIfAllClear(w4, SharedFunctionInfo::kClassConstructorMask,
&use_receiver);
} else {
__ B(ne, &use_receiver);
__ TestAndBranchIfAnySet(w4, SharedFunctionInfo::kClassConstructorMask,
&use_receiver);
__ CallRuntime(
Runtime::kIncrementUseCounterConstructorReturnNonUndefinedPrimitive);
__ B(&use_receiver);
@@ -2212,7 +2182,6 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
// -- x4 : the number of [[BoundArguments]]
// -----------------------------------
// Reserve stack space for the [[BoundArguments]].
{
Label done;
__ Claim(x4);
@@ -2231,22 +2200,14 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
__ Bind(&done);
}
UseScratchRegisterScope temps(masm);
Register argc = temps.AcquireX();
// Relocate arguments down the stack.
{
Label loop, done_loop;
__ Mov(x5, 0);
__ Bind(&loop);
__ Cmp(x5, x0);
__ B(gt, &done_loop);
__ Peek(x10, Operand(x4, LSL, kPointerSizeLog2));
__ Poke(x10, Operand(x5, LSL, kPointerSizeLog2));
__ Add(x4, x4, 1);
__ Add(x5, x5, 1);
__ B(&loop);
__ Bind(&done_loop);
}
__ Mov(argc, x0);
__ CopySlots(0, x4, argc);
// Copy [[BoundArguments]] to the stack (below the arguments).
// Copy [[BoundArguments]] to the stack (below the arguments). The first
// element of the array is copied to the highest address.
{
Label loop;
__ Ldrsw(x4, UntagSmiFieldMemOperand(x2, FixedArray::kLengthOffset));
......
@@ -13263,6 +13263,148 @@ TEST(pop_queued) {
TEARDOWN();
}
TEST(copy_slots_down) {
INIT_V8();
SETUP();
const uint64_t ones = 0x1111111111111111UL;
const uint64_t twos = 0x2222222222222222UL;
const uint64_t threes = 0x3333333333333333UL;
const uint64_t fours = 0x4444444444444444UL;
START();
// Test copying 12 slots down one slot.
__ Mov(jssp, __ StackPointer());
__ SetStackPointer(jssp);
__ Mov(x1, ones);
__ Mov(x2, twos);
__ Mov(x3, threes);
__ Mov(x4, fours);
__ Push(x1, x2, x3, x4);
__ Push(x1, x2, x1, x2);
__ Push(x3, x4, x3, x4);
__ Push(xzr);
__ Mov(x5, 0);
__ Mov(x6, 1);
__ Mov(x7, 12);
__ CopySlots(x5, x6, x7);
__ Pop(x4, x5, x6, x7);
__ Pop(x8, x9, x10, x11);
__ Pop(x12, x13, x14, x15);
__ Drop(1);
// Test copying one slot down one slot.
__ Push(x1, xzr, xzr);
__ Mov(x1, 1);
__ Mov(x2, 2);
__ Mov(x3, 1);
__ CopySlots(x1, x2, x3);
__ Drop(1);
__ Pop(x0);
__ Drop(1);
__ Mov(csp, jssp);
__ SetStackPointer(csp);
END();
RUN();
CHECK_EQUAL_64(fours, x4);
CHECK_EQUAL_64(threes, x5);
CHECK_EQUAL_64(fours, x6);
CHECK_EQUAL_64(threes, x7);
CHECK_EQUAL_64(twos, x8);
CHECK_EQUAL_64(ones, x9);
CHECK_EQUAL_64(twos, x10);
CHECK_EQUAL_64(ones, x11);
CHECK_EQUAL_64(fours, x12);
CHECK_EQUAL_64(threes, x13);
CHECK_EQUAL_64(twos, x14);
CHECK_EQUAL_64(ones, x15);
CHECK_EQUAL_64(ones, x0);
TEARDOWN();
}
TEST(copy_slots_up) {
INIT_V8();
SETUP();
const uint64_t ones = 0x1111111111111111UL;
const uint64_t twos = 0x2222222222222222UL;
const uint64_t threes = 0x3333333333333333UL;
START();
__ Mov(jssp, __ StackPointer());
__ SetStackPointer(jssp);
__ Mov(x1, ones);
__ Mov(x2, twos);
__ Mov(x3, threes);
// Test copying one slot to the next slot higher in memory.
__ Push(xzr, x1);
__ Mov(x5, 1);
__ Mov(x6, 0);
__ Mov(x7, 1);
__ CopySlots(x5, x6, x7);
__ Drop(1);
__ Pop(x10);
// Test copying two slots to the next two slots higher in memory.
__ Push(xzr, xzr);
__ Push(x1, x2);
__ Mov(x5, 2);
__ Mov(x6, 0);
__ Mov(x7, 2);
__ CopySlots(x5, x6, x7);
__ Drop(2);
__ Pop(x11, x12);
// Test copying three slots to the next three slots higher in memory.
__ Push(xzr, xzr, xzr);
__ Push(x1, x2, x3);
__ Mov(x5, 3);
__ Mov(x6, 0);
__ Mov(x7, 3);
__ CopySlots(x5, x6, x7);
__ Drop(3);
__ Pop(x0, x1, x2);
__ Mov(csp, jssp);
__ SetStackPointer(csp);
END();
RUN();
CHECK_EQUAL_64(ones, x10);
CHECK_EQUAL_64(twos, x11);
CHECK_EQUAL_64(ones, x12);
CHECK_EQUAL_64(threes, x0);
CHECK_EQUAL_64(twos, x1);
CHECK_EQUAL_64(ones, x2);
TEARDOWN();
}
TEST(jump_both_smi) {
INIT_V8();
......