Commit 88a1acef authored by Junliang Yan, committed by V8 LUCI CQ

ppc: rename LoadP to LoadU64

Change-Id: I0c763d15f584f3b6d71f034412f736087824a2a6
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2892605
Reviewed-by: Milad Fa <mfarazma@redhat.com>
Commit-Queue: Junliang Yan <junyan@redhat.com>
Cr-Commit-Position: refs/heads/master@{#74544}
parent c7d85563
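
The diff below renames every pointer-sized LoadP use to the explicitly sized LoadU64 and, in the TurboAssembler header, keeps LoadP as a thin wrapper that forwards to LoadU64. The following self-contained sketch only illustrates that wrapper pattern; Register, MemOperand, and Assembler here are simplified stand-ins rather than V8's real classes.

// Standalone sketch of the renaming pattern applied by this commit: the
// pointer-sized LoadP becomes the explicitly sized LoadU64, and LoadP is
// kept as a forwarding shim so call sites that have not been migrated yet
// still compile. These types are simplified stand-ins, not the real V8 API.
#include <cstdint>
#include <iostream>

struct Register { int code; };
struct MemOperand { const uint64_t* base; int index; };

class Assembler {
 public:
  // New, explicitly sized 64-bit load.
  void LoadU64(Register dst, const MemOperand& mem) {
    regs_[dst.code] = mem.base[mem.index];
  }
  // Old name kept as a forwarding shim during the transition.
  void LoadP(Register dst, const MemOperand& mem) { LoadU64(dst, mem); }

  uint64_t reg(Register r) const { return regs_[r.code]; }

 private:
  uint64_t regs_[32] = {};
};

int main() {
  uint64_t frame[2] = {0x1122334455667788ull, 42};
  Assembler masm;
  Register r3{3};
  masm.LoadP(r3, MemOperand{frame, 1});    // old spelling still works
  std::cout << std::hex << masm.reg(r3) << "\n";  // prints 2a
  masm.LoadU64(r3, MemOperand{frame, 0});  // new spelling
  std::cout << std::hex << masm.reg(r3) << "\n";  // prints 1122334455667788
  return 0;
}
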
@@ -118,9 +118,9 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
   }
   // Restore context from the frame.
-  __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
+  __ LoadU64(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
   // Restore smi-tagged arguments count from the frame.
-  __ LoadP(scratch, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
+  __ LoadU64(scratch, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
   // Leave construct frame.
   }
@@ -230,8 +230,8 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
   // -----------------------------------
   // Restore constructor function and argument count.
-  __ LoadP(r4, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));
-  __ LoadP(r3, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
+  __ LoadU64(r4, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));
+  __ LoadU64(r3, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
   __ SmiUntag(r3);
   Label stack_overflow;
@@ -276,12 +276,12 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
   // Throw away the result of the constructor invocation and use the
   // on-stack receiver as the result.
   __ bind(&use_receiver);
-  __ LoadP(r3, MemOperand(sp));
+  __ LoadU64(r3, MemOperand(sp));
   __ JumpIfRoot(r3, RootIndex::kTheHoleValue, &do_throw);
   __ bind(&leave_and_return);
   // Restore smi-tagged arguments count from the frame.
-  __ LoadP(r4, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
+  __ LoadU64(r4, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
   // Leave construct frame.
   __ LeaveFrame(StackFrame::CONSTRUCT);
@@ -306,13 +306,13 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
   __ bind(&do_throw);
   // Restore the context from the frame.
-  __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
+  __ LoadU64(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
   __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
   __ bkpt(0);
   __ bind(&stack_overflow);
   // Restore the context from the frame.
-  __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
+  __ LoadU64(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
   __ CallRuntime(Runtime::kThrowStackOverflow);
   // Unreachable code.
   __ bkpt(0);
@@ -374,7 +374,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
       ExternalReference::debug_suspended_generator_address(masm->isolate());
   __ Move(scratch, debug_suspended_generator);
-  __ LoadP(scratch, MemOperand(scratch));
+  __ LoadU64(scratch, MemOperand(scratch));
   __ cmp(scratch, r4);
   __ beq(&prepare_step_in_suspended_generator);
   __ bind(&stepping_prepared);
@@ -552,7 +552,7 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
   // Save copies of the top frame descriptor on the stack.
   __ Move(r3, ExternalReference::Create(IsolateAddressId::kCEntryFPAddress,
                                         masm->isolate()));
-  __ LoadP(r0, MemOperand(r3));
+  __ LoadU64(r0, MemOperand(r3));
   __ push(r0);
   // Clear c_entry_fp, now we've pushed its previous value to the stack.
@@ -572,7 +572,7 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
       ExternalReference::Create(IsolateAddressId::kJSEntrySPAddress,
                                 masm->isolate());
   __ Move(r3, js_entry_sp);
-  __ LoadP(scratch, MemOperand(r3));
+  __ LoadU64(scratch, MemOperand(r3));
   __ cmpi(scratch, Operand::Zero());
   __ bne(&non_outermost_js);
   __ StoreP(fp, MemOperand(r3));
@@ -661,7 +661,7 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
   __ MultiPop(kCalleeSaved);
   // Return
-  __ LoadP(r0, MemOperand(sp, kStackFrameLRSlot * kSystemPointerSize));
+  __ LoadU64(r0, MemOperand(sp, kStackFrameLRSlot * kSystemPointerSize));
   __ mtlr(r0);
   __ blr();
 }
@@ -701,7 +701,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
     ExternalReference context_address = ExternalReference::Create(
         IsolateAddressId::kContextAddress, masm->isolate());
     __ Move(cp, context_address);
-    __ LoadP(cp, MemOperand(cp));
+    __ LoadU64(cp, MemOperand(cp));
     // Push the function.
     __ Push(r5);
@@ -732,7 +732,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
     __ mtctr(r7);
     __ bind(&loop);
     __ LoadPU(r9, MemOperand(r8, -kSystemPointerSize));  // read next parameter
-    __ LoadP(r0, MemOperand(r9));  // dereference handle
+    __ LoadU64(r0, MemOperand(r9));  // dereference handle
     __ push(r0);  // push parameter
     __ bdnz(&loop);
     __ bind(&done);
@@ -806,15 +806,15 @@ static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
                                   Register scratch2) {
   Register params_size = scratch1;
   // Get the size of the formal parameters + receiver (in bytes).
-  __ LoadP(params_size,
-           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
+  __ LoadU64(params_size,
+             MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
   __ lwz(params_size,
          FieldMemOperand(params_size, BytecodeArray::kParameterSizeOffset));
   Register actual_params_size = scratch2;
   // Compute the size of the actual parameters + receiver (in bytes).
-  __ LoadP(actual_params_size,
-           MemOperand(fp, StandardFrameConstants::kArgCOffset));
+  __ LoadU64(actual_params_size,
+             MemOperand(fp, StandardFrameConstants::kArgCOffset));
   __ ShiftLeftImm(actual_params_size, actual_params_size,
                   Operand(kSystemPointerSizeLog2));
   __ addi(actual_params_size, actual_params_size, Operand(kSystemPointerSize));
@@ -1203,10 +1203,10 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
   // or the interpreter tail calling a builtin and then a dispatch.
   // Get bytecode array and bytecode offset from the stack frame.
-  __ LoadP(kInterpreterBytecodeArrayRegister,
-           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
-  __ LoadP(kInterpreterBytecodeOffsetRegister,
-           MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
+  __ LoadU64(kInterpreterBytecodeArrayRegister,
+             MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
+  __ LoadU64(kInterpreterBytecodeOffsetRegister,
+             MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
   __ SmiUntag(kInterpreterBytecodeOffsetRegister);
   // Either return, or advance to the next bytecode and dispatch.
@@ -1236,8 +1236,8 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
   // After the call, restore the bytecode array, bytecode offset and accumulator
   // registers again. Also, restore the bytecode offset in the stack to its
   // previous value.
-  __ LoadP(kInterpreterBytecodeArrayRegister,
-           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
+  __ LoadU64(kInterpreterBytecodeArrayRegister,
+             MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
   __ mov(kInterpreterBytecodeOffsetRegister,
          Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
   __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
@@ -1311,7 +1311,7 @@ void Builtins::Generate_InterpreterPushArgsThenCallImpl(
     // Pass the spread in the register r3.
     // r2 already points to the penultimate argument, the spread
     // lies in the next interpreter register.
-    __ LoadP(r5, MemOperand(r5, -kSystemPointerSize));
+    __ LoadU64(r5, MemOperand(r5, -kSystemPointerSize));
   }
   // Call the target.
@@ -1362,7 +1362,7 @@ void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
     // r4 already points to the penultimate argument, the spread
     // lies in the next interpreter register.
     __ subi(r7, r7, Operand(kSystemPointerSize));
-    __ LoadP(r5, MemOperand(r7));
+    __ LoadU64(r5, MemOperand(r7));
   } else {
     __ AssertUndefinedOrAllocationSite(r5, r8);
   }
@@ -1404,7 +1404,7 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
   // custom copy of the interpreter entry trampoline for profiling. If so,
   // get the custom trampoline, otherwise grab the entry address of the global
   // trampoline.
-  __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
+  __ LoadU64(r5, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
   __ LoadTaggedPointerField(
       r5, FieldMemOperand(r5, JSFunction::kSharedFunctionInfoOffset));
   __ LoadTaggedPointerField(
@@ -1423,7 +1423,7 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
   __ Move(r5, ExternalReference::
                   address_of_interpreter_entry_trampoline_instruction_start(
                       masm->isolate()));
-  __ LoadP(r5, MemOperand(r5));
+  __ LoadU64(r5, MemOperand(r5));
   __ bind(&trampoline_loaded);
   __ addi(r0, r5, Operand(interpreter_entry_return_pc_offset.value()));
@@ -1435,8 +1435,8 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
       ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
   // Get the bytecode array pointer from the frame.
-  __ LoadP(kInterpreterBytecodeArrayRegister,
-           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
+  __ LoadU64(kInterpreterBytecodeArrayRegister,
+             MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
   if (FLAG_debug_code) {
     // Check function data field is actually a BytecodeArray object.
@@ -1451,8 +1451,8 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
   }
   // Get the target bytecode offset from the frame.
-  __ LoadP(kInterpreterBytecodeOffsetRegister,
-           MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
+  __ LoadU64(kInterpreterBytecodeOffsetRegister,
+             MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
   __ SmiUntag(kInterpreterBytecodeOffsetRegister);
   if (FLAG_debug_code) {
@@ -1478,10 +1478,10 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
 void Builtins::Generate_InterpreterEnterAtNextBytecode(MacroAssembler* masm) {
   // Get bytecode array and bytecode offset from the stack frame.
-  __ LoadP(kInterpreterBytecodeArrayRegister,
-           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
-  __ LoadP(kInterpreterBytecodeOffsetRegister,
-           MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
+  __ LoadU64(kInterpreterBytecodeArrayRegister,
+             MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
+  __ LoadU64(kInterpreterBytecodeOffsetRegister,
+             MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
   __ SmiUntag(kInterpreterBytecodeOffsetRegister);
   Label enter_bytecode, function_entry_bytecode;
@@ -1565,7 +1565,7 @@ void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
     __ subi(r3, r3,
             Operand(BuiltinContinuationFrameConstants::kFixedSlotCount));
   }
-  __ LoadP(
+  __ LoadU64(
       fp,
       MemOperand(sp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
   // Load builtin index (stored as a Smi) and use it to get the builtin start
@@ -1607,7 +1607,7 @@ void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
   }
   DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r3.code());
-  __ LoadP(r3, MemOperand(sp, 0 * kSystemPointerSize));
+  __ LoadU64(r3, MemOperand(sp, 0 * kSystemPointerSize));
   __ addi(sp, sp, Operand(1 * kSystemPointerSize));
   __ Ret();
 }
@@ -1675,13 +1675,13 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   __ mr(r5, r8);
   Label done;
-  __ LoadP(r4, MemOperand(sp));  // receiver
+  __ LoadU64(r4, MemOperand(sp));  // receiver
   __ cmpi(r3, Operand(1));
   __ blt(&done);
-  __ LoadP(r8, MemOperand(sp, kSystemPointerSize));  // thisArg
+  __ LoadU64(r8, MemOperand(sp, kSystemPointerSize));  // thisArg
   __ cmpi(r3, Operand(2));
   __ blt(&done);
-  __ LoadP(r5, MemOperand(sp, 2 * kSystemPointerSize));  // argArray
+  __ LoadU64(r5, MemOperand(sp, 2 * kSystemPointerSize));  // argArray
   __ bind(&done);
   __ ShiftLeftImm(ip, r3, Operand(kSystemPointerSizeLog2));
@@ -1760,13 +1760,13 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
   Label done;
   __ cmpi(r3, Operand(1));
   __ blt(&done);
-  __ LoadP(r4, MemOperand(sp, kSystemPointerSize));  // thisArg
+  __ LoadU64(r4, MemOperand(sp, kSystemPointerSize));  // thisArg
   __ cmpi(r3, Operand(2));
   __ blt(&done);
-  __ LoadP(r8, MemOperand(sp, 2 * kSystemPointerSize));  // argArray
+  __ LoadU64(r8, MemOperand(sp, 2 * kSystemPointerSize));  // argArray
   __ cmpi(r3, Operand(3));
   __ blt(&done);
-  __ LoadP(r5, MemOperand(sp, 3 * kSystemPointerSize));  // argArray
+  __ LoadU64(r5, MemOperand(sp, 3 * kSystemPointerSize));  // argArray
   __ bind(&done);
   __ ShiftLeftImm(ip, r3, Operand(kSystemPointerSizeLog2));
@@ -1810,14 +1810,14 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
   __ mr(r7, r4);
   __ cmpi(r3, Operand(1));
   __ blt(&done);
-  __ LoadP(r4, MemOperand(sp, kSystemPointerSize));  // thisArg
+  __ LoadU64(r4, MemOperand(sp, kSystemPointerSize));  // thisArg
   __ mr(r6, r4);
   __ cmpi(r3, Operand(2));
   __ blt(&done);
-  __ LoadP(r5, MemOperand(sp, 2 * kSystemPointerSize));  // argArray
+  __ LoadU64(r5, MemOperand(sp, 2 * kSystemPointerSize));  // argArray
   __ cmpi(r3, Operand(3));
   __ blt(&done);
-  __ LoadP(r6, MemOperand(sp, 3 * kSystemPointerSize));  // argArray
+  __ LoadU64(r6, MemOperand(sp, 3 * kSystemPointerSize));  // argArray
   __ bind(&done);
   __ ShiftLeftImm(r0, r3, Operand(kSystemPointerSizeLog2));
   __ add(sp, sp, r0);
@@ -1962,7 +1962,7 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
   }
   Label stack_done, stack_overflow;
-  __ LoadP(r8, MemOperand(fp, StandardFrameConstants::kArgCOffset));
+  __ LoadU64(r8, MemOperand(fp, StandardFrameConstants::kArgCOffset));
   __ sub(r8, r8, r5, LeaveOE, SetRC);
   __ ble(&stack_done, cr0);
   {
@@ -2588,8 +2588,8 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
   // If return value is on the stack, pop it to registers.
   if (needs_return_buffer) {
-    __ LoadP(r4, MemOperand(r3, kSystemPointerSize));
-    __ LoadP(r3, MemOperand(r3));
+    __ LoadU64(r4, MemOperand(r3, kSystemPointerSize));
+    __ LoadU64(r3, MemOperand(r3));
   }
   // Check result for exception sentinel.
@@ -2605,7 +2605,7 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
         IsolateAddressId::kPendingExceptionAddress, masm->isolate());
     __ Move(r6, pending_exception_address);
-    __ LoadP(r6, MemOperand(r6));
+    __ LoadU64(r6, MemOperand(r6));
     __ CompareRoot(r6, RootIndex::kTheHoleValue);
     // Cannot use check here as it attempts to generate call into runtime.
     __ beq(&okay);
@@ -2657,11 +2657,11 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
   // Retrieve the handler context, SP and FP.
   __ Move(cp, pending_handler_context_address);
-  __ LoadP(cp, MemOperand(cp));
+  __ LoadU64(cp, MemOperand(cp));
   __ Move(sp, pending_handler_sp_address);
-  __ LoadP(sp, MemOperand(sp));
+  __ LoadU64(sp, MemOperand(sp));
   __ Move(fp, pending_handler_fp_address);
-  __ LoadP(fp, MemOperand(fp));
+  __ LoadU64(fp, MemOperand(fp));
   // If the handler is a JS frame, restore the context to the frame. Note that
   // the context will be set to (cp == 0) for non-JS frames.
@@ -2689,10 +2689,10 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
   // Compute the handler entry address and jump to it.
   ConstantPoolUnavailableScope constant_pool_unavailable(masm);
   __ Move(ip, pending_handler_entrypoint_address);
-  __ LoadP(ip, MemOperand(ip));
+  __ LoadU64(ip, MemOperand(ip));
   if (FLAG_enable_embedded_constant_pool) {
     __ Move(kConstantPoolRegister, pending_handler_constant_pool_address);
-    __ LoadP(kConstantPoolRegister, MemOperand(kConstantPoolRegister));
+    __ LoadU64(kConstantPoolRegister, MemOperand(kConstantPoolRegister));
   }
   __ Jump(ip);
 }
@@ -2877,8 +2877,8 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
   // r15 - next_address->kLimitOffset
   // r16 - next_address->kLevelOffset
   __ Move(r17, next_address);
-  __ LoadP(r14, MemOperand(r17, kNextOffset));
-  __ LoadP(r15, MemOperand(r17, kLimitOffset));
+  __ LoadU64(r14, MemOperand(r17, kNextOffset));
+  __ LoadU64(r15, MemOperand(r17, kLimitOffset));
   __ lwz(r16, MemOperand(r17, kLevelOffset));
   __ addi(r16, r16, Operand(1));
   __ stw(r16, MemOperand(r17, kLevelOffset));
@@ -2891,7 +2891,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
   Label return_value_loaded;
   // load value from ReturnValue
-  __ LoadP(r3, return_value_operand);
+  __ LoadU64(r3, return_value_operand);
   __ bind(&return_value_loaded);
   // No more valid handles (the result handle was the last one). Restore
   // previous handle scope.
@@ -2903,7 +2903,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
   }
   __ subi(r16, r16, Operand(1));
   __ stw(r16, MemOperand(r17, kLevelOffset));
-  __ LoadP(r0, MemOperand(r17, kLimitOffset));
+  __ LoadU64(r0, MemOperand(r17, kLimitOffset));
   __ cmp(r15, r0);
   __ bne(&delete_allocated_handles);
@@ -2911,7 +2911,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
   __ bind(&leave_exit_frame);
   // LeaveExitFrame expects unwind space to be in a register.
   if (stack_space_operand != nullptr) {
-    __ LoadP(r14, *stack_space_operand);
+    __ LoadU64(r14, *stack_space_operand);
   } else {
     __ mov(r14, Operand(stack_space));
   }
@@ -2920,7 +2920,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
   // Check if the function scheduled an exception.
   __ LoadRoot(r14, RootIndex::kTheHoleValue);
   __ Move(r15, ExternalReference::scheduled_exception_address(isolate));
-  __ LoadP(r15, MemOperand(r15));
+  __ LoadU64(r15, MemOperand(r15));
   __ cmp(r14, r15);
   __ bne(&promote_scheduled_exception);
@@ -3155,8 +3155,8 @@ void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
   __ LoadTaggedPointerField(
       scratch, FieldMemOperand(callback, AccessorInfo::kJsGetterOffset));
-  __ LoadP(api_function_address,
-           FieldMemOperand(scratch, Foreign::kForeignAddressOffset));
+  __ LoadU64(api_function_address,
+             FieldMemOperand(scratch, Foreign::kForeignAddressOffset));
   // +3 is to skip prolog, return address and name handle.
   MemOperand return_value_operand(
@@ -3178,13 +3178,14 @@ void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
   if (ABI_USES_FUNCTION_DESCRIPTORS) {
     // AIX/PPC64BE Linux use a function descriptor;
-    __ LoadP(ToRegister(ABI_TOC_REGISTER),
-             MemOperand(temp2, kSystemPointerSize));
-    __ LoadP(temp2, MemOperand(temp2, 0));  // Instruction address
+    __ LoadU64(ToRegister(ABI_TOC_REGISTER),
+               MemOperand(temp2, kSystemPointerSize));
+    __ LoadU64(temp2, MemOperand(temp2, 0));  // Instruction address
   }
   __ Call(temp2);  // Call the C++ function.
-  __ LoadP(r0, MemOperand(sp, kStackFrameExtraParamSlot * kSystemPointerSize));
+  __ LoadU64(r0,
+             MemOperand(sp, kStackFrameExtraParamSlot * kSystemPointerSize));
   __ mtlr(r0);
   __ blr();
 }
@@ -3247,9 +3248,10 @@ void Generate_DeoptimizationEntry(MacroAssembler* masm,
   __ PrepareCallCFunction(6, r8);
   __ li(r3, Operand::Zero());
   Label context_check;
-  __ LoadP(r4, MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
+  __ LoadU64(r4,
+             MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
   __ JumpIfSmi(r4, &context_check);
-  __ LoadP(r3, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
+  __ LoadU64(r3, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
   __ bind(&context_check);
   __ li(r4, Operand(static_cast<int>(deopt_kind)));
   // r5: bailout id already loaded.
@@ -3264,14 +3266,14 @@ void Generate_DeoptimizationEntry(MacroAssembler* masm,
   // Preserve "deoptimizer" object in register r3 and get the input
   // frame descriptor pointer to r4 (deoptimizer->input_);
-  __ LoadP(r4, MemOperand(r3, Deoptimizer::input_offset()));
+  __ LoadU64(r4, MemOperand(r3, Deoptimizer::input_offset()));
   // Copy core registers into FrameDescription::registers_[kNumRegisters].
   DCHECK_EQ(Register::kNumRegisters, kNumberOfRegisters);
   for (int i = 0; i < kNumberOfRegisters; i++) {
     int offset =
         (i * kSystemPointerSize) + FrameDescription::registers_offset();
-    __ LoadP(r5, MemOperand(sp, i * kSystemPointerSize));
+    __ LoadU64(r5, MemOperand(sp, i * kSystemPointerSize));
     __ StoreP(r5, MemOperand(r4, offset));
   }
@@ -3303,7 +3305,7 @@ void Generate_DeoptimizationEntry(MacroAssembler* masm,
   // Compute a pointer to the unwinding limit in register r5; that is
   // the first stack slot not part of the input frame.
-  __ LoadP(r5, MemOperand(r4, FrameDescription::frame_size_offset()));
+  __ LoadU64(r5, MemOperand(r4, FrameDescription::frame_size_offset()));
   __ add(r5, r5, sp);
   // Unwind the stack down to - but not including - the unwinding
@@ -3332,28 +3334,29 @@ void Generate_DeoptimizationEntry(MacroAssembler* masm,
   }
   __ pop(r3);  // Restore deoptimizer object (class Deoptimizer).
-  __ LoadP(sp, MemOperand(r3, Deoptimizer::caller_frame_top_offset()));
+  __ LoadU64(sp, MemOperand(r3, Deoptimizer::caller_frame_top_offset()));
   // Replace the current (input) frame with the output frames.
   Label outer_push_loop, inner_push_loop, outer_loop_header, inner_loop_header;
   // Outer loop state: r7 = current "FrameDescription** output_",
   // r4 = one past the last FrameDescription**.
   __ lwz(r4, MemOperand(r3, Deoptimizer::output_count_offset()));
-  __ LoadP(r7, MemOperand(r3, Deoptimizer::output_offset()));  // r7 is output_.
+  __ LoadU64(r7,
+             MemOperand(r3, Deoptimizer::output_offset()));  // r7 is output_.
   __ ShiftLeftImm(r4, r4, Operand(kSystemPointerSizeLog2));
   __ add(r4, r7, r4);
   __ b(&outer_loop_header);
   __ bind(&outer_push_loop);
   // Inner loop state: r5 = current FrameDescription*, r6 = loop index.
-  __ LoadP(r5, MemOperand(r7, 0));  // output_[ix]
-  __ LoadP(r6, MemOperand(r5, FrameDescription::frame_size_offset()));
+  __ LoadU64(r5, MemOperand(r7, 0));  // output_[ix]
+  __ LoadU64(r6, MemOperand(r5, FrameDescription::frame_size_offset()));
   __ b(&inner_loop_header);
   __ bind(&inner_push_loop);
   __ addi(r6, r6, Operand(-sizeof(intptr_t)));
   __ add(r9, r5, r6);
-  __ LoadP(r9, MemOperand(r9, FrameDescription::frame_content_offset()));
+  __ LoadU64(r9, MemOperand(r9, FrameDescription::frame_content_offset()));
   __ push(r9);
   __ bind(&inner_loop_header);
@@ -3365,7 +3368,7 @@ void Generate_DeoptimizationEntry(MacroAssembler* masm,
   __ cmp(r7, r4);
   __ blt(&outer_push_loop);
-  __ LoadP(r4, MemOperand(r3, Deoptimizer::input_offset()));
+  __ LoadU64(r4, MemOperand(r3, Deoptimizer::input_offset()));
   for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
     int code = config->GetAllocatableDoubleCode(i);
     const DoubleRegister dreg = DoubleRegister::from_code(code);
@@ -3374,9 +3377,9 @@ void Generate_DeoptimizationEntry(MacroAssembler* masm,
   }
   // Push pc, and continuation from the last output frame.
-  __ LoadP(r9, MemOperand(r5, FrameDescription::pc_offset()));
+  __ LoadU64(r9, MemOperand(r5, FrameDescription::pc_offset()));
   __ push(r9);
-  __ LoadP(r9, MemOperand(r5, FrameDescription::continuation_offset()));
+  __ LoadU64(r9, MemOperand(r5, FrameDescription::continuation_offset()));
   __ push(r9);
   // Restore the registers from the last output frame.
@@ -3389,7 +3392,7 @@ void Generate_DeoptimizationEntry(MacroAssembler* masm,
     int offset =
         (i * kSystemPointerSize) + FrameDescription::registers_offset();
     if ((restored_regs & (1 << i)) != 0) {
-      __ LoadP(ToRegister(i), MemOperand(scratch, offset));
+      __ LoadU64(ToRegister(i), MemOperand(scratch, offset));
     }
   }
 }
@@ -3466,11 +3469,12 @@ void Builtins::Generate_DynamicCheckMapsTrampoline(MacroAssembler* masm) {
       descriptor.GetRegisterParameter(DynamicCheckMapsDescriptor::kSlot);
   Register handler_arg =
       descriptor.GetRegisterParameter(DynamicCheckMapsDescriptor::kHandler);
-  __ LoadP(handler_arg, MemOperand(fp, CommonFrameConstants::kCallerPCOffset));
-  __ LoadP(
-      slot_arg,
-      MemOperand(handler_arg, Deoptimizer::kEagerWithResumeImmedArgs1PcOffset));
-  __ LoadP(
-      handler_arg,
-      MemOperand(handler_arg, Deoptimizer::kEagerWithResumeImmedArgs2PcOffset));
+  __ LoadU64(handler_arg,
+             MemOperand(fp, CommonFrameConstants::kCallerPCOffset));
+  __ LoadU64(
+      slot_arg,
+      MemOperand(handler_arg, Deoptimizer::kEagerWithResumeImmedArgs1PcOffset));
+  __ LoadU64(
+      handler_arg,
+      MemOperand(handler_arg, Deoptimizer::kEagerWithResumeImmedArgs2PcOffset));
...
@@ -134,7 +134,7 @@ void TurboAssembler::LoadFromConstantsTable(Register destination,
 }
 void TurboAssembler::LoadRootRelative(Register destination, int32_t offset) {
-  LoadP(destination, MemOperand(kRootRegister, offset), r0);
+  LoadU64(destination, MemOperand(kRootRegister, offset), r0);
 }
 void TurboAssembler::LoadRootRegisterOffset(Register destination,
@@ -185,7 +185,7 @@ void TurboAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
     Register scratch = ip;
     int offset = code->builtin_index() * kSystemPointerSize +
                  IsolateData::builtin_entry_table_offset();
-    LoadP(scratch, MemOperand(kRootRegister, offset), r0);
+    LoadU64(scratch, MemOperand(kRootRegister, offset), r0);
     if (cond != al) b(NegateCondition(cond), &skip, cr);
     Jump(scratch);
     bind(&skip);
@@ -215,9 +215,9 @@ void TurboAssembler::Jump(const ExternalReference& reference) {
   if (ABI_USES_FUNCTION_DESCRIPTORS) {
     // AIX uses a function descriptor. When calling C code be
     // aware of this descriptor and pick up values from it.
-    LoadP(ToRegister(ABI_TOC_REGISTER),
-          MemOperand(scratch, kSystemPointerSize));
-    LoadP(scratch, MemOperand(scratch, 0));
+    LoadU64(ToRegister(ABI_TOC_REGISTER),
+            MemOperand(scratch, kSystemPointerSize));
+    LoadU64(scratch, MemOperand(scratch, 0));
   }
   Jump(scratch);
 }
@@ -273,7 +273,7 @@ void TurboAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
     Label skip;
     int offset = code->builtin_index() * kSystemPointerSize +
                  IsolateData::builtin_entry_table_offset();
-    LoadP(ip, MemOperand(kRootRegister, offset));
+    LoadU64(ip, MemOperand(kRootRegister, offset));
     if (cond != al) b(NegateCondition(cond), &skip);
     Call(ip);
     bind(&skip);
@@ -412,7 +412,7 @@ void TurboAssembler::MultiPop(RegList regs, Register location) {
   for (int16_t i = 0; i < Register::kNumRegisters; i++) {
     if ((regs & (1 << i)) != 0) {
-      LoadP(ToRegister(i), MemOperand(location, stack_offset));
+      LoadU64(ToRegister(i), MemOperand(location, stack_offset));
       stack_offset += kSystemPointerSize;
     }
   }
@@ -478,8 +478,8 @@ void TurboAssembler::MultiPopV128(RegList dregs, Register location) {
 void TurboAssembler::LoadRoot(Register destination, RootIndex index,
                               Condition cond) {
   DCHECK(cond == al);
-  LoadP(destination,
-        MemOperand(kRootRegister, RootRegisterOffsetForRootIndex(index)), r0);
+  LoadU64(destination,
+          MemOperand(kRootRegister, RootRegisterOffsetForRootIndex(index)), r0);
 }
 void TurboAssembler::LoadTaggedPointerField(const Register& destination,
@@ -488,7 +488,7 @@ void TurboAssembler::LoadTaggedPointerField(const Register& destination,
   if (COMPRESS_POINTERS_BOOL) {
     DecompressTaggedPointer(destination, field_operand);
   } else {
-    LoadP(destination, field_operand, scratch);
+    LoadU64(destination, field_operand, scratch);
   }
 }
@@ -498,7 +498,7 @@ void TurboAssembler::LoadAnyTaggedField(const Register& destination,
   if (COMPRESS_POINTERS_BOOL) {
     DecompressAnyTagged(destination, field_operand);
   } else {
-    LoadP(destination, field_operand, scratch);
+    LoadU64(destination, field_operand, scratch);
   }
 }
@@ -506,7 +506,7 @@ void TurboAssembler::SmiUntag(Register dst, const MemOperand& src, RCBit rc) {
   if (SmiValuesAre31Bits()) {
     lwz(dst, src);
   } else {
-    LoadP(dst, src);
+    LoadU64(dst, src);
  }
   SmiUntag(dst, rc);
@@ -850,12 +850,12 @@ void TurboAssembler::PushStandardFrame(Register function_reg) {
 void TurboAssembler::RestoreFrameStateForTailCall() {
   if (FLAG_enable_embedded_constant_pool) {
-    LoadP(kConstantPoolRegister,
-          MemOperand(fp, StandardFrameConstants::kConstantPoolOffset));
+    LoadU64(kConstantPoolRegister,
+            MemOperand(fp, StandardFrameConstants::kConstantPoolOffset));
     set_constant_pool_available(false);
   }
-  LoadP(r0, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
-  LoadP(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+  LoadU64(r0, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
+  LoadU64(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   mtlr(r0);
 }
@@ -1175,11 +1175,11 @@ int TurboAssembler::LeaveFrame(StackFrame::Type type, int stack_adjustment) {
   // Drop the execution stack down to the frame pointer and restore
   // the caller's state.
   int frame_ends;
-  LoadP(r0, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
-  LoadP(ip, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+  LoadU64(r0, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
+  LoadU64(ip, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   if (FLAG_enable_embedded_constant_pool) {
-    LoadP(kConstantPoolRegister,
-          MemOperand(fp, StandardFrameConstants::kConstantPoolOffset));
+    LoadU64(kConstantPoolRegister,
+            MemOperand(fp, StandardFrameConstants::kConstantPoolOffset));
   }
   mtlr(r0);
   frame_ends = pc_offset();
@@ -1306,7 +1306,7 @@ void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count,
   // Restore current context from top and clear it in debug mode.
   Move(ip,
        ExternalReference::Create(IsolateAddressId::kContextAddress, isolate()));
-  LoadP(cp, MemOperand(ip));
+  LoadU64(cp, MemOperand(ip));
 #ifdef DEBUG
   mov(r6, Operand(Context::kInvalidContext));
@@ -1394,7 +1394,7 @@ void MacroAssembler::LoadStackLimit(Register destination, StackLimitKind kind) {
   intptr_t offset =
       TurboAssembler::RootRegisterOffsetForExternalReference(isolate, limit);
   CHECK(is_int32(offset));
-  LoadP(destination, MemOperand(kRootRegister, offset), r0);
+  LoadU64(destination, MemOperand(kRootRegister, offset), r0);
 }
 void MacroAssembler::StackOverflowCheck(Register num_args, Register scratch,
@@ -1617,7 +1617,7 @@ void MacroAssembler::PushStackHandler() {
   // Preserve r4-r8.
   Move(r3,
        ExternalReference::Create(IsolateAddressId::kHandlerAddress, isolate()));
-  LoadP(r0, MemOperand(r3));
+  LoadU64(r0, MemOperand(r3));
   push(r0);
   // Set this new handler as the current one.
@@ -1798,7 +1798,7 @@ void TurboAssembler::TruncateDoubleToI(Isolate* isolate, Zone* zone,
     Call(BUILTIN_CODE(isolate, DoubleToI), RelocInfo::CODE_TARGET);
   }
-  LoadP(result, MemOperand(sp));
+  LoadU64(result, MemOperand(sp));
   addi(sp, sp, Operand(kDoubleSize));
   pop(r0);
   mtlr(r0);
@@ -2209,9 +2209,9 @@ void TurboAssembler::CallCFunctionHelper(Register function,
   if (ABI_USES_FUNCTION_DESCRIPTORS && has_function_descriptor) {
     // AIX/PPC64BE Linux uses a function descriptor. When calling C code be
     // aware of this descriptor and pick up values from it
-    LoadP(ToRegister(ABI_TOC_REGISTER),
-          MemOperand(function, kSystemPointerSize));
-    LoadP(ip, MemOperand(function, 0));
+    LoadU64(ToRegister(ABI_TOC_REGISTER),
+            MemOperand(function, kSystemPointerSize));
+    LoadU64(ip, MemOperand(function, 0));
     dest = ip;
   } else if (ABI_CALL_VIA_IP) {
     // pLinux and Simualtor, not AIX
@@ -2243,7 +2243,7 @@ void TurboAssembler::CallCFunctionHelper(Register function,
       CalculateStackPassedWords(num_reg_arguments, num_double_arguments);
   int stack_space = kNumRequiredStackFrameSlots + stack_passed_arguments;
   if (ActivationFrameAlignment() > kSystemPointerSize) {
-    LoadP(sp, MemOperand(sp, stack_space * kSystemPointerSize));
+    LoadU64(sp, MemOperand(sp, stack_space * kSystemPointerSize));
   } else {
     addi(sp, sp, Operand(stack_space * kSystemPointerSize));
   }
@@ -2255,7 +2255,7 @@ void TurboAssembler::CheckPageFlag(
     int mask, Condition cc, Label* condition_met) {
   DCHECK(cc == ne || cc == eq);
   ClearRightImm(scratch, object, Operand(kPageSizeBits));
-  LoadP(scratch, MemOperand(scratch, BasicMemoryChunk::kFlagsOffset));
+  LoadU64(scratch, MemOperand(scratch, BasicMemoryChunk::kFlagsOffset));
   mov(r0, Operand(mask));
   and_(r0, scratch, r0, SetRC);
@@ -2695,8 +2695,8 @@ void MacroAssembler::AndSmiLiteral(Register dst, Register src, Smi smi,
 }
 // Load a "pointer" sized value from the memory location
-void TurboAssembler::LoadP(Register dst, const MemOperand& mem,
-                           Register scratch) {
+void TurboAssembler::LoadU64(Register dst, const MemOperand& mem,
+                             Register scratch) {
   DCHECK_EQ(mem.rb(), no_reg);
   int offset = mem.offset();
   int misaligned = (offset & 3);
@@ -3080,7 +3080,7 @@ void TurboAssembler::SwapP(Register src, MemOperand dst, Register scratch) {
   DCHECK(!AreAliased(src, dst.rb(), scratch));
   DCHECK(!AreAliased(src, scratch));
   mr(scratch, src);
-  LoadP(src, dst, r0);
+  LoadU64(src, dst, r0);
   StoreP(scratch, dst, r0);
 }
@@ -3102,14 +3102,14 @@ void TurboAssembler::SwapP(MemOperand src, MemOperand dst, Register scratch_0,
       src = dst;
       dst = temp;
     }
-    LoadP(scratch_1, dst, scratch_0);
-    LoadP(scratch_0, src);
+    LoadU64(scratch_1, dst, scratch_0);
+    LoadU64(scratch_0, src);
     StoreP(scratch_1, src);
     StoreP(scratch_0, dst, scratch_1);
   } else {
-    LoadP(scratch_1, dst, scratch_0);
+    LoadU64(scratch_1, dst, scratch_0);
     push(scratch_1);
-    LoadP(scratch_0, src, scratch_1);
+    LoadU64(scratch_0, src, scratch_1);
     StoreP(scratch_0, dst, scratch_1);
     pop(scratch_1);
     StoreP(scratch_1, src, scratch_0);
@@ -3295,9 +3295,9 @@ void TurboAssembler::LoadCodeObjectEntry(Register destination,
                 FieldMemOperand(code_object, Code::kBuiltinIndexOffset));
     ShiftLeftImm(destination, scratch, Operand(kSystemPointerSizeLog2));
     add(destination, destination, kRootRegister);
-    LoadP(destination,
-          MemOperand(destination, IsolateData::builtin_entry_table_offset()),
-          r0);
+    LoadU64(destination,
+            MemOperand(destination, IsolateData::builtin_entry_table_offset()),
+            r0);
     bind(&out);
   } else {
@@ -3331,8 +3331,9 @@ void TurboAssembler::StoreReturnAddressAndCall(Register target) {
   if (ABI_USES_FUNCTION_DESCRIPTORS) {
     // AIX/PPC64BE Linux uses a function descriptor. When calling C code be
     // aware of this descriptor and pick up values from it
-    LoadP(ToRegister(ABI_TOC_REGISTER), MemOperand(target, kSystemPointerSize));
-    LoadP(ip, MemOperand(target, 0));
+    LoadU64(ToRegister(ABI_TOC_REGISTER),
+            MemOperand(target, kSystemPointerSize));
+    LoadU64(ip, MemOperand(target, 0));
     dest = ip;
   } else if (ABI_CALL_VIA_IP && dest != ip) {
     Move(ip, target);
@@ -3353,8 +3354,8 @@ void TurboAssembler::CallForDeoptimization(Builtins::Name target, int,
                                            Label* exit, DeoptimizeKind kind,
                                            Label* ret, Label*) {
   BlockTrampolinePoolScope block_trampoline_pool(this);
-  LoadP(ip, MemOperand(kRootRegister,
-                       IsolateData::builtin_entry_slot_offset(target)));
+  LoadU64(ip, MemOperand(kRootRegister,
+                         IsolateData::builtin_entry_slot_offset(target)));
   Call(ip);
   DCHECK_EQ(SizeOfCodeGeneratedSince(exit),
             (kind == DeoptimizeKind::kLazy)
...
...@@ -147,7 +147,10 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase { ...@@ -147,7 +147,10 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
} }
// These exist to provide portability between 32 and 64bit // These exist to provide portability between 32 and 64bit
void LoadP(Register dst, const MemOperand& mem, Register scratch = no_reg); void LoadU64(Register dst, const MemOperand& mem, Register scratch = no_reg);
void LoadP(Register dst, const MemOperand& mem, Register scratch = no_reg) {
LoadU64(dst, mem, no_reg);
}
void LoadPU(Register dst, const MemOperand& mem, Register scratch = no_reg); void LoadPU(Register dst, const MemOperand& mem, Register scratch = no_reg);
void LoadWordArith(Register dst, const MemOperand& mem, void LoadWordArith(Register dst, const MemOperand& mem,
Register scratch = no_reg); Register scratch = no_reg);
...@@ -249,36 +252,36 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase { ...@@ -249,36 +252,36 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
// Pop two registers. Pops rightmost register first (from lower address). // Pop two registers. Pops rightmost register first (from lower address).
void Pop(Register src1, Register src2) { void Pop(Register src1, Register src2) {
LoadP(src2, MemOperand(sp, 0)); LoadU64(src2, MemOperand(sp, 0));
LoadP(src1, MemOperand(sp, kSystemPointerSize)); LoadU64(src1, MemOperand(sp, kSystemPointerSize));
addi(sp, sp, Operand(2 * kSystemPointerSize)); addi(sp, sp, Operand(2 * kSystemPointerSize));
} }
// Pop three registers. Pops rightmost register first (from lower address). // Pop three registers. Pops rightmost register first (from lower address).
void Pop(Register src1, Register src2, Register src3) { void Pop(Register src1, Register src2, Register src3) {
LoadP(src3, MemOperand(sp, 0)); LoadU64(src3, MemOperand(sp, 0));
LoadP(src2, MemOperand(sp, kSystemPointerSize)); LoadU64(src2, MemOperand(sp, kSystemPointerSize));
LoadP(src1, MemOperand(sp, 2 * kSystemPointerSize)); LoadU64(src1, MemOperand(sp, 2 * kSystemPointerSize));
addi(sp, sp, Operand(3 * kSystemPointerSize)); addi(sp, sp, Operand(3 * kSystemPointerSize));
} }
// Pop four registers. Pops rightmost register first (from lower address). // Pop four registers. Pops rightmost register first (from lower address).
void Pop(Register src1, Register src2, Register src3, Register src4) { void Pop(Register src1, Register src2, Register src3, Register src4) {
LoadP(src4, MemOperand(sp, 0)); LoadU64(src4, MemOperand(sp, 0));
LoadP(src3, MemOperand(sp, kSystemPointerSize)); LoadU64(src3, MemOperand(sp, kSystemPointerSize));
LoadP(src2, MemOperand(sp, 2 * kSystemPointerSize)); LoadU64(src2, MemOperand(sp, 2 * kSystemPointerSize));
LoadP(src1, MemOperand(sp, 3 * kSystemPointerSize)); LoadU64(src1, MemOperand(sp, 3 * kSystemPointerSize));
addi(sp, sp, Operand(4 * kSystemPointerSize)); addi(sp, sp, Operand(4 * kSystemPointerSize));
} }
// Pop five registers. Pops rightmost register first (from lower address). // Pop five registers. Pops rightmost register first (from lower address).
void Pop(Register src1, Register src2, Register src3, Register src4, void Pop(Register src1, Register src2, Register src3, Register src4,
Register src5) { Register src5) {
LoadP(src5, MemOperand(sp, 0)); LoadU64(src5, MemOperand(sp, 0));
LoadP(src4, MemOperand(sp, kSystemPointerSize)); LoadU64(src4, MemOperand(sp, kSystemPointerSize));
LoadP(src3, MemOperand(sp, 2 * kSystemPointerSize)); LoadU64(src3, MemOperand(sp, 2 * kSystemPointerSize));
LoadP(src2, MemOperand(sp, 3 * kSystemPointerSize)); LoadU64(src2, MemOperand(sp, 3 * kSystemPointerSize));
LoadP(src1, MemOperand(sp, 4 * kSystemPointerSize)); LoadU64(src1, MemOperand(sp, 4 * kSystemPointerSize));
addi(sp, sp, Operand(5 * kSystemPointerSize)); addi(sp, sp, Operand(5 * kSystemPointerSize));
} }
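The Pop overloads above all follow the same shape: the rightmost register argument is read from the lowest stack address, the leftmost from the highest, and the stack pointer is adjusted once at the end. A hedged sketch that models this order with an ordinary array standing in for the machine stack (names are illustrative, not V8's):

  #include <cstddef>
  #include <cstdint>
  #include <iostream>
  #include <vector>

  // Pops two values the way the two-register Pop() overload does: src2 comes
  // from the lower address, src1 from the next slot up, and the stack pointer
  // is advanced once past both slots.
  void Pop2(const std::vector<uint64_t>& stack, size_t& sp,
            uint64_t& src1, uint64_t& src2) {
    src2 = stack[sp];      // LoadU64(src2, MemOperand(sp, 0))
    src1 = stack[sp + 1];  // LoadU64(src1, MemOperand(sp, kSystemPointerSize))
    sp += 2;               // addi(sp, sp, Operand(2 * kSystemPointerSize))
  }

  int main() {
    std::vector<uint64_t> stack = {111, 222, 333};
    size_t sp = 0;
    uint64_t a = 0, b = 0;
    Pop2(stack, sp, a, b);
    std::cout << a << " " << b << " " << sp << "\n";  // 222 111 2
  }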
...@@ -739,7 +742,7 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler { ...@@ -739,7 +742,7 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler {
// TODO(victorgomes): Remove this function once we stick with the reversed // TODO(victorgomes): Remove this function once we stick with the reversed
// arguments order. // arguments order.
void LoadReceiver(Register dest, Register argc) { void LoadReceiver(Register dest, Register argc) {
LoadP(dest, MemOperand(sp, 0)); LoadU64(dest, MemOperand(sp, 0));
} }
void StoreReceiver(Register rec, Register argc, Register scratch) { void StoreReceiver(Register rec, Register argc, Register scratch) {
......
...@@ -1130,7 +1130,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( ...@@ -1130,7 +1130,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break; break;
case kArchParentFramePointer: case kArchParentFramePointer:
if (frame_access_state()->has_frame()) { if (frame_access_state()->has_frame()) {
__ LoadP(i.OutputRegister(), MemOperand(fp, 0)); __ LoadU64(i.OutputRegister(), MemOperand(fp, 0));
} else { } else {
__ mr(i.OutputRegister(), fp); __ mr(i.OutputRegister(), fp);
} }
...@@ -1228,7 +1228,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( ...@@ -1228,7 +1228,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
__ LoadSimd128(i.OutputSimd128Register(), MemOperand(fp, ip)); __ LoadSimd128(i.OutputSimd128Register(), MemOperand(fp, ip));
} }
} else { } else {
__ LoadP(i.OutputRegister(), MemOperand(fp, offset), r0); __ LoadU64(i.OutputRegister(), MemOperand(fp, offset), r0);
} }
break; break;
} }
...@@ -4142,11 +4142,11 @@ void CodeGenerator::AssembleConstructFrame() { ...@@ -4142,11 +4142,11 @@ void CodeGenerator::AssembleConstructFrame() {
// check in the condition code. // check in the condition code.
if ((required_slots * kSystemPointerSize) < (FLAG_stack_size * 1024)) { if ((required_slots * kSystemPointerSize) < (FLAG_stack_size * 1024)) {
Register scratch = ip; Register scratch = ip;
__ LoadP( __ LoadU64(
scratch, scratch,
FieldMemOperand(kWasmInstanceRegister, FieldMemOperand(kWasmInstanceRegister,
WasmInstanceObject::kRealStackLimitAddressOffset)); WasmInstanceObject::kRealStackLimitAddressOffset));
__ LoadP(scratch, MemOperand(scratch), r0); __ LoadU64(scratch, MemOperand(scratch), r0);
__ Add(scratch, scratch, required_slots * kSystemPointerSize, r0); __ Add(scratch, scratch, required_slots * kSystemPointerSize, r0);
__ cmpl(sp, scratch); __ cmpl(sp, scratch);
__ bge(&done); __ bge(&done);
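The Wasm stack check above is a two-level load: the first LoadU64 fetches the address of the real stack limit out of the instance object, the second dereferences that address, and the limit plus the frame's required bytes is compared against sp. A hedged standalone sketch of that double indirection, with plain pointers in place of V8's instance object:

  #include <cstdint>
  #include <iostream>

  // Returns true when there is room for `required_bytes` below `sp`,
  // mirroring the load-address / load-value / add / compare sequence.
  bool HasStackRoom(uintptr_t sp, uintptr_t* const* limit_address_slot,
                    uintptr_t required_bytes) {
    uintptr_t* limit_address = *limit_address_slot;  // first LoadU64
    uintptr_t limit = *limit_address;                // second LoadU64
    return sp >= limit + required_bytes;             // Add + cmpl + bge
  }

  int main() {
    uintptr_t real_limit = 0x1000;
    uintptr_t* slot = &real_limit;
    std::cout << HasStackRoom(0x2000, &slot, 0x800) << "\n";  // 1
    std::cout << HasStackRoom(0x1400, &slot, 0x800) << "\n";  // 0
  }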
...@@ -4257,7 +4257,7 @@ void CodeGenerator::AssembleReturn(InstructionOperand* additional_pop_count) { ...@@ -4257,7 +4257,7 @@ void CodeGenerator::AssembleReturn(InstructionOperand* additional_pop_count) {
} }
if (drop_jsargs) { if (drop_jsargs) {
// Get the actual argument count. // Get the actual argument count.
__ LoadP(argc_reg, MemOperand(fp, StandardFrameConstants::kArgCOffset)); __ LoadU64(argc_reg, MemOperand(fp, StandardFrameConstants::kArgCOffset));
} }
AssembleDeconstructFrame(); AssembleDeconstructFrame();
} }
...@@ -4320,10 +4320,10 @@ void CodeGenerator::AssembleMove(InstructionOperand* source, ...@@ -4320,10 +4320,10 @@ void CodeGenerator::AssembleMove(InstructionOperand* source,
DCHECK(destination->IsRegister() || destination->IsStackSlot()); DCHECK(destination->IsRegister() || destination->IsStackSlot());
MemOperand src = g.ToMemOperand(source); MemOperand src = g.ToMemOperand(source);
if (destination->IsRegister()) { if (destination->IsRegister()) {
__ LoadP(g.ToRegister(destination), src, r0); __ LoadU64(g.ToRegister(destination), src, r0);
} else { } else {
Register temp = kScratchReg; Register temp = kScratchReg;
__ LoadP(temp, src, r0); __ LoadU64(temp, src, r0);
__ StoreP(temp, g.ToMemOperand(destination), r0); __ StoreP(temp, g.ToMemOperand(destination), r0);
} }
} else if (source->IsConstant()) { } else if (source->IsConstant()) {
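When both the source and the destination of a move live on the stack, the hunk above bounces the value through a scratch register: one LoadU64 into kScratchReg, one StoreP back out, since there is no memory-to-memory move. A minimal sketch of that pattern with simulated slots and illustrative names:

  #include <cstdint>
  #include <iostream>

  // Stack-slot to stack-slot moves need a register in between; `scratch`
  // plays the role of kScratchReg in the generated code.
  void MoveSlot(uint64_t* slots, int src_index, int dst_index) {
    uint64_t scratch = slots[src_index];  // LoadU64(temp, src, r0)
    slots[dst_index] = scratch;           // StoreP(temp, dst, r0)
  }

  int main() {
    uint64_t slots[4] = {10, 20, 30, 40};
    MoveSlot(slots, 0, 3);
    std::cout << slots[3] << "\n";  // 10
  }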
......
...@@ -162,7 +162,7 @@ void RegExpMacroAssemblerPPC::AdvanceRegister(int reg, int by) { ...@@ -162,7 +162,7 @@ void RegExpMacroAssemblerPPC::AdvanceRegister(int reg, int by) {
DCHECK_LE(0, reg); DCHECK_LE(0, reg);
DCHECK_GT(num_registers_, reg); DCHECK_GT(num_registers_, reg);
if (by != 0) { if (by != 0) {
__ LoadP(r3, register_location(reg), r0); __ LoadU64(r3, register_location(reg), r0);
__ mov(r0, Operand(by)); __ mov(r0, Operand(by));
__ add(r3, r3, r0); __ add(r3, r3, r0);
__ StoreP(r3, register_location(reg), r0); __ StoreP(r3, register_location(reg), r0);
...@@ -174,7 +174,7 @@ void RegExpMacroAssemblerPPC::Backtrack() { ...@@ -174,7 +174,7 @@ void RegExpMacroAssemblerPPC::Backtrack() {
CheckPreemption(); CheckPreemption();
if (has_backtrack_limit()) { if (has_backtrack_limit()) {
Label next; Label next;
__ LoadP(r3, MemOperand(frame_pointer(), kBacktrackCount), r0); __ LoadU64(r3, MemOperand(frame_pointer(), kBacktrackCount), r0);
__ addi(r3, r3, Operand(1)); __ addi(r3, r3, Operand(1));
__ StoreP(r3, MemOperand(frame_pointer(), kBacktrackCount), r0); __ StoreP(r3, MemOperand(frame_pointer(), kBacktrackCount), r0);
__ mov(r0, Operand(backtrack_limit())); __ mov(r0, Operand(backtrack_limit()));
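Backtrack() above keeps a counter in a frame slot: load it, increment, store it back, then compare against the configured backtrack limit. A hedged sketch of that bookkeeping (the slot and limit values are illustrative, and the branch taken on reaching the limit is not shown in this hunk):

  #include <cstdint>
  #include <iostream>

  // Returns true once the incremented backtrack count has reached the limit,
  // mirroring the LoadU64 / addi / StoreP / cmp sequence on kBacktrackCount.
  bool BumpAndCheckBacktracks(uint64_t* backtrack_count_slot, uint64_t limit) {
    uint64_t count = *backtrack_count_slot;  // LoadU64(r3, ...)
    count += 1;                              // addi(r3, r3, Operand(1))
    *backtrack_count_slot = count;           // StoreP(r3, ...)
    return count == limit;                   // mov(r0, limit); cmp
  }

  int main() {
    uint64_t slot = 0;
    for (int i = 0; i < 4; ++i)
      std::cout << BumpAndCheckBacktracks(&slot, 3) << "\n";  // 0 0 1 0
  }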
...@@ -213,7 +213,7 @@ void RegExpMacroAssemblerPPC::CheckCharacterGT(uc16 limit, Label* on_greater) { ...@@ -213,7 +213,7 @@ void RegExpMacroAssemblerPPC::CheckCharacterGT(uc16 limit, Label* on_greater) {
} }
void RegExpMacroAssemblerPPC::CheckAtStart(int cp_offset, Label* on_at_start) { void RegExpMacroAssemblerPPC::CheckAtStart(int cp_offset, Label* on_at_start) {
__ LoadP(r4, MemOperand(frame_pointer(), kStringStartMinusOne)); __ LoadU64(r4, MemOperand(frame_pointer(), kStringStartMinusOne));
__ addi(r3, current_input_offset(), __ addi(r3, current_input_offset(),
Operand(-char_size() + cp_offset * char_size())); Operand(-char_size() + cp_offset * char_size()));
__ cmp(r3, r4); __ cmp(r3, r4);
...@@ -222,7 +222,7 @@ void RegExpMacroAssemblerPPC::CheckAtStart(int cp_offset, Label* on_at_start) { ...@@ -222,7 +222,7 @@ void RegExpMacroAssemblerPPC::CheckAtStart(int cp_offset, Label* on_at_start) {
void RegExpMacroAssemblerPPC::CheckNotAtStart(int cp_offset, void RegExpMacroAssemblerPPC::CheckNotAtStart(int cp_offset,
Label* on_not_at_start) { Label* on_not_at_start) {
__ LoadP(r4, MemOperand(frame_pointer(), kStringStartMinusOne)); __ LoadU64(r4, MemOperand(frame_pointer(), kStringStartMinusOne));
__ addi(r3, current_input_offset(), __ addi(r3, current_input_offset(),
Operand(-char_size() + cp_offset * char_size())); Operand(-char_size() + cp_offset * char_size()));
__ cmp(r3, r4); __ cmp(r3, r4);
...@@ -238,7 +238,7 @@ void RegExpMacroAssemblerPPC::CheckCharacterLT(uc16 limit, Label* on_less) { ...@@ -238,7 +238,7 @@ void RegExpMacroAssemblerPPC::CheckCharacterLT(uc16 limit, Label* on_less) {
void RegExpMacroAssemblerPPC::CheckGreedyLoop(Label* on_equal) { void RegExpMacroAssemblerPPC::CheckGreedyLoop(Label* on_equal) {
Label backtrack_non_equal; Label backtrack_non_equal;
__ LoadP(r3, MemOperand(backtrack_stackpointer(), 0)); __ LoadU64(r3, MemOperand(backtrack_stackpointer(), 0));
__ cmp(current_input_offset(), r3); __ cmp(current_input_offset(), r3);
__ bne(&backtrack_non_equal); __ bne(&backtrack_non_equal);
__ addi(backtrack_stackpointer(), backtrack_stackpointer(), __ addi(backtrack_stackpointer(), backtrack_stackpointer(),
...@@ -251,8 +251,9 @@ void RegExpMacroAssemblerPPC::CheckGreedyLoop(Label* on_equal) { ...@@ -251,8 +251,9 @@ void RegExpMacroAssemblerPPC::CheckGreedyLoop(Label* on_equal) {
void RegExpMacroAssemblerPPC::CheckNotBackReferenceIgnoreCase( void RegExpMacroAssemblerPPC::CheckNotBackReferenceIgnoreCase(
int start_reg, bool read_backward, bool unicode, Label* on_no_match) { int start_reg, bool read_backward, bool unicode, Label* on_no_match) {
Label fallthrough; Label fallthrough;
__ LoadP(r3, register_location(start_reg), r0); // Index of start of capture __ LoadU64(r3, register_location(start_reg), r0); // Index of start of capture
__ LoadP(r4, register_location(start_reg + 1), r0); // Index of end __ LoadU64(r4, register_location(start_reg + 1), r0); // Index of end
__ sub(r4, r4, r3, LeaveOE, SetRC); // Length of capture. __ sub(r4, r4, r3, LeaveOE, SetRC); // Length of capture.
// At this point, the capture registers are either both set or both cleared. // At this point, the capture registers are either both set or both cleared.
...@@ -262,7 +263,7 @@ void RegExpMacroAssemblerPPC::CheckNotBackReferenceIgnoreCase( ...@@ -262,7 +263,7 @@ void RegExpMacroAssemblerPPC::CheckNotBackReferenceIgnoreCase(
// Check that there are enough characters left in the input. // Check that there are enough characters left in the input.
if (read_backward) { if (read_backward) {
__ LoadP(r6, MemOperand(frame_pointer(), kStringStartMinusOne)); __ LoadU64(r6, MemOperand(frame_pointer(), kStringStartMinusOne));
__ add(r6, r6, r4); __ add(r6, r6, r4);
__ cmp(current_input_offset(), r6); __ cmp(current_input_offset(), r6);
BranchOrBacktrack(le, on_no_match); BranchOrBacktrack(le, on_no_match);
...@@ -325,9 +326,10 @@ void RegExpMacroAssemblerPPC::CheckNotBackReferenceIgnoreCase( ...@@ -325,9 +326,10 @@ void RegExpMacroAssemblerPPC::CheckNotBackReferenceIgnoreCase(
// Compute new value of character position after the matched part. // Compute new value of character position after the matched part.
__ sub(current_input_offset(), r5, end_of_input_address()); __ sub(current_input_offset(), r5, end_of_input_address());
if (read_backward) { if (read_backward) {
__ LoadP(r3, register_location(start_reg)); // Index of start of capture __ LoadU64(r3, register_location(start_reg)); // Index of start of capture
__ LoadP(r4, register_location(start_reg + 1)); // Index of end of capture __ LoadU64(r4, register_location(start_reg + 1)); // Index of end of capture
__ add(current_input_offset(), current_input_offset(), r3); __ add(current_input_offset(), current_input_offset(), r3);
__ sub(current_input_offset(), current_input_offset(), r4); __ sub(current_input_offset(), current_input_offset(), r4);
} }
...@@ -391,8 +393,8 @@ void RegExpMacroAssemblerPPC::CheckNotBackReference(int start_reg, ...@@ -391,8 +393,8 @@ void RegExpMacroAssemblerPPC::CheckNotBackReference(int start_reg,
Label fallthrough; Label fallthrough;
// Find length of back-referenced capture. // Find length of back-referenced capture.
__ LoadP(r3, register_location(start_reg), r0); __ LoadU64(r3, register_location(start_reg), r0);
__ LoadP(r4, register_location(start_reg + 1), r0); __ LoadU64(r4, register_location(start_reg + 1), r0);
__ sub(r4, r4, r3, LeaveOE, SetRC); // Length to check. __ sub(r4, r4, r3, LeaveOE, SetRC); // Length to check.
// At this point, the capture registers are either both set or both cleared. // At this point, the capture registers are either both set or both cleared.
...@@ -402,7 +404,7 @@ void RegExpMacroAssemblerPPC::CheckNotBackReference(int start_reg, ...@@ -402,7 +404,7 @@ void RegExpMacroAssemblerPPC::CheckNotBackReference(int start_reg,
// Check that there are enough characters left in the input. // Check that there are enough characters left in the input.
if (read_backward) { if (read_backward) {
__ LoadP(r6, MemOperand(frame_pointer(), kStringStartMinusOne)); __ LoadU64(r6, MemOperand(frame_pointer(), kStringStartMinusOne));
__ add(r6, r6, r4); __ add(r6, r6, r4);
__ cmp(current_input_offset(), r6); __ cmp(current_input_offset(), r6);
BranchOrBacktrack(le, on_no_match); BranchOrBacktrack(le, on_no_match);
...@@ -442,8 +444,9 @@ void RegExpMacroAssemblerPPC::CheckNotBackReference(int start_reg, ...@@ -442,8 +444,9 @@ void RegExpMacroAssemblerPPC::CheckNotBackReference(int start_reg,
// Move current character position to position after match. // Move current character position to position after match.
__ sub(current_input_offset(), r5, end_of_input_address()); __ sub(current_input_offset(), r5, end_of_input_address());
if (read_backward) { if (read_backward) {
__ LoadP(r3, register_location(start_reg)); // Index of start of capture __ LoadU64(r3, register_location(start_reg)); // Index of start of capture
__ LoadP(r4, register_location(start_reg + 1)); // Index of end of capture __ LoadU64(r4, register_location(start_reg + 1)); // Index of end of capture
__ add(current_input_offset(), current_input_offset(), r3); __ add(current_input_offset(), current_input_offset(), r3);
__ sub(current_input_offset(), current_input_offset(), r4); __ sub(current_input_offset(), current_input_offset(), r4);
} }
...@@ -715,7 +718,7 @@ Handle<HeapObject> RegExpMacroAssemblerPPC::GetCode(Handle<String> source) { ...@@ -715,7 +718,7 @@ Handle<HeapObject> RegExpMacroAssemblerPPC::GetCode(Handle<String> source) {
ExternalReference stack_limit = ExternalReference stack_limit =
ExternalReference::address_of_jslimit(isolate()); ExternalReference::address_of_jslimit(isolate());
__ mov(r3, Operand(stack_limit)); __ mov(r3, Operand(stack_limit));
__ LoadP(r3, MemOperand(r3)); __ LoadU64(r3, MemOperand(r3));
__ sub(r3, sp, r3, LeaveOE, SetRC); __ sub(r3, sp, r3, LeaveOE, SetRC);
// Handle it if the stack pointer is already below the stack limit. // Handle it if the stack pointer is already below the stack limit.
__ ble(&stack_limit_hit, cr0); __ ble(&stack_limit_hit, cr0);
...@@ -739,14 +742,14 @@ Handle<HeapObject> RegExpMacroAssemblerPPC::GetCode(Handle<String> source) { ...@@ -739,14 +742,14 @@ Handle<HeapObject> RegExpMacroAssemblerPPC::GetCode(Handle<String> source) {
// Allocate space on stack for registers. // Allocate space on stack for registers.
__ Add(sp, sp, -num_registers_ * kSystemPointerSize, r0); __ Add(sp, sp, -num_registers_ * kSystemPointerSize, r0);
// Load string end. // Load string end.
__ LoadP(end_of_input_address(), MemOperand(frame_pointer(), kInputEnd)); __ LoadU64(end_of_input_address(), MemOperand(frame_pointer(), kInputEnd));
// Load input start. // Load input start.
__ LoadP(r3, MemOperand(frame_pointer(), kInputStart)); __ LoadU64(r3, MemOperand(frame_pointer(), kInputStart));
// Find negative length (offset of start relative to end). // Find negative length (offset of start relative to end).
__ sub(current_input_offset(), r3, end_of_input_address()); __ sub(current_input_offset(), r3, end_of_input_address());
// Set r3 to address of char before start of the input string // Set r3 to address of char before start of the input string
// (effectively string position -1). // (effectively string position -1).
__ LoadP(r4, MemOperand(frame_pointer(), kStartIndex)); __ LoadU64(r4, MemOperand(frame_pointer(), kStartIndex));
__ subi(r3, current_input_offset(), Operand(char_size())); __ subi(r3, current_input_offset(), Operand(char_size()));
if (mode_ == UC16) { if (mode_ == UC16) {
__ ShiftLeftImm(r0, r4, Operand(1)); __ ShiftLeftImm(r0, r4, Operand(1));
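The setup above works in byte offsets relative to the end of the input: the start index loaded from the frame is scaled by the character size (shifted left by one for two-byte UC16 strings) before it is folded into the "string position -1" value. A hedged sketch of that scaling step, with illustrative names:

  #include <cstdint>
  #include <iostream>

  enum class Mode { LATIN1, UC16 };

  // Converts a character index into a byte offset, matching the
  // ShiftLeftImm(r0, r4, Operand(1)) step used for two-byte strings.
  int64_t CharIndexToByteOffset(int64_t char_index, Mode mode) {
    int char_size = (mode == Mode::UC16) ? 2 : 1;
    return char_index * char_size;  // == char_index << 1 for UC16
  }

  int main() {
    std::cout << CharIndexToByteOffset(5, Mode::LATIN1) << "\n";  // 5
    std::cout << CharIndexToByteOffset(5, Mode::UC16) << "\n";    // 10
  }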
...@@ -795,8 +798,8 @@ Handle<HeapObject> RegExpMacroAssemblerPPC::GetCode(Handle<String> source) { ...@@ -795,8 +798,8 @@ Handle<HeapObject> RegExpMacroAssemblerPPC::GetCode(Handle<String> source) {
} }
// Initialize backtrack stack pointer. // Initialize backtrack stack pointer.
__ LoadP(backtrack_stackpointer(), __ LoadU64(backtrack_stackpointer(),
MemOperand(frame_pointer(), kStackHighEnd)); MemOperand(frame_pointer(), kStackHighEnd));
__ b(&start_label_); __ b(&start_label_);
...@@ -806,9 +809,9 @@ Handle<HeapObject> RegExpMacroAssemblerPPC::GetCode(Handle<String> source) { ...@@ -806,9 +809,9 @@ Handle<HeapObject> RegExpMacroAssemblerPPC::GetCode(Handle<String> source) {
__ bind(&success_label_); __ bind(&success_label_);
if (num_saved_registers_ > 0) { if (num_saved_registers_ > 0) {
// copy captures to output // copy captures to output
__ LoadP(r4, MemOperand(frame_pointer(), kInputStart)); __ LoadU64(r4, MemOperand(frame_pointer(), kInputStart));
__ LoadP(r3, MemOperand(frame_pointer(), kRegisterOutput)); __ LoadU64(r3, MemOperand(frame_pointer(), kRegisterOutput));
__ LoadP(r5, MemOperand(frame_pointer(), kStartIndex)); __ LoadU64(r5, MemOperand(frame_pointer(), kStartIndex));
__ sub(r4, end_of_input_address(), r4); __ sub(r4, end_of_input_address(), r4);
// r4 is length of input in bytes. // r4 is length of input in bytes.
if (mode_ == UC16) { if (mode_ == UC16) {
...@@ -823,8 +826,8 @@ Handle<HeapObject> RegExpMacroAssemblerPPC::GetCode(Handle<String> source) { ...@@ -823,8 +826,8 @@ Handle<HeapObject> RegExpMacroAssemblerPPC::GetCode(Handle<String> source) {
// unroll the loop once to add an operation between a load of a register // unroll the loop once to add an operation between a load of a register
// and the following use of that register. // and the following use of that register.
for (int i = 0; i < num_saved_registers_; i += 2) { for (int i = 0; i < num_saved_registers_; i += 2) {
__ LoadP(r5, register_location(i), r0); __ LoadU64(r5, register_location(i), r0);
__ LoadP(r6, register_location(i + 1), r0); __ LoadU64(r6, register_location(i + 1), r0);
if (i == 0 && global_with_zero_length_check()) { if (i == 0 && global_with_zero_length_check()) {
// Keep capture start in r25 for the zero-length check later. // Keep capture start in r25 for the zero-length check later.
__ mr(r25, r5); __ mr(r25, r5);
...@@ -847,9 +850,9 @@ Handle<HeapObject> RegExpMacroAssemblerPPC::GetCode(Handle<String> source) { ...@@ -847,9 +850,9 @@ Handle<HeapObject> RegExpMacroAssemblerPPC::GetCode(Handle<String> source) {
if (global()) { if (global()) {
// Restart matching if the regular expression is flagged as global. // Restart matching if the regular expression is flagged as global.
__ LoadP(r3, MemOperand(frame_pointer(), kSuccessfulCaptures)); __ LoadU64(r3, MemOperand(frame_pointer(), kSuccessfulCaptures));
__ LoadP(r4, MemOperand(frame_pointer(), kNumOutputRegisters)); __ LoadU64(r4, MemOperand(frame_pointer(), kNumOutputRegisters));
__ LoadP(r5, MemOperand(frame_pointer(), kRegisterOutput)); __ LoadU64(r5, MemOperand(frame_pointer(), kRegisterOutput));
// Increment success counter. // Increment success counter.
__ addi(r3, r3, Operand(1)); __ addi(r3, r3, Operand(1));
__ StoreP(r3, MemOperand(frame_pointer(), kSuccessfulCaptures)); __ StoreP(r3, MemOperand(frame_pointer(), kSuccessfulCaptures));
...@@ -866,7 +869,7 @@ Handle<HeapObject> RegExpMacroAssemblerPPC::GetCode(Handle<String> source) { ...@@ -866,7 +869,7 @@ Handle<HeapObject> RegExpMacroAssemblerPPC::GetCode(Handle<String> source) {
__ StoreP(r5, MemOperand(frame_pointer(), kRegisterOutput)); __ StoreP(r5, MemOperand(frame_pointer(), kRegisterOutput));
// Prepare r3 to initialize registers with its value in the next run. // Prepare r3 to initialize registers with its value in the next run.
__ LoadP(r3, MemOperand(frame_pointer(), kStringStartMinusOne)); __ LoadU64(r3, MemOperand(frame_pointer(), kStringStartMinusOne));
if (global_with_zero_length_check()) { if (global_with_zero_length_check()) {
// Special case for zero-length matches. // Special case for zero-length matches.
...@@ -894,7 +897,7 @@ Handle<HeapObject> RegExpMacroAssemblerPPC::GetCode(Handle<String> source) { ...@@ -894,7 +897,7 @@ Handle<HeapObject> RegExpMacroAssemblerPPC::GetCode(Handle<String> source) {
// Exit and return r3 // Exit and return r3
__ bind(&exit_label_); __ bind(&exit_label_);
if (global()) { if (global()) {
__ LoadP(r3, MemOperand(frame_pointer(), kSuccessfulCaptures)); __ LoadU64(r3, MemOperand(frame_pointer(), kSuccessfulCaptures));
} }
__ bind(&return_r3); __ bind(&return_r3);
...@@ -925,7 +928,8 @@ Handle<HeapObject> RegExpMacroAssemblerPPC::GetCode(Handle<String> source) { ...@@ -925,7 +928,8 @@ Handle<HeapObject> RegExpMacroAssemblerPPC::GetCode(Handle<String> source) {
__ bne(&return_r3); __ bne(&return_r3);
// String might have moved: Reload end of string from frame. // String might have moved: Reload end of string from frame.
__ LoadP(end_of_input_address(), MemOperand(frame_pointer(), kInputEnd)); __ LoadU64(end_of_input_address(), MemOperand(frame_pointer(), kInputEnd));
SafeReturn(); SafeReturn();
} }
...@@ -984,7 +988,7 @@ void RegExpMacroAssemblerPPC::GoTo(Label* to) { BranchOrBacktrack(al, to); } ...@@ -984,7 +988,7 @@ void RegExpMacroAssemblerPPC::GoTo(Label* to) { BranchOrBacktrack(al, to); }
void RegExpMacroAssemblerPPC::IfRegisterGE(int reg, int comparand, void RegExpMacroAssemblerPPC::IfRegisterGE(int reg, int comparand,
Label* if_ge) { Label* if_ge) {
__ LoadP(r3, register_location(reg), r0); __ LoadU64(r3, register_location(reg), r0);
__ Cmpi(r3, Operand(comparand), r0); __ Cmpi(r3, Operand(comparand), r0);
BranchOrBacktrack(ge, if_ge); BranchOrBacktrack(ge, if_ge);
} }
...@@ -992,14 +996,14 @@ void RegExpMacroAssemblerPPC::IfRegisterGE(int reg, int comparand, ...@@ -992,14 +996,14 @@ void RegExpMacroAssemblerPPC::IfRegisterGE(int reg, int comparand,
void RegExpMacroAssemblerPPC::IfRegisterLT(int reg, int comparand, void RegExpMacroAssemblerPPC::IfRegisterLT(int reg, int comparand,
Label* if_lt) { Label* if_lt) {
__ LoadP(r3, register_location(reg), r0); __ LoadU64(r3, register_location(reg), r0);
__ Cmpi(r3, Operand(comparand), r0); __ Cmpi(r3, Operand(comparand), r0);
BranchOrBacktrack(lt, if_lt); BranchOrBacktrack(lt, if_lt);
} }
void RegExpMacroAssemblerPPC::IfRegisterEqPos(int reg, Label* if_eq) { void RegExpMacroAssemblerPPC::IfRegisterEqPos(int reg, Label* if_eq) {
__ LoadP(r3, register_location(reg), r0); __ LoadU64(r3, register_location(reg), r0);
__ cmp(r3, current_input_offset()); __ cmp(r3, current_input_offset());
BranchOrBacktrack(eq, if_eq); BranchOrBacktrack(eq, if_eq);
} }
...@@ -1036,20 +1040,20 @@ void RegExpMacroAssemblerPPC::PushCurrentPosition() { ...@@ -1036,20 +1040,20 @@ void RegExpMacroAssemblerPPC::PushCurrentPosition() {
void RegExpMacroAssemblerPPC::PushRegister(int register_index, void RegExpMacroAssemblerPPC::PushRegister(int register_index,
StackCheckFlag check_stack_limit) { StackCheckFlag check_stack_limit) {
__ LoadP(r3, register_location(register_index), r0); __ LoadU64(r3, register_location(register_index), r0);
Push(r3); Push(r3);
if (check_stack_limit) CheckStackLimit(); if (check_stack_limit) CheckStackLimit();
} }
void RegExpMacroAssemblerPPC::ReadCurrentPositionFromRegister(int reg) { void RegExpMacroAssemblerPPC::ReadCurrentPositionFromRegister(int reg) {
__ LoadP(current_input_offset(), register_location(reg), r0); __ LoadU64(current_input_offset(), register_location(reg), r0);
} }
void RegExpMacroAssemblerPPC::ReadStackPointerFromRegister(int reg) { void RegExpMacroAssemblerPPC::ReadStackPointerFromRegister(int reg) {
__ LoadP(backtrack_stackpointer(), register_location(reg), r0); __ LoadU64(backtrack_stackpointer(), register_location(reg), r0);
__ LoadP(r3, MemOperand(frame_pointer(), kStackHighEnd)); __ LoadU64(r3, MemOperand(frame_pointer(), kStackHighEnd));
__ add(backtrack_stackpointer(), backtrack_stackpointer(), r3); __ add(backtrack_stackpointer(), backtrack_stackpointer(), r3);
} }
...@@ -1094,7 +1098,7 @@ void RegExpMacroAssemblerPPC::WriteCurrentPositionToRegister(int reg, ...@@ -1094,7 +1098,7 @@ void RegExpMacroAssemblerPPC::WriteCurrentPositionToRegister(int reg,
void RegExpMacroAssemblerPPC::ClearRegisters(int reg_from, int reg_to) { void RegExpMacroAssemblerPPC::ClearRegisters(int reg_from, int reg_to) {
DCHECK(reg_from <= reg_to); DCHECK(reg_from <= reg_to);
__ LoadP(r3, MemOperand(frame_pointer(), kStringStartMinusOne)); __ LoadU64(r3, MemOperand(frame_pointer(), kStringStartMinusOne));
for (int reg = reg_from; reg <= reg_to; reg++) { for (int reg = reg_from; reg <= reg_to; reg++) {
__ StoreP(r3, register_location(reg), r0); __ StoreP(r3, register_location(reg), r0);
} }
...@@ -1102,7 +1106,7 @@ void RegExpMacroAssemblerPPC::ClearRegisters(int reg_from, int reg_to) { ...@@ -1102,7 +1106,7 @@ void RegExpMacroAssemblerPPC::ClearRegisters(int reg_from, int reg_to) {
void RegExpMacroAssemblerPPC::WriteStackPointerToRegister(int reg) { void RegExpMacroAssemblerPPC::WriteStackPointerToRegister(int reg) {
__ LoadP(r4, MemOperand(frame_pointer(), kStackHighEnd)); __ LoadU64(r4, MemOperand(frame_pointer(), kStackHighEnd));
__ sub(r3, backtrack_stackpointer(), r4); __ sub(r3, backtrack_stackpointer(), r4);
__ StoreP(r3, register_location(reg), r0); __ StoreP(r3, register_location(reg), r0);
} }
...@@ -1160,7 +1164,7 @@ void RegExpMacroAssemblerPPC::CallCheckStackGuardState(Register scratch) { ...@@ -1160,7 +1164,7 @@ void RegExpMacroAssemblerPPC::CallCheckStackGuardState(Register scratch) {
// Restore the stack pointer // Restore the stack pointer
stack_space = kNumRequiredStackFrameSlots + stack_passed_arguments; stack_space = kNumRequiredStackFrameSlots + stack_passed_arguments;
if (frame_alignment > kSystemPointerSize) { if (frame_alignment > kSystemPointerSize) {
__ LoadP(sp, MemOperand(sp, stack_space * kSystemPointerSize)); __ LoadU64(sp, MemOperand(sp, stack_space * kSystemPointerSize));
} else { } else {
__ addi(sp, sp, Operand(stack_space * kSystemPointerSize)); __ addi(sp, sp, Operand(stack_space * kSystemPointerSize));
} }
...@@ -1213,7 +1217,7 @@ void RegExpMacroAssemblerPPC::CheckPosition(int cp_offset, ...@@ -1213,7 +1217,7 @@ void RegExpMacroAssemblerPPC::CheckPosition(int cp_offset,
__ Cmpi(current_input_offset(), Operand(-cp_offset * char_size()), r0); __ Cmpi(current_input_offset(), Operand(-cp_offset * char_size()), r0);
BranchOrBacktrack(ge, on_outside_input); BranchOrBacktrack(ge, on_outside_input);
} else { } else {
__ LoadP(r4, MemOperand(frame_pointer(), kStringStartMinusOne)); __ LoadU64(r4, MemOperand(frame_pointer(), kStringStartMinusOne));
__ addi(r3, current_input_offset(), Operand(cp_offset * char_size())); __ addi(r3, current_input_offset(), Operand(cp_offset * char_size()));
__ cmp(r3, r4); __ cmp(r3, r4);
BranchOrBacktrack(le, on_outside_input); BranchOrBacktrack(le, on_outside_input);
...@@ -1271,7 +1275,7 @@ void RegExpMacroAssemblerPPC::Push(Register source) { ...@@ -1271,7 +1275,7 @@ void RegExpMacroAssemblerPPC::Push(Register source) {
void RegExpMacroAssemblerPPC::Pop(Register target) { void RegExpMacroAssemblerPPC::Pop(Register target) {
DCHECK(target != backtrack_stackpointer()); DCHECK(target != backtrack_stackpointer());
__ LoadP(target, MemOperand(backtrack_stackpointer())); __ LoadU64(target, MemOperand(backtrack_stackpointer()));
__ addi(backtrack_stackpointer(), backtrack_stackpointer(), __ addi(backtrack_stackpointer(), backtrack_stackpointer(),
Operand(kSystemPointerSize)); Operand(kSystemPointerSize));
} }
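Pop() above reads the value at the current backtrack stack pointer and then moves the pointer up by one slot; the backtrack stack grows downwards, so popping means adding kSystemPointerSize. A small sketch of that discipline, with a plain array standing in for the backtrack stack:

  #include <cstddef>
  #include <cstdint>
  #include <iostream>

  // Pops one value from the simulated backtrack stack: read at the current
  // pointer, then advance the pointer by one slot (the addi above).
  uint64_t PopBacktrack(const uint64_t* stack, size_t& stackpointer) {
    uint64_t value = stack[stackpointer];  // LoadU64(target, MemOperand(...))
    stackpointer += 1;                     // addi(backtrack_stackpointer, ...)
    return value;
  }

  int main() {
    uint64_t stack[3] = {7, 8, 9};  // index 0 holds the most recent push
    size_t sp = 0;
    uint64_t first = PopBacktrack(stack, sp);
    uint64_t second = PopBacktrack(stack, sp);
    std::cout << first << " " << second << "\n";  // 7 8
  }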
...@@ -1282,7 +1286,7 @@ void RegExpMacroAssemblerPPC::CheckPreemption() { ...@@ -1282,7 +1286,7 @@ void RegExpMacroAssemblerPPC::CheckPreemption() {
ExternalReference stack_limit = ExternalReference stack_limit =
ExternalReference::address_of_jslimit(isolate()); ExternalReference::address_of_jslimit(isolate());
__ mov(r3, Operand(stack_limit)); __ mov(r3, Operand(stack_limit));
__ LoadP(r3, MemOperand(r3)); __ LoadU64(r3, MemOperand(r3));
__ cmpl(sp, r3); __ cmpl(sp, r3);
SafeCall(&check_preempt_label_, le); SafeCall(&check_preempt_label_, le);
} }
...@@ -1292,7 +1296,7 @@ void RegExpMacroAssemblerPPC::CheckStackLimit() { ...@@ -1292,7 +1296,7 @@ void RegExpMacroAssemblerPPC::CheckStackLimit() {
ExternalReference stack_limit = ExternalReference stack_limit =
ExternalReference::address_of_regexp_stack_limit_address(isolate()); ExternalReference::address_of_regexp_stack_limit_address(isolate());
__ mov(r3, Operand(stack_limit)); __ mov(r3, Operand(stack_limit));
__ LoadP(r3, MemOperand(r3)); __ LoadU64(r3, MemOperand(r3));
__ cmpl(backtrack_stackpointer(), r3); __ cmpl(backtrack_stackpointer(), r3);
SafeCall(&stack_overflow_label_, le); SafeCall(&stack_overflow_label_, le);
} }
......