Commit 2ef4e3d7 authored by Santiago Aboy Solanes, committed by Commit Bot

[ptr-compr][arm64] Introduce bottlenecks for accessing on-heap tagged fields.

This CL introduces TurboAssembler::LoadTaggedPointerField() and
TurboAssembler::LoadAnyTaggedField(), which load a field containing a
HeapObject or any tagged value, respectively, and decompress it if necessary.

Bug: v8:7703
Change-Id: I71ace74d7433a3a78d56bdcef6d2ec041df630e4
Reviewed-on: https://chromium-review.googlesource.com/c/1456098
Commit-Queue: Santiago Aboy Solanes <solanes@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Sigurd Schneider <sigurds@chromium.org>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
Cr-Commit-Position: refs/heads/master@{#59501}
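
For context, every arm64 call site in this CL follows the same pattern; a representative before/after (taken from the CompareObjectType hunk below):

    // Before: a raw 64-bit load of the tagged field.
    Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
    // After: the load goes through the bottleneck, which decompresses the
    // value when V8_COMPRESS_POINTERS is defined and is a plain Ldr otherwise.
    LoadTaggedPointerField(map, FieldMemOperand(object, HeapObject::kMapOffset));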
parent 62b85421
@@ -1616,7 +1616,8 @@ void MacroAssembler::AssertConstructor(Register object) {
     UseScratchRegisterScope temps(this);
     Register temp = temps.AcquireX();
-    Ldr(temp, FieldMemOperand(object, HeapObject::kMapOffset));
+    LoadTaggedPointerField(temp,
+                           FieldMemOperand(object, HeapObject::kMapOffset));
     Ldrb(temp, FieldMemOperand(temp, Map::kBitFieldOffset));
     Tst(temp, Operand(Map::IsConstructorBit::kMask));
@@ -1656,7 +1657,7 @@ void MacroAssembler::AssertGeneratorObject(Register object) {
   // Load map
   UseScratchRegisterScope temps(this);
   Register temp = temps.AcquireX();
-  Ldr(temp, FieldMemOperand(object, HeapObject::kMapOffset));
+  LoadTaggedPointerField(temp, FieldMemOperand(object, HeapObject::kMapOffset));
   Label do_check;
   // Load instance type and check if JSGeneratorObject
@@ -1682,7 +1683,8 @@ void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
   Label done_checking;
   AssertNotSmi(object);
   JumpIfRoot(object, RootIndex::kUndefinedValue, &done_checking);
-  Ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
+  LoadTaggedPointerField(scratch,
+                         FieldMemOperand(object, HeapObject::kMapOffset));
   CompareInstanceType(scratch, scratch, ALLOCATION_SITE_TYPE);
   Assert(eq, AbortReason::kExpectedUndefinedOrCell);
   Bind(&done_checking);
@@ -1848,9 +1850,9 @@ void TurboAssembler::LoadFromConstantsTable(Register destination,
                                             int constant_index) {
   DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kBuiltinsConstantsTable));
   LoadRoot(destination, RootIndex::kBuiltinsConstantsTable);
-  Ldr(destination,
-      FieldMemOperand(destination,
-                      FixedArray::kHeaderSize + constant_index * kPointerSize));
+  LoadTaggedPointerField(
+      destination, FieldMemOperand(destination, FixedArray::OffsetOfElementAt(
+                                                    constant_index)));
 }

 void TurboAssembler::LoadRootRelative(Register destination, int32_t offset) {
@@ -2357,7 +2359,8 @@ void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
   // allow recompilation to take effect without changing any of the
   // call sites.
   Register code = kJavaScriptCallCodeStartRegister;
-  Ldr(code, FieldMemOperand(function, JSFunction::kCodeOffset));
+  LoadTaggedPointerField(code,
+                         FieldMemOperand(function, JSFunction::kCodeOffset));
   if (flag == CALL_FUNCTION) {
     CallCodeObject(code);
   } else {
@@ -2383,12 +2386,14 @@ void MacroAssembler::InvokeFunction(Register function, Register new_target,
   Register expected_reg = x2;

-  Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset));
+  LoadTaggedPointerField(cp,
+                         FieldMemOperand(function, JSFunction::kContextOffset));
   // The number of arguments is stored as an int32_t, and -1 is a marker
   // (SharedFunctionInfo::kDontAdaptArgumentsSentinel), so we need sign
   // extension to correctly handle it.
-  Ldr(expected_reg, FieldMemOperand(function,
-                                    JSFunction::kSharedFunctionInfoOffset));
+  LoadTaggedPointerField(
+      expected_reg,
+      FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
   Ldrh(expected_reg,
        FieldMemOperand(expected_reg,
                        SharedFunctionInfo::kFormalParameterCountOffset));
@@ -2409,7 +2414,8 @@ void MacroAssembler::InvokeFunction(Register function,
   DCHECK(function.Is(x1));

   // Set up the context.
-  Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset));
+  LoadTaggedPointerField(cp,
+                         FieldMemOperand(function, JSFunction::kContextOffset));

   InvokeFunctionCode(function, no_reg, expected, actual, flag);
 }
@@ -2697,7 +2703,7 @@ void MacroAssembler::CompareObjectType(Register object,
                                        Register map,
                                        Register type_reg,
                                        InstanceType type) {
-  Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
+  LoadTaggedPointerField(map, FieldMemOperand(object, HeapObject::kMapOffset));
   CompareInstanceType(map, type_reg, type);
 }
@@ -2738,6 +2744,24 @@ void MacroAssembler::JumpIfNotRoot(const Register& obj, RootIndex index,
   B(ne, if_not_equal);
 }

+void TurboAssembler::LoadTaggedPointerField(const Register& destination,
+                                            const MemOperand& field_operand) {
+#ifdef V8_COMPRESS_POINTERS
+  DecompressTaggedPointer(destination, field_operand);
+#else
+  Ldr(destination, field_operand);
+#endif
+}
+
+void TurboAssembler::LoadAnyTaggedField(const Register& destination,
+                                        const MemOperand& field_operand) {
+#ifdef V8_COMPRESS_POINTERS
+  DecompressAnyTagged(destination, field_operand);
+#else
+  Ldr(destination, field_operand);
+#endif
+}
+
 void TurboAssembler::DecompressTaggedSigned(const Register& destination,
                                             const MemOperand& field_operand) {
   RecordComment("[ DecompressTaggedSigned");
@@ -3188,8 +3212,8 @@ void TurboAssembler::Abort(AbortReason reason) {
 }

 void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
-  Ldr(dst, NativeContextMemOperand());
-  Ldr(dst, ContextMemOperand(dst, index));
+  LoadTaggedPointerField(dst, NativeContextMemOperand());
+  LoadTaggedPointerField(dst, ContextMemOperand(dst, index));
 }
...
@@ -1176,7 +1176,16 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
   void ResetSpeculationPoisonRegister();

   // ---------------------------------------------------------------------------
-  // Pointer compresstion Support
+  // Pointer compression Support

+  // Loads a field containing a HeapObject and decompresses it if pointer
+  // compression is enabled.
+  void LoadTaggedPointerField(const Register& destination,
+                              const MemOperand& field_operand);
+
+  // Loads a field containing any tagged value and decompresses it if necessary.
+  void LoadAnyTaggedField(const Register& destination,
+                          const MemOperand& field_operand);
+
   void DecompressTaggedSigned(const Register& destination,
                               const MemOperand& field_operand);
...
@@ -50,7 +50,8 @@ void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
   if (FLAG_debug_code) {
     // Initial map for the builtin InternalArray functions should be maps.
-    __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
+    __ LoadTaggedPointerField(
+        x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
     __ Tst(x10, kSmiTagMask);
     __ Assert(ne, AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
     __ CompareObjectType(x10, x11, x12, MAP_TYPE);
@@ -272,7 +273,8 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
   //  -- sp[4*kPointerSize]: context (pushed by FrameScope)
   // -----------------------------------

-  __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
+  __ LoadTaggedPointerField(
+      x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
   __ Ldr(w4, FieldMemOperand(x4, SharedFunctionInfo::kFlagsOffset));
   __ TestAndBranchIfAnySet(w4,
                            SharedFunctionInfo::IsDerivedConstructorBit::kMask,
@@ -443,8 +445,9 @@ static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
   Label done;
   __ CompareObjectType(sfi_data, scratch1, scratch1, INTERPRETER_DATA_TYPE);
   __ B(ne, &done);
-  __ Ldr(sfi_data,
-         FieldMemOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));
+  __ LoadTaggedPointerField(
+      sfi_data,
+      FieldMemOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));
   __ Bind(&done);
 }
@@ -463,8 +466,10 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
                       kLRHasNotBeenSaved, kDontSaveFPRegs);

   // Load suspended function and context.
-  __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));
-  __ Ldr(cp, FieldMemOperand(x4, JSFunction::kContextOffset));
+  __ LoadTaggedPointerField(
+      x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));
+  __ LoadTaggedPointerField(cp,
+                            FieldMemOperand(x4, JSFunction::kContextOffset));

   // Flood function if we are stepping.
   Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
@@ -491,7 +496,8 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
   __ B(lo, &stack_overflow);

   // Get number of arguments for generator function.
-  __ Ldr(x10, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
+  __ LoadTaggedPointerField(
+      x10, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
   __ Ldrh(w10, FieldMemOperand(
                    x10, SharedFunctionInfo::kFormalParameterCountOffset));
@@ -520,8 +526,9 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {

   // Copy the function arguments from the generator object's register file.
-  __ Ldr(x5,
-         FieldMemOperand(x1, JSGeneratorObject::kParametersAndRegistersOffset));
+  __ LoadTaggedPointerField(
+      x5,
+      FieldMemOperand(x1, JSGeneratorObject::kParametersAndRegistersOffset));
   {
     Label loop, done;
     __ Cbz(x10, &done);
@@ -530,7 +537,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
     __ Bind(&loop);
     __ Sub(x10, x10, 1);
     __ Add(x11, x5, Operand(x12, LSL, kPointerSizeLog2));
-    __ Ldr(x11, FieldMemOperand(x11, FixedArray::kHeaderSize));
+    __ LoadAnyTaggedField(x11, FieldMemOperand(x11, FixedArray::kHeaderSize));
     __ Poke(x11, Operand(x10, LSL, kPointerSizeLog2));
     __ Add(x12, x12, 1);
     __ Cbnz(x10, &loop);
@@ -539,8 +546,10 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
   // Underlying function needs to have bytecode available.
   if (FLAG_debug_code) {
-    __ Ldr(x3, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
-    __ Ldr(x3, FieldMemOperand(x3, SharedFunctionInfo::kFunctionDataOffset));
+    __ LoadTaggedPointerField(
+        x3, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
+    __ LoadTaggedPointerField(
+        x3, FieldMemOperand(x3, SharedFunctionInfo::kFunctionDataOffset));
     GetSharedFunctionInfoBytecode(masm, x3, x0);
     __ CompareObjectType(x3, x3, x3, BYTECODE_ARRAY_TYPE);
     __ Assert(eq, AbortReason::kMissingBytecodeArray);
@@ -548,7 +557,8 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
   // Resume (Ignition/TurboFan) generator object.
   {
-    __ Ldr(x0, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
+    __ LoadTaggedPointerField(
+        x0, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
     __ Ldrh(w0, FieldMemOperand(
                     x0, SharedFunctionInfo::kFormalParameterCountOffset));
     // We abuse new.target both to indicate that this is a resume call and to
@@ -557,7 +567,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
     __ Mov(x3, x1);
     __ Mov(x1, x4);
     static_assert(kJavaScriptCallCodeStartRegister == x2, "ABI mismatch");
-    __ Ldr(x2, FieldMemOperand(x1, JSFunction::kCodeOffset));
+    __ LoadTaggedPointerField(x2, FieldMemOperand(x1, JSFunction::kCodeOffset));
     __ JumpCodeObject(x2);
   }
@@ -569,7 +579,8 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
     __ Push(x1, padreg, x4, x5);
     __ CallRuntime(Runtime::kDebugOnFunctionCall);
     __ Pop(padreg, x1);
-    __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));
+    __ LoadTaggedPointerField(
+        x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));
   }
   __ B(&stepping_prepared);
@@ -579,7 +590,8 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
     __ Push(x1, padreg);
     __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
     __ Pop(padreg, x1);
-    __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));
+    __ LoadTaggedPointerField(
+        x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));
   }
   __ B(&stepping_prepared);
@@ -993,7 +1005,7 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
   Register closure = x1;
   Register optimized_code_entry = scratch1;

-  __ Ldr(
+  __ LoadAnyTaggedField(
       optimized_code_entry,
       FieldMemOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));
@@ -1046,8 +1058,9 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
   // Check if the optimized code is marked for deopt. If it is, call the
   // runtime to clear it.
   Label found_deoptimized_code;
-  __ Ldr(scratch2, FieldMemOperand(optimized_code_entry,
-                                   Code::kCodeDataContainerOffset));
+  __ LoadTaggedPointerField(
+      scratch2,
+      FieldMemOperand(optimized_code_entry, Code::kCodeDataContainerOffset));
   __ Ldr(
       scratch2,
       FieldMemOperand(scratch2, CodeDataContainer::kKindSpecificFlagsOffset));
@@ -1149,9 +1162,11 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
   // Get the bytecode array from the function object and load it into
   // kInterpreterBytecodeArrayRegister.
-  __ Ldr(x0, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
-  __ Ldr(kInterpreterBytecodeArrayRegister,
-         FieldMemOperand(x0, SharedFunctionInfo::kFunctionDataOffset));
+  __ LoadTaggedPointerField(
+      x0, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
+  __ LoadTaggedPointerField(
+      kInterpreterBytecodeArrayRegister,
+      FieldMemOperand(x0, SharedFunctionInfo::kFunctionDataOffset));
   GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, x11);

   // The bytecode array could have been flushed from the shared function info,
@@ -1162,9 +1177,11 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
   __ B(ne, &compile_lazy);

   // Load the feedback vector from the closure.
-  __ Ldr(feedback_vector,
-         FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
-  __ Ldr(feedback_vector, FieldMemOperand(feedback_vector, Cell::kValueOffset));
+  __ LoadTaggedPointerField(
+      feedback_vector,
+      FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
+  __ LoadTaggedPointerField(
+      feedback_vector, FieldMemOperand(feedback_vector, Cell::kValueOffset));
   Label push_stack_frame;
   // Check if feedback vector is valid. If valid, check for optimized code
@@ -1458,8 +1475,10 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
   // get the custom trampoline, otherwise grab the entry address of the global
   // trampoline.
   __ Ldr(x1, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
-  __ Ldr(x1, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
-  __ Ldr(x1, FieldMemOperand(x1, SharedFunctionInfo::kFunctionDataOffset));
+  __ LoadTaggedPointerField(
+      x1, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
+  __ LoadTaggedPointerField(
+      x1, FieldMemOperand(x1, SharedFunctionInfo::kFunctionDataOffset));
   __ CompareObjectType(x1, kInterpreterDispatchTableRegister,
                        kInterpreterDispatchTableRegister,
                        INTERPRETER_DATA_TYPE);
@@ -1637,7 +1656,8 @@ void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
   }
   // On failure, tail call back to regular js by re-calling the function
   // which has be reset to the compile lazy builtin.
-  __ Ldr(x4, FieldMemOperand(new_target, JSFunction::kCodeOffset));
+  __ LoadTaggedPointerField(
+      x4, FieldMemOperand(new_target, JSFunction::kCodeOffset));
   __ JumpCodeObject(x4);
 }
@@ -2174,7 +2194,7 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
   // Allow x2 to be a FixedArray, or a FixedDoubleArray if x4 == 0.
   Label ok, fail;
   __ AssertNotSmi(x2, AbortReason::kOperandIsNotAFixedArray);
-  __ Ldr(x10, FieldMemOperand(x2, HeapObject::kMapOffset));
+  __ LoadTaggedPointerField(x10, FieldMemOperand(x2, HeapObject::kMapOffset));
   __ Ldrh(x13, FieldMemOperand(x10, Map::kInstanceTypeOffset));
   __ Cmp(x13, FIXED_ARRAY_TYPE);
   __ B(eq, &ok);
@@ -2250,7 +2270,7 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
   if (mode == CallOrConstructMode::kConstruct) {
     Label new_target_constructor, new_target_not_constructor;
     __ JumpIfSmi(x3, &new_target_not_constructor);
-    __ Ldr(x5, FieldMemOperand(x3, HeapObject::kMapOffset));
+    __ LoadTaggedPointerField(x5, FieldMemOperand(x3, HeapObject::kMapOffset));
     __ Ldrb(x5, FieldMemOperand(x5, Map::kBitFieldOffset));
     __ TestAndBranchIfAnySet(x5, Map::IsConstructorBit::kMask,
                              &new_target_constructor);
@@ -2281,8 +2301,9 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
   {
     __ Ldr(scratch,
            MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
-    __ Ldr(scratch,
-           FieldMemOperand(scratch, JSFunction::kSharedFunctionInfoOffset));
+    __ LoadTaggedPointerField(
+        scratch,
+        FieldMemOperand(scratch, JSFunction::kSharedFunctionInfoOffset));
     __ Ldrh(len,
             FieldMemOperand(scratch,
                             SharedFunctionInfo::kFormalParameterCountOffset));
@@ -2336,7 +2357,8 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
   // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
   // Check that function is not a "classConstructor".
   Label class_constructor;
-  __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
+  __ LoadTaggedPointerField(
+      x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
   __ Ldr(w3, FieldMemOperand(x2, SharedFunctionInfo::kFlagsOffset));
   __ TestAndBranchIfAnySet(w3, SharedFunctionInfo::IsClassConstructorBit::kMask,
                            &class_constructor);
@@ -2344,7 +2366,8 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
   // Enter the context of the function; ToObject has to run in the function
   // context, and we also need to take the global proxy from the function
   // context in case of conversion.
-  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
+  __ LoadTaggedPointerField(cp,
+                            FieldMemOperand(x1, JSFunction::kContextOffset));
   // We need to convert the receiver for non-native sloppy mode functions.
   Label done_convert;
   __ TestAndBranchIfAnySet(w3,
@@ -2395,7 +2418,8 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
         __ Pop(cp, x1, x0, padreg);
         __ SmiUntag(x0);
       }
-      __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
+      __ LoadTaggedPointerField(
+          x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
       __ Bind(&convert_receiver);
     }
     __ Poke(x3, Operand(x0, LSL, kXRegSizeLog2));
@@ -2438,8 +2462,8 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
   // Load [[BoundArguments]] into x2 and length of that into x4.
   Label no_bound_arguments;
-  __ Ldr(bound_argv,
-         FieldMemOperand(x1, JSBoundFunction::kBoundArgumentsOffset));
+  __ LoadTaggedPointerField(
+      bound_argv, FieldMemOperand(x1, JSBoundFunction::kBoundArgumentsOffset));
   __ SmiUntag(bound_argc,
               FieldMemOperand(bound_argv, FixedArray::kLengthOffset));
   __ Cbz(bound_argc, &no_bound_arguments);
@@ -2558,14 +2582,16 @@ void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
   __ AssertBoundFunction(x1);

   // Patch the receiver to [[BoundThis]].
-  __ Ldr(x10, FieldMemOperand(x1, JSBoundFunction::kBoundThisOffset));
+  __ LoadAnyTaggedField(x10,
+                        FieldMemOperand(x1, JSBoundFunction::kBoundThisOffset));
   __ Poke(x10, Operand(x0, LSL, kPointerSizeLog2));

   // Push the [[BoundArguments]] onto the stack.
   Generate_PushBoundArguments(masm);

   // Call the [[BoundTargetFunction]] via the Call builtin.
-  __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
+  __ LoadTaggedPointerField(
+      x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
   __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
           RelocInfo::CODE_TARGET);
 }
@@ -2633,7 +2659,8 @@ void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
   Label call_generic_stub;

   // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
-  __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
+  __ LoadTaggedPointerField(
+      x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
   __ Ldr(w4, FieldMemOperand(x4, SharedFunctionInfo::kFlagsOffset));
   __ TestAndBranchIfAllClear(
       w4, SharedFunctionInfo::ConstructAsBuiltinBit::kMask, &call_generic_stub);
@@ -2664,13 +2691,14 @@ void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
     Label done;
     __ Cmp(x1, x3);
     __ B(ne, &done);
-    __ Ldr(x3,
-           FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
+    __ LoadTaggedPointerField(
+        x3, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
     __ Bind(&done);
   }

   // Construct the [[BoundTargetFunction]] via the Construct builtin.
-  __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
+  __ LoadTaggedPointerField(
+      x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
   __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
 }
@@ -2688,7 +2716,7 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
   __ JumpIfSmi(x1, &non_constructor);

   // Check if target has a [[Construct]] internal method.
-  __ Ldr(x4, FieldMemOperand(x1, HeapObject::kMapOffset));
+  __ LoadTaggedPointerField(x4, FieldMemOperand(x1, HeapObject::kMapOffset));
   __ Ldrb(x2, FieldMemOperand(x4, Map::kBitFieldOffset));
   __ TestAndBranchIfAllClear(x2, Map::IsConstructorBit::kMask,
                              &non_constructor);
@@ -2892,7 +2920,8 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   //  x1 : function (passed through to callee)
   //  x3 : new target (passed through to callee)
   static_assert(kJavaScriptCallCodeStartRegister == x2, "ABI mismatch");
-  __ Ldr(x2, FieldMemOperand(function, JSFunction::kCodeOffset));
+  __ LoadTaggedPointerField(x2,
+                            FieldMemOperand(function, JSFunction::kCodeOffset));
   __ CallCodeObject(x2);

   // Store offset of return address for deoptimizer.
@@ -2906,7 +2935,8 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   __ RecordComment("-- Call without adapting args --");
   __ Bind(&dont_adapt_arguments);
   static_assert(kJavaScriptCallCodeStartRegister == x2, "ABI mismatch");
-  __ Ldr(x2, FieldMemOperand(function, JSFunction::kCodeOffset));
+  __ LoadTaggedPointerField(x2,
+                            FieldMemOperand(function, JSFunction::kCodeOffset));
   __ JumpCodeObject(x2);

   __ Bind(&stack_overflow);
@@ -2943,8 +2973,9 @@ void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
     // function.
     __ Push(kWasmInstanceRegister, kWasmCompileLazyFuncIndexRegister);

     // Load the correct CEntry builtin from the instance object.
-    __ Ldr(x2, FieldMemOperand(kWasmInstanceRegister,
-                               WasmInstanceObject::kCEntryStubOffset));
+    __ LoadTaggedPointerField(
+        x2, FieldMemOperand(kWasmInstanceRegister,
+                            WasmInstanceObject::kCEntryStubOffset));
     // Initialize the JavaScript context with 0. CEntry will use it to
     // set the current context on the isolate.
     __ Mov(cp, Smi::zero());
@@ -3247,8 +3278,9 @@ void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
     Label unexpected_map, map_ok;
     // Initial map for the builtin Array function should be a map.
-    __ Ldr(x10, FieldMemOperand(constructor,
-                                JSFunction::kPrototypeOrInitialMapOffset));
+    __ LoadTaggedPointerField(
+        x10,
+        FieldMemOperand(constructor, JSFunction::kPrototypeOrInitialMapOffset));
     // Will both indicate a nullptr and a Smi.
     __ JumpIfSmi(x10, &unexpected_map);
     __ JumpIfObjectType(x10, x10, x11, MAP_TYPE, &map_ok);
@@ -3258,8 +3290,9 @@ void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
   Register kind = w3;

   // Figure out the right elements kind
-  __ Ldr(x10, FieldMemOperand(constructor,
-                              JSFunction::kPrototypeOrInitialMapOffset));
+  __ LoadTaggedPointerField(
+      x10,
+      FieldMemOperand(constructor, JSFunction::kPrototypeOrInitialMapOffset));
   // Retrieve elements_kind from map.
   __ LoadElementsKindFromMap(kind, x10);
@@ -3626,7 +3659,8 @@ void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
   Register api_function_address = x2;
   Register js_getter = x4;

-  __ Ldr(js_getter, FieldMemOperand(callback, AccessorInfo::kJsGetterOffset));
+  __ LoadTaggedPointerField(
+      js_getter, FieldMemOperand(callback, AccessorInfo::kJsGetterOffset));
   __ Ldr(api_function_address,
          FieldMemOperand(js_getter, Foreign::kForeignAddressOffset));
...
@@ -693,12 +693,14 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
         // Check the function's context matches the context argument.
         UseScratchRegisterScope scope(tasm());
         Register temp = scope.AcquireX();
-        __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset));
+        __ LoadTaggedPointerField(
+            temp, FieldMemOperand(func, JSFunction::kContextOffset));
         __ cmp(cp, temp);
         __ Assert(eq, AbortReason::kWrongFunctionContext);
       }
       static_assert(kJavaScriptCallCodeStartRegister == x2, "ABI mismatch");
-      __ Ldr(x2, FieldMemOperand(func, JSFunction::kCodeOffset));
+      __ LoadTaggedPointerField(x2,
+                                FieldMemOperand(func, JSFunction::kCodeOffset));
       __ CallCodeObject(x2);
       RecordCallPosition(instr);
       frame_access_state()->ClearSPDelta();
@@ -2464,8 +2466,9 @@ void CodeGenerator::AssembleConstructFrame() {
       __ Str(kWasmInstanceRegister,
              MemOperand(fp, WasmCompiledFrameConstants::kWasmInstanceOffset));
     }
-    __ Ldr(x2, FieldMemOperand(kWasmInstanceRegister,
-                               WasmInstanceObject::kCEntryStubOffset));
+    __ LoadTaggedPointerField(
+        x2, FieldMemOperand(kWasmInstanceRegister,
+                            WasmInstanceObject::kCEntryStubOffset));
     __ Mov(cp, Smi::zero());
     __ CallRuntimeWithCEntry(Runtime::kThrowWasmStackOverflow, x2);
     // We come from WebAssembly, there are no references for the GC.
...
@@ -40,7 +40,8 @@ void DebugCodegen::GenerateFrameDropperTrampoline(MacroAssembler* masm) {
   __ Mov(sp, fp);
   __ Pop(fp, lr);  // Frame, Return address.
-  __ Ldr(x0, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
+  __ LoadTaggedPointerField(
+      x0, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
   __ Ldrh(x0,
           FieldMemOperand(x0, SharedFunctionInfo::kFormalParameterCountOffset));
   __ mov(x2, x0);
...