Commit 585ca904 authored by Hao Xu, committed by V8 LUCI CQ

[x64][compiler] Optimize SmiTag/SmiUntag

... by selecting better instructions and avoiding sign-extend unsigned
smi.

Change-Id: I60b47f88dd34bfcda189716ac55d1fab13f3d4a1
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3819112
Commit-Queue: Hao A Xu <hao.a.xu@intel.com>
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Reviewed-by: Camillo Bruni <cbruni@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82463}
parent 5d13fc53
......@@ -355,6 +355,13 @@ void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
__ ldr(output, FieldMemOperand(source, offset));
}
// Loads the tagged signed (Smi) field at |source| + |offset| into |output|
// and then untags it, leaving the plain integer value in |output|.
// Generic two-step implementation: load via LoadTaggedSignedField, then
// SmiUntag; backends with a fused load-and-untag override this pattern.
void BaselineAssembler::LoadTaggedSignedFieldAndUntag(Register output,
Register source,
int offset) {
LoadTaggedSignedField(output, source, offset);
SmiUntag(output);
}
void BaselineAssembler::LoadTaggedAnyField(Register output, Register source,
int offset) {
__ ldr(output, FieldMemOperand(source, offset));
......
......@@ -406,6 +406,13 @@ void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
__ LoadTaggedSignedField(output, FieldMemOperand(source, offset));
}
// Loads the tagged signed (Smi) field at |source| + |offset| into |output|
// and then untags it, leaving the plain integer value in |output|.
// Implemented as load (LoadTaggedSignedField) followed by SmiUntag.
void BaselineAssembler::LoadTaggedSignedFieldAndUntag(Register output,
Register source,
int offset) {
LoadTaggedSignedField(output, source, offset);
SmiUntag(output);
}
void BaselineAssembler::LoadTaggedAnyField(Register output, Register source,
int offset) {
__ LoadAnyTaggedField(output, FieldMemOperand(source, offset));
......
......@@ -158,6 +158,8 @@ class BaselineAssembler {
int offset);
inline void LoadTaggedSignedField(Register output, Register source,
int offset);
inline void LoadTaggedSignedFieldAndUntag(Register output, Register source,
int offset);
inline void LoadTaggedAnyField(Register output, Register source, int offset);
inline void LoadWord16FieldZeroExtend(Register output, Register source,
int offset);
......
......@@ -2198,8 +2198,8 @@ void BaselineCompiler::VisitSwitchOnGeneratorState() {
__ JumpIfRoot(generator_object, RootIndex::kUndefinedValue, &fallthrough);
Register continuation = scratch_scope.AcquireScratch();
__ LoadTaggedAnyField(continuation, generator_object,
JSGeneratorObject::kContinuationOffset);
__ LoadTaggedSignedFieldAndUntag(continuation, generator_object,
JSGeneratorObject::kContinuationOffset);
__ StoreTaggedSignedField(
generator_object, JSGeneratorObject::kContinuationOffset,
Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
......@@ -2220,7 +2220,6 @@ void BaselineCompiler::VisitSwitchOnGeneratorState() {
for (interpreter::JumpTableTargetOffset offset : offsets) {
labels[offset.case_value] = EnsureLabel(offset.target_offset);
}
__ SmiUntag(continuation);
__ Switch(continuation, 0, labels.get(), offsets.size());
// We should never fall through this switch.
// TODO(v8:11429,leszeks): Maybe remove the fallthrough check in the Switch?
......
......@@ -337,6 +337,13 @@ void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
__ mov(output, FieldOperand(source, offset));
}
// Loads the tagged signed (Smi) field at |source| + |offset| into |output|
// and then untags it, leaving the plain integer value in |output|.
// Implemented as load (LoadTaggedSignedField) followed by SmiUntag.
void BaselineAssembler::LoadTaggedSignedFieldAndUntag(Register output,
Register source,
int offset) {
LoadTaggedSignedField(output, source, offset);
SmiUntag(output);
}
void BaselineAssembler::LoadTaggedAnyField(Register output, Register source,
int offset) {
__ mov(output, FieldOperand(source, offset));
......
......@@ -353,6 +353,12 @@ void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
int offset) {
__ Ld_d(output, FieldMemOperand(source, offset));
}
// Loads the tagged signed (Smi) field at |source| + |offset| into |output|
// and then untags it, leaving the plain integer value in |output|.
// Implemented as load (LoadTaggedSignedField) followed by SmiUntag.
void BaselineAssembler::LoadTaggedSignedFieldAndUntag(Register output,
Register source,
int offset) {
LoadTaggedSignedField(output, source, offset);
SmiUntag(output);
}
void BaselineAssembler::LoadTaggedAnyField(Register output, Register source,
int offset) {
__ Ld_d(output, FieldMemOperand(source, offset));
......
......@@ -363,6 +363,12 @@ void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
int offset) {
__ Lw(output, FieldMemOperand(source, offset));
}
// Loads the tagged signed (Smi) field at |source| + |offset| into |output|
// and then untags it, leaving the plain integer value in |output|.
// Implemented as load (LoadTaggedSignedField) followed by SmiUntag.
void BaselineAssembler::LoadTaggedSignedFieldAndUntag(Register output,
Register source,
int offset) {
LoadTaggedSignedField(output, source, offset);
SmiUntag(output);
}
void BaselineAssembler::LoadTaggedAnyField(Register output, Register source,
int offset) {
__ Lw(output, FieldMemOperand(source, offset));
......
......@@ -361,6 +361,12 @@ void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
int offset) {
__ Ld(output, FieldMemOperand(source, offset));
}
// Loads the tagged signed (Smi) field at |source| + |offset| into |output|
// and then untags it, leaving the plain integer value in |output|.
// Implemented as load (LoadTaggedSignedField) followed by SmiUntag.
void BaselineAssembler::LoadTaggedSignedFieldAndUntag(Register output,
Register source,
int offset) {
LoadTaggedSignedField(output, source, offset);
SmiUntag(output);
}
void BaselineAssembler::LoadTaggedAnyField(Register output, Register source,
int offset) {
__ Ld(output, FieldMemOperand(source, offset));
......
......@@ -507,6 +507,13 @@ void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
__ LoadTaggedSignedField(output, FieldMemOperand(source, offset), r0);
}
// Loads the tagged signed (Smi) field at |source| + |offset| into |output|
// and then untags it, leaving the plain integer value in |output|.
// Implemented as load (LoadTaggedSignedField) followed by SmiUntag.
void BaselineAssembler::LoadTaggedSignedFieldAndUntag(Register output,
Register source,
int offset) {
LoadTaggedSignedField(output, source, offset);
SmiUntag(output);
}
void BaselineAssembler::LoadTaggedAnyField(Register output, Register source,
int offset) {
ASM_CODE_COMMENT(masm_);
......
......@@ -337,6 +337,12 @@ void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
int offset) {
__ LoadTaggedSignedField(output, FieldMemOperand(source, offset));
}
// Loads the tagged signed (Smi) field at |source| + |offset| into |output|
// and then untags it, leaving the plain integer value in |output|.
// Implemented as load (LoadTaggedSignedField) followed by SmiUntag.
void BaselineAssembler::LoadTaggedSignedFieldAndUntag(Register output,
Register source,
int offset) {
LoadTaggedSignedField(output, source, offset);
SmiUntag(output);
}
void BaselineAssembler::LoadTaggedAnyField(Register output, Register source,
int offset) {
__ LoadAnyTaggedField(output, FieldMemOperand(source, offset));
......
......@@ -505,6 +505,13 @@ void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
__ LoadTaggedSignedField(output, FieldMemOperand(source, offset));
}
// Loads the tagged signed (Smi) field at |source| + |offset| into |output|
// and then untags it, leaving the plain integer value in |output|.
// Implemented as load (LoadTaggedSignedField) followed by SmiUntag.
void BaselineAssembler::LoadTaggedSignedFieldAndUntag(Register output,
Register source,
int offset) {
LoadTaggedSignedField(output, source, offset);
SmiUntag(output);
}
void BaselineAssembler::LoadTaggedAnyField(Register output, Register source,
int offset) {
ASM_CODE_COMMENT(masm_);
......
......@@ -338,6 +338,11 @@ void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
int offset) {
__ LoadTaggedSignedField(output, FieldOperand(source, offset));
}
// Loads the tagged signed (Smi) field at |source| + |offset| into |output|
// and untags it. On x64 the load and untag are fused into a single
// SmiUntagField operation instead of a separate load followed by a shift.
void BaselineAssembler::LoadTaggedSignedFieldAndUntag(Register output,
Register source,
int offset) {
__ SmiUntagField(output, FieldOperand(source, offset));
}
void BaselineAssembler::LoadTaggedAnyField(Register output, Register source,
int offset) {
__ LoadAnyTaggedField(output, FieldOperand(source, offset));
......@@ -630,7 +635,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
__ CallRuntime(Runtime::kBytecodeBudgetInterrupt, 1);
__ Pop(kInterpreterAccumulatorRegister, params_size);
__ masm()->SmiUntag(params_size);
__ masm()->SmiUntagUnsigned(params_size);
}
__ Bind(&skip_interrupt_label);
}
......
......@@ -222,7 +222,8 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// Restore constructor function and argument count.
__ movq(rdi, Operand(rbp, ConstructFrameConstants::kConstructorOffset));
__ SmiUntag(rax, Operand(rbp, ConstructFrameConstants::kLengthOffset));
__ SmiUntagUnsigned(rax,
Operand(rbp, ConstructFrameConstants::kLengthOffset));
// Check if we have enough stack space to push all arguments.
// Argument count in rax.
......@@ -1152,8 +1153,9 @@ void Builtins::Generate_InterpreterEntryTrampoline(
// Get bytecode array and bytecode offset from the stack frame.
__ movq(kInterpreterBytecodeArrayRegister,
Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
__ SmiUntag(kInterpreterBytecodeOffsetRegister,
Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
__ SmiUntagUnsigned(
kInterpreterBytecodeOffsetRegister,
Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
// Either return, or advance to the next bytecode and dispatch.
Label do_return;
......@@ -1449,8 +1451,9 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
}
// Get the target bytecode offset from the frame.
__ SmiUntag(kInterpreterBytecodeOffsetRegister,
Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
__ SmiUntagUnsigned(
kInterpreterBytecodeOffsetRegister,
Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
if (FLAG_debug_code) {
Label okay;
......@@ -1475,8 +1478,9 @@ void Builtins::Generate_InterpreterEnterAtNextBytecode(MacroAssembler* masm) {
// Get bytecode array and bytecode offset from the stack frame.
__ movq(kInterpreterBytecodeArrayRegister,
Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
__ SmiUntag(kInterpreterBytecodeOffsetRegister,
Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
__ SmiUntagUnsigned(
kInterpreterBytecodeOffsetRegister,
Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
Label enter_bytecode, function_entry_bytecode;
__ cmpq(kInterpreterBytecodeOffsetRegister,
......@@ -1679,7 +1683,7 @@ void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
int code = config->GetAllocatableGeneralCode(i);
__ popq(Register::from_code(code));
if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
__ SmiUntag(Register::from_code(code));
__ SmiUntagUnsigned(Register::from_code(code));
}
}
if (with_result && java_script_builtin) {
......@@ -2238,7 +2242,7 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
__ movq(rcx, rax);
__ Pop(rdi);
__ Pop(rax);
__ SmiUntag(rax);
__ SmiUntagUnsigned(rax);
}
__ LoadTaggedPointerField(
rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
......@@ -2273,7 +2277,7 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
Label no_bound_arguments;
__ LoadTaggedPointerField(
rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
__ SmiUntagField(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
__ SmiUntagFieldUnsigned(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
__ testl(rbx, rbx);
__ j(zero, &no_bound_arguments);
{
......@@ -2315,7 +2319,8 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
Label loop;
__ LoadTaggedPointerField(
rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
__ SmiUntagField(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
__ SmiUntagFieldUnsigned(rbx,
FieldOperand(rcx, FixedArray::kLengthOffset));
__ addq(rax, rbx); // Adjust effective number of arguments.
__ bind(&loop);
// Instead of doing decl(rbx) here subtract kTaggedSize from the header
......@@ -2707,7 +2712,7 @@ void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
__ CallRuntime(Runtime::kWasmCompileLazy, 3);
// The runtime function returns the jump table slot offset as a Smi. Use
// that to compute the jump target in r15.
__ SmiUntag(kReturnRegister0);
__ SmiUntagUnsigned(kReturnRegister0);
__ movq(r15, kReturnRegister0);
// Restore registers.
......@@ -3315,10 +3320,8 @@ void GenericJSToWasmWrapperHelper(MacroAssembler* masm, bool stack_switch) {
__ cmpq(valuetype, Immediate(wasm::kWasmI32.raw_bit_field()));
__ j(not_equal, &convert_param);
__ JumpIfNotSmi(param, &convert_param);
// Change the param from Smi to int32.
__ SmiUntag(param);
// Zero extend.
__ movl(param, param);
// Change the param from Smi to int32 (zero extend).
__ SmiToInt32(param);
// Place the param into the proper slot in Integer section.
__ movq(MemOperand(current_int_param_slot, 0), param);
__ subq(current_int_param_slot, Immediate(kSystemPointerSize));
......@@ -5044,7 +5047,7 @@ void Generate_BaselineOrInterpreterEntry(MacroAssembler* masm,
__ j(not_equal, &install_baseline_code);
// Save BytecodeOffset from the stack frame.
__ SmiUntag(
__ SmiUntagUnsigned(
kInterpreterBytecodeOffsetRegister,
MemOperand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
// Replace BytecodeOffset with the feedback vector.
......
......@@ -10,85 +10,86 @@
namespace v8 {
namespace internal {
#define ABORT_MESSAGES_LIST(V) \
V(kNoReason, "no reason") \
\
V(k32BitValueInRegisterIsNotZeroExtended, \
"32 bit value in register is not zero-extended") \
V(kAPICallReturnedInvalidObject, "API call returned invalid object") \
V(kAllocatingNonEmptyPackedArray, "Allocating non-empty packed array") \
V(kAllocationIsNotDoubleAligned, "Allocation is not double aligned") \
V(kExpectedOptimizationSentinel, \
"Expected optimized code cell or optimization sentinel") \
V(kExpectedUndefinedOrCell, "Expected undefined or cell in register") \
V(kExpectedFeedbackVector, "Expected feedback vector") \
V(kExpectedBaselineData, "Expected baseline data") \
V(kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, \
"The function_data field should be a BytecodeArray on interpreter entry") \
V(kInputStringTooLong, "Input string too long") \
V(kInvalidBytecode, "Invalid bytecode") \
V(kInvalidBytecodeAdvance, "Cannot advance current bytecode, ") \
V(kInvalidHandleScopeLevel, "Invalid HandleScope level") \
V(kInvalidJumpTableIndex, "Invalid jump table index") \
V(kInvalidParametersAndRegistersInGenerator, \
"invalid parameters and registers in generator") \
V(kMissingBytecodeArray, "Missing bytecode array from function") \
V(kObjectNotTagged, "The object is not tagged") \
V(kObjectTagged, "The object is tagged") \
V(kOffsetOutOfRange, "Offset out of range") \
V(kOperandIsASmi, "Operand is a smi") \
V(kOperandIsASmiAndNotABoundFunction, \
"Operand is a smi and not a bound function") \
V(kOperandIsASmiAndNotAConstructor, \
"Operand is a smi and not a constructor") \
V(kOperandIsASmiAndNotAFunction, "Operand is a smi and not a function") \
V(kOperandIsASmiAndNotAGeneratorObject, \
"Operand is a smi and not a generator object") \
V(kOperandIsCleared, "Operand is cleared") \
V(kOperandIsNotABoundFunction, "Operand is not a bound function") \
V(kOperandIsNotAConstructor, "Operand is not a constructor") \
V(kOperandIsNotAFixedArray, "Operand is not a fixed array") \
V(kOperandIsNotAFunction, "Operand is not a function") \
V(kOperandIsNotACallableFunction, "Operand is not a callable function") \
V(kOperandIsNotAGeneratorObject, "Operand is not a generator object") \
V(kOperandIsNotACodeT, "Operand is not a CodeT") \
V(kOperandIsNotASmi, "Operand is not a smi") \
V(kPromiseAlreadySettled, "Promise already settled") \
V(kReceivedInvalidReturnAddress, "Received invalid return address") \
V(kRegisterDidNotMatchExpectedRoot, "Register did not match expected root") \
V(kReturnAddressNotFoundInFrame, "Return address not found in frame") \
V(kShouldNotDirectlyEnterOsrFunction, \
"Should not directly enter OSR-compiled function") \
V(kStackAccessBelowStackPointer, "Stack access below stack pointer") \
V(kStackFrameTypesMustMatch, "Stack frame types must match") \
V(kUnalignedCellInWriteBarrier, "Unaligned cell in write barrier") \
V(kUnexpectedAdditionalPopValue, "Unexpected additional pop value") \
V(kUnexpectedElementsKindInArrayConstructor, \
"Unexpected ElementsKind in array constructor") \
V(kUnexpectedFPCRMode, "Unexpected FPCR mode.") \
V(kUnexpectedFunctionIDForInvokeIntrinsic, \
"Unexpected runtime function id for the InvokeIntrinsic bytecode") \
V(kUnexpectedInitialMapForArrayFunction, \
"Unexpected initial map for Array function") \
V(kUnexpectedLevelAfterReturnFromApiCall, \
"Unexpected level after return from api call") \
V(kUnexpectedNegativeValue, "Unexpected negative value") \
V(kUnexpectedReturnFromFrameDropper, \
"Unexpectedly returned from dropping frames") \
V(kUnexpectedReturnFromThrow, "Unexpectedly returned from a throw") \
V(kUnexpectedReturnFromWasmTrap, \
"Should not return after throwing a wasm trap") \
V(kUnexpectedStackPointer, "The stack pointer is not the expected value") \
V(kUnexpectedValue, "Unexpected value") \
V(kUnsupportedModuleOperation, "Unsupported module operation") \
V(kUnsupportedNonPrimitiveCompare, "Unsupported non-primitive compare") \
V(kWrongAddressOrValuePassedToRecordWrite, \
"Wrong address or value passed to RecordWrite") \
V(kWrongArgumentCountForInvokeIntrinsic, \
"Wrong number of arguments for intrinsic") \
V(kWrongFunctionCodeStart, "Wrong value in code start register passed") \
V(kWrongFunctionContext, "Wrong context passed to function") \
V(kUnexpectedThreadInWasmSet, "thread_in_wasm flag was already set") \
#define ABORT_MESSAGES_LIST(V) \
V(kNoReason, "no reason") \
\
V(k32BitValueInRegisterIsNotZeroExtended, \
"32 bit value in register is not zero-extended") \
V(kSignedBitOfSmiIsNotZero, "Signed bit of 31 bit smi register is not zero") \
V(kAPICallReturnedInvalidObject, "API call returned invalid object") \
V(kAllocatingNonEmptyPackedArray, "Allocating non-empty packed array") \
V(kAllocationIsNotDoubleAligned, "Allocation is not double aligned") \
V(kExpectedOptimizationSentinel, \
"Expected optimized code cell or optimization sentinel") \
V(kExpectedUndefinedOrCell, "Expected undefined or cell in register") \
V(kExpectedFeedbackVector, "Expected feedback vector") \
V(kExpectedBaselineData, "Expected baseline data") \
V(kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, \
"The function_data field should be a BytecodeArray on interpreter entry") \
V(kInputStringTooLong, "Input string too long") \
V(kInvalidBytecode, "Invalid bytecode") \
V(kInvalidBytecodeAdvance, "Cannot advance current bytecode, ") \
V(kInvalidHandleScopeLevel, "Invalid HandleScope level") \
V(kInvalidJumpTableIndex, "Invalid jump table index") \
V(kInvalidParametersAndRegistersInGenerator, \
"invalid parameters and registers in generator") \
V(kMissingBytecodeArray, "Missing bytecode array from function") \
V(kObjectNotTagged, "The object is not tagged") \
V(kObjectTagged, "The object is tagged") \
V(kOffsetOutOfRange, "Offset out of range") \
V(kOperandIsASmi, "Operand is a smi") \
V(kOperandIsASmiAndNotABoundFunction, \
"Operand is a smi and not a bound function") \
V(kOperandIsASmiAndNotAConstructor, \
"Operand is a smi and not a constructor") \
V(kOperandIsASmiAndNotAFunction, "Operand is a smi and not a function") \
V(kOperandIsASmiAndNotAGeneratorObject, \
"Operand is a smi and not a generator object") \
V(kOperandIsCleared, "Operand is cleared") \
V(kOperandIsNotABoundFunction, "Operand is not a bound function") \
V(kOperandIsNotAConstructor, "Operand is not a constructor") \
V(kOperandIsNotAFixedArray, "Operand is not a fixed array") \
V(kOperandIsNotAFunction, "Operand is not a function") \
V(kOperandIsNotACallableFunction, "Operand is not a callable function") \
V(kOperandIsNotAGeneratorObject, "Operand is not a generator object") \
V(kOperandIsNotACodeT, "Operand is not a CodeT") \
V(kOperandIsNotASmi, "Operand is not a smi") \
V(kPromiseAlreadySettled, "Promise already settled") \
V(kReceivedInvalidReturnAddress, "Received invalid return address") \
V(kRegisterDidNotMatchExpectedRoot, "Register did not match expected root") \
V(kReturnAddressNotFoundInFrame, "Return address not found in frame") \
V(kShouldNotDirectlyEnterOsrFunction, \
"Should not directly enter OSR-compiled function") \
V(kStackAccessBelowStackPointer, "Stack access below stack pointer") \
V(kStackFrameTypesMustMatch, "Stack frame types must match") \
V(kUnalignedCellInWriteBarrier, "Unaligned cell in write barrier") \
V(kUnexpectedAdditionalPopValue, "Unexpected additional pop value") \
V(kUnexpectedElementsKindInArrayConstructor, \
"Unexpected ElementsKind in array constructor") \
V(kUnexpectedFPCRMode, "Unexpected FPCR mode.") \
V(kUnexpectedFunctionIDForInvokeIntrinsic, \
"Unexpected runtime function id for the InvokeIntrinsic bytecode") \
V(kUnexpectedInitialMapForArrayFunction, \
"Unexpected initial map for Array function") \
V(kUnexpectedLevelAfterReturnFromApiCall, \
"Unexpected level after return from api call") \
V(kUnexpectedNegativeValue, "Unexpected negative value") \
V(kUnexpectedReturnFromFrameDropper, \
"Unexpectedly returned from dropping frames") \
V(kUnexpectedReturnFromThrow, "Unexpectedly returned from a throw") \
V(kUnexpectedReturnFromWasmTrap, \
"Should not return after throwing a wasm trap") \
V(kUnexpectedStackPointer, "The stack pointer is not the expected value") \
V(kUnexpectedValue, "Unexpected value") \
V(kUnsupportedModuleOperation, "Unsupported module operation") \
V(kUnsupportedNonPrimitiveCompare, "Unsupported non-primitive compare") \
V(kWrongAddressOrValuePassedToRecordWrite, \
"Wrong address or value passed to RecordWrite") \
V(kWrongArgumentCountForInvokeIntrinsic, \
"Wrong number of arguments for intrinsic") \
V(kWrongFunctionCodeStart, "Wrong value in code start register passed") \
V(kWrongFunctionContext, "Wrong context passed to function") \
V(kUnexpectedThreadInWasmSet, "thread_in_wasm flag was already set") \
V(kUnexpectedThreadInWasmUnset, "thread_in_wasm flag was not set")
#define BAILOUT_MESSAGES_LIST(V) \
......
......@@ -295,6 +295,10 @@ void TurboAssembler::SmiUntagField(Register dst, Operand src) {
SmiUntag(dst, src);
}
// Loads a Smi from the field |src| into |dst| and untags it via
// SmiUntagUnsigned, i.e. with a zero-extending (logical) shift. Only valid
// when the stored Smi is known to be non-negative.
void TurboAssembler::SmiUntagFieldUnsigned(Register dst, Operand src) {
SmiUntagUnsigned(dst, src);
}
void TurboAssembler::StoreTaggedField(Operand dst_field_operand,
Immediate value) {
if (COMPRESS_POINTERS_BOOL) {
......@@ -868,7 +872,7 @@ void MacroAssembler::GenerateTailCallToReturnedCode(
// Restore target function, new target and actual argument count.
Pop(kJavaScriptCallArgCountRegister);
SmiUntag(kJavaScriptCallArgCountRegister);
SmiUntagUnsigned(kJavaScriptCallArgCountRegister);
Pop(kJavaScriptCallNewTargetRegister);
Pop(kJavaScriptCallTargetRegister);
}
......@@ -1457,7 +1461,8 @@ void TurboAssembler::SmiTag(Register reg) {
static_assert(kSmiTag == 0);
DCHECK(SmiValuesAre32Bits() || SmiValuesAre31Bits());
if (COMPRESS_POINTERS_BOOL) {
shll(reg, Immediate(kSmiShift));
DCHECK_EQ(kSmiShift, 1);
addl(reg, reg);
} else {
shlq(reg, Immediate(kSmiShift));
}
......@@ -1484,6 +1489,17 @@ void TurboAssembler::SmiUntag(Register reg) {
sarq(reg, Immediate(kSmiShift));
}
// Untags the Smi in |reg| in place, assuming it is non-negative: the tag is
// removed with a logical right shift (shrl/shrq), so the result is
// zero-extended rather than sign-extended as in SmiUntag.
void TurboAssembler::SmiUntagUnsigned(Register reg) {
static_assert(kSmiTag == 0);
DCHECK(SmiValuesAre32Bits() || SmiValuesAre31Bits());
if (COMPRESS_POINTERS_BOOL) {
// 31-bit Smi in the low 32 bits of |reg|: verify (debug-code only) that
// the value really is non-negative before using a logical shift.
AssertSignedBitOfSmiIsZero(reg);
shrl(reg, Immediate(kSmiShift));
} else {
shrq(reg, Immediate(kSmiShift));
}
}
void TurboAssembler::SmiUntag(Register dst, Register src) {
DCHECK(dst != src);
if (COMPRESS_POINTERS_BOOL) {
......@@ -1500,9 +1516,8 @@ void TurboAssembler::SmiUntag(Register dst, Register src) {
void TurboAssembler::SmiUntag(Register dst, Operand src) {
if (SmiValuesAre32Bits()) {
movl(dst, Operand(src, kSmiShift / kBitsPerByte));
// Sign extend to 64-bit.
movsxlq(dst, dst);
movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
} else {
DCHECK(SmiValuesAre31Bits());
if (COMPRESS_POINTERS_BOOL) {
......@@ -1514,6 +1529,23 @@ void TurboAssembler::SmiUntag(Register dst, Operand src) {
}
}
// Loads the Smi at |src| into |dst| and untags it, assuming the Smi is
// non-negative so a zero-extending form can be used instead of the
// sign-extending one in SmiUntag(Register, Operand).
void TurboAssembler::SmiUntagUnsigned(Register dst, Operand src) {
if (SmiValuesAre32Bits()) {
// The 32-bit payload lives in the upper half of the 64-bit word; movl of
// that half zero-extends to 64-bit, so no shift is needed.
movl(dst, Operand(src, kSmiShift / kBitsPerByte));
} else {
DCHECK(SmiValuesAre31Bits());
if (COMPRESS_POINTERS_BOOL) {
// 31-bit Smi in a 32-bit slot: load, check (debug-code only) that the
// value is non-negative, then drop the tag with a logical shift.
movl(dst, src);
AssertSignedBitOfSmiIsZero(dst);
shrl(dst, Immediate(kSmiShift));
} else {
movq(dst, src);
shrq(dst, Immediate(kSmiShift));
}
}
}
void TurboAssembler::SmiToInt32(Register reg) {
static_assert(kSmiTag == 0);
DCHECK(SmiValuesAre32Bits() || SmiValuesAre31Bits());
......@@ -2126,7 +2158,7 @@ Operand TurboAssembler::EntryFromBuiltinAsOperand(Builtin builtin) {
Operand TurboAssembler::EntryFromBuiltinIndexAsOperand(Register builtin_index) {
if (SmiValuesAre32Bits()) {
// The builtin_index register contains the builtin index as a Smi.
SmiUntag(builtin_index);
SmiUntagUnsigned(builtin_index);
return Operand(kRootRegister, builtin_index, times_system_pointer_size,
IsolateData::builtin_entry_table_offset());
} else {
......@@ -2652,6 +2684,14 @@ void TurboAssembler::AssertZeroExtended(Register int32_register) {
Check(above, AbortReason::k32BitValueInRegisterIsNotZeroExtended);
}
// Debug-only check (enabled via --debug-code) that the 31-bit Smi held in
// the low 32 bits of |smi_register| is non-negative, i.e. that the sign bit
// of the tagged value is clear. SmiUntagUnsigned relies on this to untag
// with a logical right shift (shrl) instead of an arithmetic one.
void TurboAssembler::AssertSignedBitOfSmiIsZero(Register smi_register) {
  if (!FLAG_debug_code) return;
  ASM_CODE_COMMENT(this);
  // Only meaningful with pointer compression, where Smis are 31-bit values
  // tagged into the low 32 bits of the register.
  DCHECK(COMPRESS_POINTERS_BOOL);
  // The sign bit of the 32-bit tagged Smi is bit 31 (0x80000000). The
  // previous mask, 0x10000000, tested bit 28 -- an ordinary payload bit --
  // and would abort spuriously for valid non-negative Smis whose untagged
  // value is >= 2^27.
  testl(smi_register, Immediate(static_cast<int32_t>(0x80000000u)));
  Check(zero, AbortReason::kSignedBitOfSmiIsNotZero);
}
void MacroAssembler::AssertCodeT(Register object) {
if (!FLAG_debug_code) return;
ASM_CODE_COMMENT(this);
......
......@@ -353,9 +353,11 @@ class V8_EXPORT_PRIVATE TurboAssembler
// Convert smi to word-size sign-extended value.
void SmiUntag(Register reg);
void SmiUntagUnsigned(Register reg);
// Requires dst != src
void SmiUntag(Register dst, Register src);
void SmiUntag(Register dst, Operand src);
void SmiUntagUnsigned(Register dst, Operand src);
// Convert smi to 32-bit value.
void SmiToInt32(Register reg);
......@@ -462,6 +464,10 @@ class V8_EXPORT_PRIVATE TurboAssembler
// have zeros in the top 32 bits, enabled via --debug-code.
void AssertZeroExtended(Register reg) NOOP_UNLESS_DEBUG_CODE;
// Abort execution if the signed bit of smi register with pointer compression
// is not zero, enabled via --debug-code.
void AssertSignedBitOfSmiIsZero(Register smi) NOOP_UNLESS_DEBUG_CODE;
// Like Assert(), but always enabled.
void Check(Condition cc, AbortReason reason);
......@@ -600,6 +606,7 @@ class V8_EXPORT_PRIVATE TurboAssembler
// Loads a field containing smi value and untags it.
void SmiUntagField(Register dst, Operand src);
void SmiUntagFieldUnsigned(Register dst, Operand src);
// Compresses tagged value if necessary and stores it to given on-heap
// location.
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment