Commit cec5b750 authored by Leszek Swirski, committed by V8 LUCI CQ

Revert "Reland "[ptr-compr][x64][compiler] Support load map in compressed"

This reverts commit 63b37c0e.

Reason for revert: Seems to regress performance

Original change's description:
> Reland "[ptr-compr][x64][compiler] Support load map in compressed
> form"
>
> This is a reland of commit 6ca3adb9
>
> Fixes a build failure with V8_MAP_PACKING.
>
> Original change's description:
> > [ptr-compr][x64][compiler] Support load map in compressed form
> >
> > ...to allow pointer decompression at use-site.
> >
> > Bug: v8:13056, v8:7703
> > Change-Id: If369286814c76340a945cc2a9fd863888a813080
> > Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3811737
> > Reviewed-by: Leszek Swirski <leszeks@chromium.org>
> > Commit-Queue: Hao A Xu <hao.a.xu@intel.com>
> > Cr-Commit-Position: refs/heads/main@{#82242}
>
> Bug: v8:13056, v8:7703
> Change-Id: Ic753558058f70f6ee7850019aac9235b87d0e56a
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3815779
> Commit-Queue: Hao A Xu <hao.a.xu@intel.com>
> Reviewed-by: Leszek Swirski <leszeks@chromium.org>
> Cr-Commit-Position: refs/heads/main@{#82322}

Bug: v8:13056, v8:7703
Change-Id: I8693af9189e214ec54a56149e0b29038e85838c2
Fixed: chromium:1352384
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3842931
Bot-Commit: Rubber Stamper <rubber-stamper@appspot.gserviceaccount.com>
Commit-Queue: Leszek Swirski <leszeks@chromium.org>
Auto-Submit: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82689}
parent d650d368
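
For context on the change being reverted: with pointer compression, a tagged field holds a 32-bit offset from a cage base kept in kPtrComprCageBaseRegister, and LoadMap normally decompresses eagerly by adding that base right after the load. The reverted optimization instead kept the compressed map word in the register, wrapped as a TaggedRegister, and folded the cage base into the memory operand at each use site (the Operand(kPtrComprCageBaseRegister, ..., times_1, ...) form deleted below). A rough sketch of the two shapes, using the overloads visible in this diff (illustrative only, not code from the commit):

```cpp
// Eager decompression -- the shape this revert restores. LoadMap leaves a
// full pointer in rcx, so CmpInstanceType can use a plain base+offset operand.
__ LoadMap(rcx, feedback_vector);
__ CmpInstanceType(rcx, FEEDBACK_VECTOR_TYPE);

// Deferred decompression -- the reverted optimization. The map stays
// compressed in rcx; the TaggedRegister-taking overloads fold the cage base
// into their memory operands at the use site, trading an add for more complex
// addressing and a longer-lived compressed value.
TaggedRegister map(rcx);
__ LoadMap(map, feedback_vector);
__ CmpInstanceType(map, FEEDBACK_VECTOR_TYPE);
```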
@@ -150,13 +150,6 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
   __ ldrh(type, FieldMemOperand(map, Map::kInstanceTypeOffset));
   JumpIf(cc, type, Operand(instance_type), target);
 }
-void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
-                                         InstanceType instance_type,
-                                         ScratchRegisterScope* scratch_scope,
-                                         Label* target, Label::Distance) {
-  JumpIfObjectType(cc, object, instance_type, scratch_scope->AcquireScratch(),
-                   target);
-}
 void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                            InstanceType instance_type,
                                            Label* target, Label::Distance) {
@@ -525,12 +518,6 @@ void BaselineAssembler::StaModuleVariable(Register context, Register value,
   StoreTaggedFieldWithWriteBarrier(context, Cell::kValueOffset, value);
 }
-void BaselineAssembler::LoadMapBitField(Register map_bit_field,
-                                        Register object) {
-  LoadMap(map_bit_field, object);
-  LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
-}
 void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
   __ add(lhs, lhs, Operand(rhs));
 }
......
@@ -149,13 +149,6 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
   __ Ldrh(type, FieldMemOperand(map, Map::kInstanceTypeOffset));
   JumpIf(cc, type, instance_type, target);
 }
-void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
-                                         InstanceType instance_type,
-                                         ScratchRegisterScope* scratch_scope,
-                                         Label* target, Label::Distance) {
-  JumpIfObjectType(cc, object, instance_type, scratch_scope->AcquireScratch(),
-                   target);
-}
 void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                            InstanceType instance_type,
                                            Label* target, Label::Distance) {
@@ -583,12 +576,6 @@ void BaselineAssembler::StaModuleVariable(Register context, Register value,
   StoreTaggedFieldWithWriteBarrier(context, Cell::kValueOffset, value);
 }
-void BaselineAssembler::LoadMapBitField(Register map_bit_field,
-                                        Register object) {
-  LoadMap(map_bit_field, object);
-  LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
-}
 void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
   if (SmiValuesAre31Bits()) {
     __ Add(lhs.W(), lhs.W(), Immediate(rhs));
......
@@ -67,11 +67,6 @@ class BaselineAssembler {
                                InstanceType instance_type, Register map,
                                Label* target,
                                Label::Distance distance = Label::kFar);
-  inline void JumpIfObjectType(Condition cc, Register object,
-                               InstanceType instance_type,
-                               ScratchRegisterScope* scratch_scope,
-                               Label* target,
-                               Label::Distance distance = Label::kFar);
   inline void JumpIfInstanceType(Condition cc, Register map,
                                  InstanceType instance_type, Label* target,
                                  Label::Distance distance = Label::kFar);
@@ -191,13 +186,6 @@ class BaselineAssembler {
                                     int32_t index);
   inline void LoadFixedArrayElement(TaggedRegister output, TaggedRegister array,
                                     int32_t index);
-  inline void LoadWord8Field(Register output, TaggedRegister source,
-                             int offset);
-  inline void LoadMap(TaggedRegister output, Register value);
-  inline void JumpIfObjectType(Condition cc, Register object,
-                               InstanceType instance_type, TaggedRegister map,
-                               Label* target,
-                               Label::Distance distance = Label::kFar);
 #endif
   // Falls through and sets scratch_and_result to 0 on failure, jumps to
@@ -222,8 +210,6 @@ class BaselineAssembler {
   inline void StaModuleVariable(Register context, Register value,
                                 int cell_index, uint32_t depth);
-  inline void LoadMapBitField(Register map_bit_field, Register object);
   inline void AddSmi(Register lhs, Smi rhs);
   inline void SmiUntag(Register value);
   inline void SmiUntag(Register output, Register value);
......
@@ -1515,7 +1515,8 @@ void BaselineCompiler::VisitTestUndetectable() {
   __ JumpIfSmi(kInterpreterAccumulatorRegister, &is_smi, Label::kNear);
   Register map_bit_field = kInterpreterAccumulatorRegister;
-  __ LoadMapBitField(map_bit_field, kInterpreterAccumulatorRegister);
+  __ LoadMap(map_bit_field, kInterpreterAccumulatorRegister);
+  __ LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
   __ TestAndBranch(map_bit_field, Map::Bits1::IsUndetectableBit::kMask,
                    Condition::kZero, &not_undetectable, Label::kNear);
@@ -1558,8 +1559,8 @@ void BaselineCompiler::VisitTestTypeOf() {
       Label is_smi, is_heap_number;
       __ JumpIfSmi(kInterpreterAccumulatorRegister, &is_smi, Label::kNear);
       __ JumpIfObjectType(Condition::kEqual, kInterpreterAccumulatorRegister,
-                          HEAP_NUMBER_TYPE, &scratch_scope, &is_heap_number,
-                          Label::kNear);
+                          HEAP_NUMBER_TYPE, scratch_scope.AcquireScratch(),
+                          &is_heap_number, Label::kNear);
       __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kFalseValue);
       __ Jump(&done, Label::kNear);
@@ -1575,7 +1576,8 @@ void BaselineCompiler::VisitTestTypeOf() {
      static_assert(INTERNALIZED_STRING_TYPE == FIRST_TYPE);
       __ JumpIfObjectType(Condition::kGreaterThanEqual,
                           kInterpreterAccumulatorRegister, FIRST_NONSTRING_TYPE,
-                          &scratch_scope, &bad_instance_type, Label::kNear);
+                          scratch_scope.AcquireScratch(), &bad_instance_type,
+                          Label::kNear);
       __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kTrueValue);
       __ Jump(&done, Label::kNear);
@@ -1589,8 +1591,8 @@ void BaselineCompiler::VisitTestTypeOf() {
       Label is_smi, bad_instance_type;
       __ JumpIfSmi(kInterpreterAccumulatorRegister, &is_smi, Label::kNear);
       __ JumpIfObjectType(Condition::kNotEqual, kInterpreterAccumulatorRegister,
-                          SYMBOL_TYPE, &scratch_scope, &bad_instance_type,
-                          Label::kNear);
+                          SYMBOL_TYPE, scratch_scope.AcquireScratch(),
+                          &bad_instance_type, Label::kNear);
       __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kTrueValue);
       __ Jump(&done, Label::kNear);
@@ -1619,8 +1621,8 @@ void BaselineCompiler::VisitTestTypeOf() {
       Label is_smi, bad_instance_type;
       __ JumpIfSmi(kInterpreterAccumulatorRegister, &is_smi, Label::kNear);
       __ JumpIfObjectType(Condition::kNotEqual, kInterpreterAccumulatorRegister,
-                          BIGINT_TYPE, &scratch_scope, &bad_instance_type,
-                          Label::kNear);
+                          BIGINT_TYPE, scratch_scope.AcquireScratch(),
+                          &bad_instance_type, Label::kNear);
       __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kTrueValue);
       __ Jump(&done, Label::kNear);
@@ -1640,7 +1642,8 @@ void BaselineCompiler::VisitTestTypeOf() {
       // All other undetectable maps are typeof undefined.
       Register map_bit_field = kInterpreterAccumulatorRegister;
-      __ LoadMapBitField(map_bit_field, kInterpreterAccumulatorRegister);
+      __ LoadMap(map_bit_field, kInterpreterAccumulatorRegister);
+      __ LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
       __ TestAndBranch(map_bit_field, Map::Bits1::IsUndetectableBit::kMask,
                        Condition::kZero, &not_undetectable, Label::kNear);
@@ -1659,7 +1662,8 @@ void BaselineCompiler::VisitTestTypeOf() {
       // Check if the map is callable but not undetectable.
       Register map_bit_field = kInterpreterAccumulatorRegister;
-      __ LoadMapBitField(map_bit_field, kInterpreterAccumulatorRegister);
+      __ LoadMap(map_bit_field, kInterpreterAccumulatorRegister);
+      __ LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
       __ TestAndBranch(map_bit_field, Map::Bits1::IsCallableBit::kMask,
                        Condition::kZero, &not_callable, Label::kNear);
       __ TestAndBranch(map_bit_field, Map::Bits1::IsUndetectableBit::kMask,
@@ -2042,7 +2046,8 @@ void BaselineCompiler::VisitJumpIfJSReceiver() {
   __ JumpIfSmi(kInterpreterAccumulatorRegister, &is_smi, Label::kNear);
   __ JumpIfObjectType(Condition::kLessThan, kInterpreterAccumulatorRegister,
-                      FIRST_JS_RECEIVER_TYPE, &scratch_scope, &dont_jump);
+                      FIRST_JS_RECEIVER_TYPE, scratch_scope.AcquireScratch(),
+                      &dont_jump);
   UpdateInterruptBudgetAndDoInterpreterJump();
   __ Bind(&is_smi);
@@ -2179,7 +2184,8 @@ void BaselineCompiler::VisitThrowIfNotSuperConstructor() {
   Register reg = scratch_scope.AcquireScratch();
   LoadRegister(reg, 0);
   Register map_bit_field = scratch_scope.AcquireScratch();
-  __ LoadMapBitField(map_bit_field, reg);
+  __ LoadMap(map_bit_field, reg);
+  __ LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
   __ TestAndBranch(map_bit_field, Map::Bits1::IsConstructorBit::kMask,
                    Condition::kNotZero, &done, Label::kNear);
......
@@ -155,14 +155,6 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
   __ CmpObjectType(object, instance_type, map);
   __ j(AsMasmCondition(cc), target, distance);
 }
-void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
-                                         InstanceType instance_type,
-                                         ScratchRegisterScope* scratch_scope,
-                                         Label* target,
-                                         Label::Distance distance) {
-  JumpIfObjectType(cc, object, instance_type, scratch_scope->AcquireScratch(),
-                   target, distance);
-}
 void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                            InstanceType instance_type,
                                            Label* target,
@@ -493,12 +485,6 @@ void BaselineAssembler::StaModuleVariable(Register context, Register value,
   StoreTaggedFieldWithWriteBarrier(context, Cell::kValueOffset, value);
 }
-void BaselineAssembler::LoadMapBitField(Register map_bit_field,
-                                        Register object) {
-  LoadMap(map_bit_field, object);
-  LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
-}
 void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
   if (rhs.value() == 0) return;
   __ add(lhs, Immediate(rhs));
......
@@ -139,13 +139,6 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
   __ GetObjectType(object, map, type);
   __ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
 }
-void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
-                                         InstanceType instance_type,
-                                         ScratchRegisterScope* scratch_scope,
-                                         Label* target, Label::Distance) {
-  JumpIfObjectType(cc, object, instance_type, scratch_scope->AcquireScratch(),
-                   target);
-}
 void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                            InstanceType instance_type,
                                            Label* target, Label::Distance) {
@@ -495,12 +488,6 @@ void BaselineAssembler::StaModuleVariable(Register context, Register value,
   StoreTaggedFieldWithWriteBarrier(context, Cell::kValueOffset, value);
 }
-void BaselineAssembler::LoadMapBitField(Register map_bit_field,
-                                        Register object) {
-  LoadMap(map_bit_field, object);
-  LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
-}
 void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
   __ Add_d(lhs, lhs, Operand(rhs));
 }
......
@@ -157,13 +157,6 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
   __ GetObjectType(object, map, type);
   __ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
 }
-void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
-                                         InstanceType instance_type,
-                                         ScratchRegisterScope* scratch_scope,
-                                         Label* target, Label::Distance) {
-  JumpIfObjectType(cc, object, instance_type, scratch_scope->AcquireScratch(),
-                   target);
-}
 void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                            InstanceType instance_type,
                                            Label* target, Label::Distance) {
@@ -486,12 +479,6 @@ void BaselineAssembler::StaContextSlot(Register context, Register value,
                                      value);
 }
-void BaselineAssembler::LoadMapBitField(Register map_bit_field,
-                                        Register object) {
-  LoadMap(map_bit_field, object);
-  LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
-}
 void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
   __ Addu(lhs, lhs, Operand(rhs));
 }
......
@@ -139,13 +139,6 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
   __ GetObjectType(object, map, type);
   __ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
 }
-void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
-                                         InstanceType instance_type,
-                                         ScratchRegisterScope* scratch_scope,
-                                         Label* target, Label::Distance) {
-  JumpIfObjectType(cc, object, instance_type, scratch_scope->AcquireScratch(),
-                   target);
-}
 void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                            InstanceType instance_type,
                                            Label* target, Label::Distance) {
@@ -505,12 +498,6 @@ void BaselineAssembler::StaModuleVariable(Register context, Register value,
   StoreTaggedFieldWithWriteBarrier(context, Cell::kValueOffset, value);
 }
-void BaselineAssembler::LoadMapBitField(Register map_bit_field,
-                                        Register object) {
-  LoadMap(map_bit_field, object);
-  LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
-}
 void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
   __ Daddu(lhs, lhs, Operand(rhs));
 }
......
@@ -246,14 +246,6 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
   JumpIf(cc, type, Operand(instance_type), target);
 }
-void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
-                                         InstanceType instance_type,
-                                         ScratchRegisterScope* scratch_scope,
-                                         Label* target, Label::Distance) {
-  JumpIfObjectType(cc, object, instance_type, scratch_scope->AcquireScratch(),
-                   target);
-}
 void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                            InstanceType instance_type,
                                            Label* target, Label::Distance) {
@@ -657,12 +649,6 @@ void BaselineAssembler::StaModuleVariable(Register context, Register value,
   StoreTaggedFieldWithWriteBarrier(context, Cell::kValueOffset, value);
 }
-void BaselineAssembler::LoadMapBitField(Register map_bit_field,
-                                        Register object) {
-  LoadMap(map_bit_field, object);
-  LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
-}
 void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
   if (rhs.value() == 0) return;
   __ LoadSmiLiteral(r0, rhs);
......
@@ -137,13 +137,6 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
   __ GetObjectType(object, map, type);
   __ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
 }
-void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
-                                         InstanceType instance_type,
-                                         ScratchRegisterScope* scratch_scope,
-                                         Label* target, Label::Distance) {
-  JumpIfObjectType(cc, object, instance_type, scratch_scope->AcquireScratch(),
-                   target);
-}
 void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                            InstanceType instance_type,
                                            Label* target, Label::Distance) {
@@ -503,12 +496,6 @@ void BaselineAssembler::StaModuleVariable(Register context, Register value,
   StoreTaggedFieldWithWriteBarrier(context, Cell::kValueOffset, value);
 }
-void BaselineAssembler::LoadMapBitField(Register map_bit_field,
-                                        Register object) {
-  LoadMap(map_bit_field, object);
-  LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
-}
 void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
   ASM_CODE_COMMENT(masm_);
   if (SmiValuesAre31Bits()) {
......
@@ -246,14 +246,6 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
   JumpIf(cc, type, Operand(instance_type), target);
 }
-void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
-                                         InstanceType instance_type,
-                                         ScratchRegisterScope* scratch_scope,
-                                         Label* target, Label::Distance) {
-  JumpIfObjectType(cc, object, instance_type, scratch_scope->AcquireScratch(),
-                   target);
-}
 void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                            InstanceType instance_type,
                                            Label* target, Label::Distance) {
@@ -655,12 +647,6 @@ void BaselineAssembler::StaModuleVariable(Register context, Register value,
   StoreTaggedFieldWithWriteBarrier(context, Cell::kValueOffset, value);
 }
-void BaselineAssembler::LoadMapBitField(Register map_bit_field,
-                                        Register object) {
-  LoadMap(map_bit_field, object);
-  LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
-}
 void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
   if (rhs.value() == 0) return;
   __ LoadSmiLiteral(r0, rhs);
......
@@ -146,23 +146,6 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
   __ CmpObjectType(object, instance_type, map);
   __ j(AsMasmCondition(cc), target, distance);
 }
-void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
-                                         InstanceType instance_type,
-                                         TaggedRegister map, Label* target,
-                                         Label::Distance distance) {
-  __ AssertNotSmi(object);
-  __ CmpObjectType(object, instance_type, map);
-  __ j(AsMasmCondition(cc), target, distance);
-}
-void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
-                                         InstanceType instance_type,
-                                         ScratchRegisterScope* scratch_scope,
-                                         Label* target,
-                                         Label::Distance distance) {
-  JumpIfObjectType(cc, object, instance_type,
-                   TaggedRegister(scratch_scope->AcquireScratch()), target,
-                   distance);
-}
 void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                            InstanceType instance_type,
                                            Label* target,
@@ -355,10 +338,6 @@ void BaselineAssembler::LoadWord8Field(Register output, Register source,
                                        int offset) {
   __ movb(output, FieldOperand(source, offset));
 }
-void BaselineAssembler::LoadWord8Field(Register output, TaggedRegister source,
-                                       int offset) {
-  __ movb(output, FieldOperand(source, offset));
-}
 void BaselineAssembler::StoreTaggedSignedField(Register target, int offset,
                                                Smi value) {
   __ StoreTaggedSignedField(FieldOperand(target, offset), value);
@@ -419,10 +398,6 @@ void BaselineAssembler::LoadFixedArrayElement(TaggedRegister output,
                    FixedArray::kHeaderSize + index * kTaggedSize);
 }
-void BaselineAssembler::LoadMap(TaggedRegister output, Register value) {
-  __ LoadMap(output, value);
-}
 void BaselineAssembler::TryLoadOptimizedOsrCode(Register scratch_and_result,
                                                 Register feedback_vector,
                                                 FeedbackSlot slot,
@@ -580,13 +555,6 @@ void BaselineAssembler::StaModuleVariable(Register context, Register value,
   StoreTaggedFieldWithWriteBarrier(context, Cell::kValueOffset, value);
 }
-void BaselineAssembler::LoadMapBitField(Register map_bit_field,
-                                        Register object) {
-  TaggedRegister map(map_bit_field);
-  LoadMap(map, object);
-  LoadWord8Field(map_bit_field, map, Map::kBitFieldOffset);
-}
 void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
   if (rhs.value() == 0) return;
   if (SmiValuesAre31Bits()) {
......
@@ -292,8 +292,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
     // If the type of the result (stored in its map) is less than
    // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
     static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
-    TaggedRegister map(rcx);
-    __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, map);
+    __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
     __ j(above_equal, &leave_and_return, Label::kNear);
     __ jmp(&use_receiver);
@@ -1019,7 +1018,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(
   // if so, call into CompileLazy.
   Label compile_lazy;
   __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
-                   TaggedRegister(kScratchRegister));
+                   kScratchRegister);
   __ j(not_equal, &compile_lazy);
   // Load the feedback vector from the closure.
@@ -1032,9 +1031,8 @@ void Builtins::Generate_InterpreterEntryTrampoline(
   Label push_stack_frame;
   // Check if feedback vector is valid. If valid, check for optimized code
   // and update invocation count. Otherwise, setup the stack frame.
-  TaggedRegister map(rcx);
-  __ LoadMap(map, feedback_vector);
-  __ CmpInstanceType(map, FEEDBACK_VECTOR_TYPE);
+  __ LoadMap(rcx, feedback_vector);
+  __ CmpInstanceType(rcx, FEEDBACK_VECTOR_TYPE);
   __ j(not_equal, &push_stack_frame);
   // Check the tiering state.
@@ -1213,9 +1211,8 @@ void Builtins::Generate_InterpreterEntryTrampoline(
   Label install_baseline_code;
   // Check if feedback vector is valid. If not, call prepare for baseline to
   // allocate it.
-  TaggedRegister map(rcx);
-  __ LoadMap(map, feedback_vector);
-  __ CmpInstanceType(map, FEEDBACK_VECTOR_TYPE);
+  __ LoadMap(rcx, feedback_vector);
+  __ CmpInstanceType(rcx, FEEDBACK_VECTOR_TYPE);
   __ j(not_equal, &install_baseline_code);
   // Check the tiering state.
@@ -1409,8 +1406,7 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
   __ LoadTaggedPointerField(
       rbx, FieldOperand(shared_function_info,
                         SharedFunctionInfo::kFunctionDataOffset));
-  __ CmpObjectType(rbx, INTERPRETER_DATA_TYPE,
-                   TaggedRegister(kScratchRegister));
+  __ CmpObjectType(rbx, INTERPRETER_DATA_TYPE, kScratchRegister);
   __ j(not_equal, &builtin_trampoline, Label::kNear);
   __ LoadTaggedPointerField(
@@ -1444,7 +1440,7 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
   // Check function data field is actually a BytecodeArray object.
   __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
   __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
-                   TaggedRegister(rbx));
+                   rbx);
   __ Assert(
       equal,
       AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
@@ -2093,7 +2089,8 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
   if (mode == CallOrConstructMode::kConstruct) {
     Label new_target_constructor, new_target_not_constructor;
     __ JumpIfSmi(rdx, &new_target_not_constructor, Label::kNear);
-    TaggedRegister map(rbx);
-    __ LoadMap(map, rdx);
-    __ testb(FieldOperand(map, Map::kBitFieldOffset),
+    __ LoadMap(rbx, rdx);
+    __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
              Immediate(Map::Bits1::IsConstructorBit::kMask));
     __ j(not_zero, &new_target_constructor, Label::kNear);
     __ bind(&new_target_not_constructor);
@@ -2210,7 +2205,7 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
     __ movq(rcx, args.GetReceiverOperand());
     __ JumpIfSmi(rcx, &convert_to_object, Label::kNear);
     static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
-    __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, TaggedRegister(rbx));
+    __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rbx);
     __ j(above_equal, &done_convert);
     if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
       Label convert_global_proxy;
@@ -5000,7 +4995,7 @@ void Generate_BaselineOrInterpreterEntry(MacroAssembler* masm,
   // always have baseline code.
   if (!is_osr) {
     Label start_with_baseline;
-    __ CmpObjectType(code_obj, CODET_TYPE, TaggedRegister(kScratchRegister));
+    __ CmpObjectType(code_obj, CODET_TYPE, kScratchRegister);
     __ j(equal, &start_with_baseline);
     // Start with bytecode as there is no baseline code.
@@ -5013,7 +5008,7 @@ void Generate_BaselineOrInterpreterEntry(MacroAssembler* masm,
     // Start with baseline code.
     __ bind(&start_with_baseline);
   } else if (FLAG_debug_code) {
-    __ CmpObjectType(code_obj, CODET_TYPE, TaggedRegister(kScratchRegister));
+    __ CmpObjectType(code_obj, CODET_TYPE, kScratchRegister);
     __ Assert(equal, AbortReason::kExpectedBaselineData);
   }
@@ -5036,8 +5031,7 @@ void Generate_BaselineOrInterpreterEntry(MacroAssembler* masm,
   Label install_baseline_code;
   // Check if feedback vector is valid. If not, call prepare for baseline to
   // allocate it.
-  __ CmpObjectType(feedback_vector, FEEDBACK_VECTOR_TYPE,
-                   TaggedRegister(kScratchRegister));
+  __ CmpObjectType(feedback_vector, FEEDBACK_VECTOR_TYPE, kScratchRegister);
   __ j(not_equal, &install_baseline_code);
   // Save BytecodeOffset from the stack frame.
......
@@ -206,14 +206,6 @@ void TurboAssembler::LoadMap(Register destination, Register object) {
 #endif
 }
-void TurboAssembler::LoadMap(TaggedRegister destination, Register object) {
-  LoadTaggedPointerField(destination,
-                         FieldOperand(object, HeapObject::kMapOffset));
-#ifdef V8_MAP_PACKING
-  UnpackMapWord(destination.reg());
-#endif
-}
 void TurboAssembler::LoadTaggedPointerField(Register destination,
                                             Operand field_operand) {
   if (COMPRESS_POINTERS_BOOL) {
@@ -2611,20 +2603,10 @@ void MacroAssembler::CmpObjectType(Register heap_object, InstanceType type,
   CmpInstanceType(map, type);
 }
-void MacroAssembler::CmpObjectType(Register heap_object, InstanceType type,
-                                   TaggedRegister map) {
-  LoadMap(map, heap_object);
-  CmpInstanceType(map, type);
-}
 void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
   cmpw(FieldOperand(map, Map::kInstanceTypeOffset), Immediate(type));
 }
-void MacroAssembler::CmpInstanceType(TaggedRegister map, InstanceType type) {
-  cmpw(FieldOperand(map, Map::kInstanceTypeOffset), Immediate(type));
-}
 void MacroAssembler::CmpInstanceTypeRange(Register map,
                                           Register instance_type_out,
                                           InstanceType lower_limit,
@@ -2640,10 +2622,9 @@ void MacroAssembler::TestCodeTIsMarkedForDeoptimization(Register codet,
     testl(FieldOperand(codet, CodeDataContainer::kKindSpecificFlagsOffset),
           Immediate(1 << Code::kMarkedForDeoptimizationBit));
   } else {
-    TaggedRegister container(scratch);
-    LoadTaggedPointerField(container,
+    LoadTaggedPointerField(scratch,
                            FieldOperand(codet, Code::kCodeDataContainerOffset));
-    testl(FieldOperand(container, CodeDataContainer::kKindSpecificFlagsOffset),
+    testl(FieldOperand(scratch, CodeDataContainer::kKindSpecificFlagsOffset),
           Immediate(1 << Code::kMarkedForDeoptimizationBit));
   }
 }
@@ -2843,11 +2824,10 @@ void MacroAssembler::InvokeFunction(Register function, Register new_target,
                                     Register actual_parameter_count,
                                     InvokeType type) {
   ASM_CODE_COMMENT(this);
-  TaggedRegister sfi(rbx);
   LoadTaggedPointerField(
-      sfi, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
+      rbx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
   movzxwq(rbx,
-          FieldOperand(sfi, SharedFunctionInfo::kFormalParameterCountOffset));
+          FieldOperand(rbx, SharedFunctionInfo::kFormalParameterCountOffset));
   InvokeFunction(function, new_target, rbx, actual_parameter_count, type);
 }
@@ -3294,20 +3274,12 @@ void MacroAssembler::LeaveExitFrameEpilogue() {
 void MacroAssembler::LoadNativeContextSlot(Register dst, int index) {
   ASM_CODE_COMMENT(this);
   // Load native context.
-  TaggedRegister context(dst);
-  LoadMap(context, rsi);
+  LoadMap(dst, rsi);
   LoadTaggedPointerField(
-      context,
-      FieldOperand(context,
-                   Map::kConstructorOrBackPointerOrNativeContextOffset));
+      dst,
+      FieldOperand(dst, Map::kConstructorOrBackPointerOrNativeContextOffset));
   // Load value from native context.
-  if (COMPRESS_POINTERS_BOOL) {
-    LoadTaggedPointerField(
-        dst, Operand(kPtrComprCageBaseRegister, context.reg(),
-                     ScaleFactor::times_1, Context::SlotOffset(index)));
-  } else {
-    LoadTaggedPointerField(dst, Operand(dst, Context::SlotOffset(index)));
-  }
+  LoadTaggedPointerField(dst, Operand(dst, Context::SlotOffset(index)));
 }
 int TurboAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
@@ -3463,10 +3435,9 @@ void TurboAssembler::ComputeCodeStartAddress(Register dst) {
 // 3. if it is not zero then it jumps to the builtin.
 void TurboAssembler::BailoutIfDeoptimized(Register scratch) {
   int offset = Code::kCodeDataContainerOffset - Code::kHeaderSize;
-  TaggedRegister container(scratch);
-  LoadTaggedPointerField(container,
+  LoadTaggedPointerField(scratch,
                          Operand(kJavaScriptCallCodeStartRegister, offset));
-  testl(FieldOperand(container, CodeDataContainer::kKindSpecificFlagsOffset),
+  testl(FieldOperand(scratch, CodeDataContainer::kKindSpecificFlagsOffset),
         Immediate(1 << Code::kMarkedForDeoptimizationBit));
   Jump(BUILTIN_CODE(isolate(), CompileLazyDeoptimizedCode),
        RelocInfo::CODE_TARGET, not_zero);
......
@@ -286,7 +286,6 @@ class V8_EXPORT_PRIVATE TurboAssembler
 #endif
   void LoadMap(Register destination, Register object);
-  void LoadMap(TaggedRegister destination, Register object);
   void Move(Register dst, intptr_t x) {
     if (x == 0) {
@@ -807,13 +806,10 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler {
   // Incoming register is heap_object and outgoing register is map.
   // They may be the same register, and may be kScratchRegister.
   void CmpObjectType(Register heap_object, InstanceType type, Register map);
-  void CmpObjectType(Register heap_object, InstanceType type,
-                     TaggedRegister map);
   // Compare instance type for map.
   // Always use unsigned comparisons: above and below, not less and greater.
   void CmpInstanceType(Register map, InstanceType type);
-  void CmpInstanceType(TaggedRegister map, InstanceType type);
   // Compare instance type ranges for a map (low and high inclusive)
   // Always use unsigned comparisons: below_equal for a positive result.
......
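
The TaggedRegister type referenced throughout the x64 hunks is defined elsewhere and is not part of this diff; conceptually it is a thin wrapper that records, in the C++ type system, that a register still holds a compressed tagged value. A minimal sketch of the idea (an assumed shape for illustration, not the actual definition):

```cpp
// Sketch only: marks a register as holding a still-compressed tagged value.
// Overload resolution then selects variants such as
// CmpInstanceType(TaggedRegister, InstanceType), which know they must fold
// the cage base into their memory operands instead of assuming a full pointer.
class TaggedRegister {
 public:
  explicit TaggedRegister(Register reg) : reg_(reg) {}
  Register reg() const { return reg_; }

 private:
  Register reg_;
};
```

The revert deletes every overload that accepted this wrapper, so the map loads above go back to plain Register values that are decompressed as soon as they are loaded.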