Commit da5b5f66 authored by Lu Yahan, committed by V8 LUCI CQ

[riscv64] Remove dynamic map checks and custom deoptimization kinds

Port b2978927

Bug: v8:12552
Change-Id: I73e76fc5cc8905a0fbfc801b2f794735866d19e8
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3544725
Auto-Submit: Yahan Lu <yahan@iscas.ac.cn>
Reviewed-by: ji qiu <qiuji@iscas.ac.cn>
Commit-Queue: ji qiu <qiuji@iscas.ac.cn>
Cr-Commit-Position: refs/heads/main@{#79561}
parent 0719ace6
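For context, this port drops the DeoptimizeKind::kBailout and DeoptimizeKind::kEagerWithResume paths on riscv64, leaving only the regular eager, soft, and lazy deoptimization kinds. The sketch below is illustrative only: the identifiers mirror those visible in the diff, but the exact enum definition and ordering in V8's sources at the time are an assumption, not a quotation from the tree.

```cpp
// Illustrative sketch only (not copied from the V8 sources): the set of
// DeoptimizeKind values before and after this removal. Names match the
// identifiers that appear in the diff; definition and order are assumptions.
#include <cstdint>
#include <iostream>

// Before: the custom kinds kBailout and kEagerWithResume existed alongside
// the regular ones; kEagerWithResume backed the DynamicCheckMaps trampolines
// removed in the hunks below.
enum class DeoptimizeKindBefore : uint8_t {
  kEager,
  kSoft,
  kBailout,
  kLazy,
  kEagerWithResume,
};

// After: only the regular kinds remain, so Generate_DeoptimizationEntry_Bailout,
// the DynamicCheckMaps trampolines, and the kEagerWithResume exit-size
// constants can all be deleted on riscv64.
enum class DeoptimizeKindAfter : uint8_t {
  kEager,
  kSoft,
  kLazy,
};

int main() {
  std::cout << "deopt kinds before: 5, after: 3\n";
  return 0;
}
```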
@@ -3678,10 +3678,6 @@ void Builtins::Generate_DeoptimizationEntry_Soft(MacroAssembler* masm) {
Generate_DeoptimizationEntry(masm, DeoptimizeKind::kSoft);
}
void Builtins::Generate_DeoptimizationEntry_Bailout(MacroAssembler* masm) {
Generate_DeoptimizationEntry(masm, DeoptimizeKind::kBailout);
}
void Builtins::Generate_DeoptimizationEntry_Lazy(MacroAssembler* masm) {
Generate_DeoptimizationEntry(masm, DeoptimizeKind::kLazy);
}
@@ -3867,74 +3863,6 @@ void Builtins::Generate_InterpreterOnStackReplacement_ToBaseline(
Generate_BaselineOrInterpreterEntry(masm, false, true);
}
void Builtins::Generate_DynamicCheckMapsTrampoline(MacroAssembler* masm) {
Generate_DynamicCheckMapsTrampoline<DynamicCheckMapsDescriptor>(
masm, BUILTIN_CODE(masm->isolate(), DynamicCheckMaps));
}
void Builtins::Generate_DynamicCheckMapsWithFeedbackVectorTrampoline(
MacroAssembler* masm) {
Generate_DynamicCheckMapsTrampoline<
DynamicCheckMapsWithFeedbackVectorDescriptor>(
masm, BUILTIN_CODE(masm->isolate(), DynamicCheckMapsWithFeedbackVector));
}
template <class Descriptor>
void Builtins::Generate_DynamicCheckMapsTrampoline(
MacroAssembler* masm, Handle<Code> builtin_target) {
FrameScope scope(masm, StackFrame::MANUAL);
__ EnterFrame(StackFrame::INTERNAL);
// Only save the registers that the DynamicMapChecks builtin can clobber.
Descriptor descriptor;
RegList registers = descriptor.allocatable_registers();
// FLAG_debug_code is enabled CSA checks will call C function and so we need
// to save all CallerSaved registers too.
if (FLAG_debug_code) registers |= kJSCallerSaved;
__ MaybeSaveRegisters(registers);
// Load the immediate arguments from the deopt exit to pass to the builtin.
Register slot_arg = descriptor.GetRegisterParameter(Descriptor::kSlot);
Register handler_arg = descriptor.GetRegisterParameter(Descriptor::kHandler);
__ Ld(handler_arg, MemOperand(fp, CommonFrameConstants::kCallerPCOffset));
__ Uld(slot_arg, MemOperand(handler_arg,
Deoptimizer::kEagerWithResumeImmedArgs1PcOffset));
__ Uld(
handler_arg,
MemOperand(handler_arg, Deoptimizer::kEagerWithResumeImmedArgs2PcOffset));
__ Call(builtin_target, RelocInfo::CODE_TARGET);
Label deopt, bailout;
__ Branch(&deopt, ne, a0,
Operand(static_cast<int64_t>(DynamicCheckMapsStatus::kSuccess)),
Label::Distance::kNear);
__ MaybeRestoreRegisters(registers);
__ LeaveFrame(StackFrame::INTERNAL);
__ Ret();
__ bind(&deopt);
__ Branch(&bailout, eq, a0,
Operand(static_cast<int64_t>(DynamicCheckMapsStatus::kBailout)));
if (FLAG_debug_code) {
__ Assert(eq, AbortReason::kUnexpectedDynamicCheckMapsStatus, a0,
Operand(static_cast<int64_t>(DynamicCheckMapsStatus::kDeopt)));
}
__ MaybeRestoreRegisters(registers);
__ LeaveFrame(StackFrame::INTERNAL);
Handle<Code> deopt_eager = masm->isolate()->builtins()->code_handle(
Deoptimizer::GetDeoptimizationEntry(DeoptimizeKind::kEager));
__ Jump(deopt_eager, RelocInfo::CODE_TARGET);
__ bind(&bailout);
__ MaybeRestoreRegisters(registers);
__ LeaveFrame(StackFrame::INTERNAL);
Handle<Code> deopt_bailout = masm->isolate()->builtins()->code_handle(
Deoptimizer::GetDeoptimizationEntry(DeoptimizeKind::kBailout));
__ Jump(deopt_bailout, RelocInfo::CODE_TARGET);
}
#undef __
} // namespace internal
......
@@ -43,18 +43,6 @@ constexpr auto WriteBarrierDescriptor::registers() {
return RegisterArray(a1, a5, a4, a2, a0, a3, kContextRegister, a7);
}
// static
constexpr auto DynamicCheckMapsDescriptor::registers() {
STATIC_ASSERT(kReturnRegister0 == a0);
return RegisterArray(kReturnRegister0, a1, a2, a3, cp);
}
// static
constexpr auto DynamicCheckMapsWithFeedbackVectorDescriptor::registers() {
STATIC_ASSERT(kReturnRegister0 == a0);
return RegisterArray(kReturnRegister0, a1, a2, a3, cp);
}
// static
constexpr Register LoadDescriptor::ReceiverRegister() { return a1; }
// static
......
@@ -4990,11 +4990,6 @@ void TurboAssembler::CallForDeoptimization(Builtin target, int, Label* exit,
(kind == DeoptimizeKind::kLazy)
? Deoptimizer::kLazyDeoptExitSize
: Deoptimizer::kNonLazyDeoptExitSize);
if (kind == DeoptimizeKind::kEagerWithResume) {
Branch(ret);
DCHECK_EQ(SizeOfCodeGeneratedSince(exit),
Deoptimizer::kEagerWithResumeBeforeArgsSize);
}
}
void TurboAssembler::LoadCodeObjectEntry(Register destination,
......
@@ -10,12 +10,6 @@ namespace internal {
const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
const int Deoptimizer::kNonLazyDeoptExitSize = 2 * kInstrSize;
const int Deoptimizer::kLazyDeoptExitSize = 2 * kInstrSize;
const int Deoptimizer::kEagerWithResumeBeforeArgsSize = 3 * kInstrSize;
const int Deoptimizer::kEagerWithResumeDeoptExitSize =
kEagerWithResumeBeforeArgsSize + 4 * kInstrSize;
const int Deoptimizer::kEagerWithResumeImmedArgs1PcOffset = kInstrSize;
const int Deoptimizer::kEagerWithResumeImmedArgs2PcOffset =
kInstrSize + kSystemPointerSize;
Float32 RegisterValues::GetFloatRegister(unsigned n) const {
return Float32::FromBits(
......
@@ -1530,23 +1530,20 @@ TEST(DeoptExitSizeIsFixed) {
for (int i = 0; i < kDeoptimizeKindCount; i++) {
DeoptimizeKind kind = static_cast<DeoptimizeKind>(i);
Label before_exit;
masm.bind(&before_exit);
if (kind == DeoptimizeKind::kEagerWithResume) {
Builtin target = Deoptimizer::GetDeoptWithResumeBuiltin(
DeoptimizeReason::kDynamicCheckMaps);
masm.CallForDeoptimization(target, 42, &before_exit, kind, &before_exit,
nullptr);
CHECK_EQ(masm.SizeOfCodeGeneratedSince(&before_exit),
Deoptimizer::kEagerWithResumeBeforeArgsSize);
Builtin target = Deoptimizer::GetDeoptimizationEntry(kind);
// Mirroring logic in code-generator.cc.
if (kind == DeoptimizeKind::kLazy) {
// CFI emits an extra instruction here.
masm.BindExceptionHandler(&before_exit);
} else {
Builtin target = Deoptimizer::GetDeoptimizationEntry(kind);
masm.CallForDeoptimization(target, 42, &before_exit, kind, &before_exit,
nullptr);
CHECK_EQ(masm.SizeOfCodeGeneratedSince(&before_exit),
kind == DeoptimizeKind::kLazy
? Deoptimizer::kLazyDeoptExitSize
: Deoptimizer::kNonLazyDeoptExitSize);
masm.bind(&before_exit);
}
masm.CallForDeoptimization(target, 42, &before_exit, kind, &before_exit,
&before_exit);
CHECK_EQ(masm.SizeOfCodeGeneratedSince(&before_exit),
kind == DeoptimizeKind::kLazy
? Deoptimizer::kLazyDeoptExitSize
: Deoptimizer::kNonLazyDeoptExitSize);
}
}
......