Commit f52f9341 authored by Milad Fa, committed by V8 LUCI CQ

PPC/s390: Remove dynamic map checks and custom deoptimization kinds

Port b2978927

Original Commit Message:

    This CL removes:

    - Dynamic map checks aka minimorphic property loads (TF support,
      builtins).
    - "Bailout" deopts (= drop to the interpreter once, but don't
      throw out optimized code).
    - "EagerWithResume" deopts (= part of dynamic map check
      functionality, we call a builtin for the deopt check and deopt
      or resume based on the result).

R=jgruber@chromium.org, joransiu@ca.ibm.com, junyan@redhat.com, midawson@redhat.com
BUG=
LOG=N

Change-Id: I64476f73810774c2c592231d82c4a2cbfa2bf94e
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3537881Reviewed-by: 's avatarJunliang Yan <junyan@redhat.com>
Commit-Queue: Milad Farazmand <mfarazma@redhat.com>
Cr-Commit-Position: refs/heads/main@{#79551}
parent 164a040a
......@@ -3470,10 +3470,6 @@ void Builtins::Generate_DeoptimizationEntry_Soft(MacroAssembler* masm) {
Generate_DeoptimizationEntry(masm, DeoptimizeKind::kSoft);
}
// Deopt entry for "bailout" deopts: drop to the interpreter once, but keep
// the optimized code (see the commit message above; this kind is being
// removed).
void Builtins::Generate_DeoptimizationEntry_Bailout(MacroAssembler* masm) {
Generate_DeoptimizationEntry(masm, DeoptimizeKind::kBailout);
}
// Deopt entry for lazy deopts (DeoptimizeKind::kLazy).
void Builtins::Generate_DeoptimizationEntry_Lazy(MacroAssembler* masm) {
Generate_DeoptimizationEntry(masm, DeoptimizeKind::kLazy);
}
......@@ -3496,76 +3492,6 @@ void Builtins::Generate_InterpreterOnStackReplacement_ToBaseline(
__ bkpt(0);
}
// Non-template entry point: instantiates the trampoline for the plain
// DynamicCheckMaps builtin and its descriptor.
void Builtins::Generate_DynamicCheckMapsTrampoline(MacroAssembler* masm) {
Generate_DynamicCheckMapsTrampoline<DynamicCheckMapsDescriptor>(
masm, BUILTIN_CODE(masm->isolate(), DynamicCheckMaps));
}
// Non-template entry point: same trampoline, but targeting the variant of the
// builtin that takes an explicit feedback vector.
void Builtins::Generate_DynamicCheckMapsWithFeedbackVectorTrampoline(
MacroAssembler* masm) {
Generate_DynamicCheckMapsTrampoline<
DynamicCheckMapsWithFeedbackVectorDescriptor>(
masm, BUILTIN_CODE(masm->isolate(), DynamicCheckMapsWithFeedbackVector));
}
// Trampoline reached from an "EagerWithResume" deopt exit. It calls the
// DynamicCheckMaps builtin with the two immediate arguments embedded after
// the exit's call sequence, then dispatches on the returned status:
//   kSuccess -> resume optimized code, kDeopt -> eager deopt entry,
//   kBailout -> bailout deopt entry (drop to interpreter, keep the code).
template <class Descriptor>
void Builtins::Generate_DynamicCheckMapsTrampoline(
MacroAssembler* masm, Handle<Code> builtin_target) {
FrameScope scope(masm, StackFrame::MANUAL);
__ EnterFrame(StackFrame::INTERNAL);
// Only save the registers that the DynamicCheckMaps builtin can clobber.
Descriptor descriptor;
RegList registers = descriptor.allocatable_registers();
// If FLAG_debug_code is enabled, CSA checks will call a C function, so we
// need to save all CallerSaved registers too.
if (FLAG_debug_code) registers |= kJSCallerSaved;
__ MaybeSaveRegisters(registers);
// Load the immediate arguments from the deopt exit to pass to the builtin.
// The caller PC points at the deopt exit; the slot and handler words sit at
// fixed offsets from it (kEagerWithResumeImmedArgs{1,2}PcOffset).
Register slot_arg = descriptor.GetRegisterParameter(Descriptor::kSlot);
Register handler_arg = descriptor.GetRegisterParameter(Descriptor::kHandler);
__ LoadU64(handler_arg,
MemOperand(fp, CommonFrameConstants::kCallerPCOffset));
__ LoadU64(
slot_arg,
MemOperand(handler_arg, Deoptimizer::kEagerWithResumeImmedArgs1PcOffset));
__ LoadU64(
handler_arg,
MemOperand(handler_arg, Deoptimizer::kEagerWithResumeImmedArgs2PcOffset));
__ Call(builtin_target, RelocInfo::CODE_TARGET);
Label deopt, bailout;
// The builtin's status lands in r3 (kReturnRegister0 on ppc, see the
// DynamicCheckMapsDescriptor STATIC_ASSERT).
__ cmpi(r3, Operand(static_cast<int>(DynamicCheckMapsStatus::kSuccess)));
__ bne(&deopt);
// kSuccess: restore state and resume the optimized code.
__ MaybeRestoreRegisters(registers);
__ LeaveFrame(StackFrame::INTERNAL);
__ Ret();
__ bind(&deopt);
__ cmpi(r3, Operand(static_cast<int>(DynamicCheckMapsStatus::kBailout)));
__ beq(&bailout);
if (FLAG_debug_code) {
// Only kDeopt should remain at this point.
__ cmpi(r3, Operand(static_cast<int>(DynamicCheckMapsStatus::kDeopt)));
__ Assert(eq, AbortReason::kUnexpectedDynamicCheckMapsStatus);
}
// kDeopt: tail-jump to the eager deoptimization entry.
__ MaybeRestoreRegisters(registers);
__ LeaveFrame(StackFrame::INTERNAL);
Handle<Code> deopt_eager = masm->isolate()->builtins()->code_handle(
Deoptimizer::GetDeoptimizationEntry(DeoptimizeKind::kEager));
__ Jump(deopt_eager, RelocInfo::CODE_TARGET);
// kBailout: tail-jump to the bailout deoptimization entry.
__ bind(&bailout);
__ MaybeRestoreRegisters(registers);
__ LeaveFrame(StackFrame::INTERNAL);
Handle<Code> deopt_bailout = masm->isolate()->builtins()->code_handle(
Deoptimizer::GetDeoptimizationEntry(DeoptimizeKind::kBailout));
__ Jump(deopt_bailout, RelocInfo::CODE_TARGET);
}
#undef __
} // namespace internal
} // namespace v8
......
......@@ -3849,10 +3849,6 @@ void Builtins::Generate_DeoptimizationEntry_Soft(MacroAssembler* masm) {
Generate_DeoptimizationEntry(masm, DeoptimizeKind::kSoft);
}
// Deopt entry for "bailout" deopts: drop to the interpreter once, but keep
// the optimized code (this deopt kind is being removed by this commit).
void Builtins::Generate_DeoptimizationEntry_Bailout(MacroAssembler* masm) {
Generate_DeoptimizationEntry(masm, DeoptimizeKind::kBailout);
}
// Deopt entry for lazy deopts (DeoptimizeKind::kLazy).
void Builtins::Generate_DeoptimizationEntry_Lazy(MacroAssembler* masm) {
Generate_DeoptimizationEntry(masm, DeoptimizeKind::kLazy);
}
......@@ -3884,76 +3880,6 @@ void Builtins::Generate_InterpreterOnStackReplacement_ToBaseline(
Generate_BaselineOrInterpreterEntry(masm, false, true);
}
// Non-template entry point: instantiates the trampoline for the plain
// DynamicCheckMaps builtin and its descriptor.
void Builtins::Generate_DynamicCheckMapsTrampoline(MacroAssembler* masm) {
Generate_DynamicCheckMapsTrampoline<DynamicCheckMapsDescriptor>(
masm, BUILTIN_CODE(masm->isolate(), DynamicCheckMaps));
}
// Non-template entry point: same trampoline, but targeting the variant of the
// builtin that takes an explicit feedback vector.
void Builtins::Generate_DynamicCheckMapsWithFeedbackVectorTrampoline(
MacroAssembler* masm) {
Generate_DynamicCheckMapsTrampoline<
DynamicCheckMapsWithFeedbackVectorDescriptor>(
masm, BUILTIN_CODE(masm->isolate(), DynamicCheckMapsWithFeedbackVector));
}
// Trampoline reached from an "EagerWithResume" deopt exit. It calls the
// DynamicCheckMaps builtin with the two immediate arguments embedded after
// the exit's call sequence, then dispatches on the returned status:
//   kSuccess -> resume optimized code, kDeopt -> eager deopt entry,
//   kBailout -> bailout deopt entry (drop to interpreter, keep the code).
template <class Descriptor>
void Builtins::Generate_DynamicCheckMapsTrampoline(
MacroAssembler* masm, Handle<Code> builtin_target) {
FrameScope scope(masm, StackFrame::MANUAL);
__ EnterFrame(StackFrame::INTERNAL);
// Only save the registers that the DynamicCheckMaps builtin can clobber.
Descriptor descriptor;
RegList registers = descriptor.allocatable_registers();
// If FLAG_debug_code is enabled, CSA checks will call a C function, so we
// need to save all CallerSaved registers too.
if (FLAG_debug_code) registers |= kJSCallerSaved;
__ MaybeSaveRegisters(registers);
// Load the immediate arguments from the deopt exit to pass to the builtin.
// The caller PC points at the deopt exit; the slot and handler words sit at
// fixed offsets from it (kEagerWithResumeImmedArgs{1,2}PcOffset).
Register slot_arg = descriptor.GetRegisterParameter(Descriptor::kSlot);
Register handler_arg = descriptor.GetRegisterParameter(Descriptor::kHandler);
__ LoadU64(handler_arg,
MemOperand(fp, CommonFrameConstants::kCallerPCOffset));
__ LoadU64(
slot_arg,
MemOperand(handler_arg, Deoptimizer::kEagerWithResumeImmedArgs1PcOffset));
__ LoadU64(
handler_arg,
MemOperand(handler_arg, Deoptimizer::kEagerWithResumeImmedArgs2PcOffset));
__ Call(builtin_target, RelocInfo::CODE_TARGET);
Label deopt, bailout;
// The builtin's status lands in r2 (kReturnRegister0 on s390, see the
// DynamicCheckMapsDescriptor STATIC_ASSERT).
__ CmpS64(r2, Operand(static_cast<int>(DynamicCheckMapsStatus::kSuccess)));
__ bne(&deopt);
// kSuccess: restore state and resume the optimized code.
__ MaybeRestoreRegisters(registers);
__ LeaveFrame(StackFrame::INTERNAL);
__ Ret();
__ bind(&deopt);
__ CmpS64(r2, Operand(static_cast<int>(DynamicCheckMapsStatus::kBailout)));
__ beq(&bailout);
if (FLAG_debug_code) {
// Only kDeopt should remain at this point.
__ CmpS64(r2, Operand(static_cast<int>(DynamicCheckMapsStatus::kDeopt)));
__ Assert(eq, AbortReason::kUnexpectedDynamicCheckMapsStatus);
}
// kDeopt: tail-jump to the eager deoptimization entry.
__ MaybeRestoreRegisters(registers);
__ LeaveFrame(StackFrame::INTERNAL);
Handle<Code> deopt_eager = masm->isolate()->builtins()->code_handle(
Deoptimizer::GetDeoptimizationEntry(DeoptimizeKind::kEager));
__ Jump(deopt_eager, RelocInfo::CODE_TARGET);
// kBailout: tail-jump to the bailout deoptimization entry.
__ bind(&bailout);
__ MaybeRestoreRegisters(registers);
__ LeaveFrame(StackFrame::INTERNAL);
Handle<Code> deopt_bailout = masm->isolate()->builtins()->code_handle(
Deoptimizer::GetDeoptimizationEntry(DeoptimizeKind::kBailout));
__ Jump(deopt_bailout, RelocInfo::CODE_TARGET);
}
#undef __
} // namespace internal
......
......@@ -41,18 +41,6 @@ constexpr auto WriteBarrierDescriptor::registers() {
return RegisterArray(r4, r8, r7, r5, r3, r6, kContextRegister);
}
// static
// Allocatable registers for the DynamicCheckMaps builtin's calling
// convention on ppc. r3 comes first so the status is returned in
// kReturnRegister0, as the assert pins down.
constexpr auto DynamicCheckMapsDescriptor::registers() {
STATIC_ASSERT(kReturnRegister0 == r3);
return RegisterArray(r3, r4, r5, r6, cp);
}
// static
// Same register assignment as DynamicCheckMapsDescriptor, used by the
// feedback-vector variant of the builtin.
constexpr auto DynamicCheckMapsWithFeedbackVectorDescriptor::registers() {
STATIC_ASSERT(kReturnRegister0 == r3);
return RegisterArray(r3, r4, r5, r6, cp);
}
// static
constexpr Register LoadDescriptor::ReceiverRegister() { return r4; }
// static
......
......@@ -3710,11 +3710,6 @@ void TurboAssembler::CallForDeoptimization(Builtin target, int, Label* exit,
(kind == DeoptimizeKind::kLazy)
? Deoptimizer::kLazyDeoptExitSize
: Deoptimizer::kNonLazyDeoptExitSize);
if (kind == DeoptimizeKind::kEagerWithResume) {
b(ret);
DCHECK_EQ(SizeOfCodeGeneratedSince(exit),
Deoptimizer::kEagerWithResumeBeforeArgsSize);
}
}
void TurboAssembler::ZeroExtByte(Register dst, Register src) {
......
......@@ -41,18 +41,6 @@ constexpr auto WriteBarrierDescriptor::registers() {
return RegisterArray(r3, r7, r6, r4, r2, r5, kContextRegister);
}
// static
// Allocatable registers for the DynamicCheckMaps builtin's calling
// convention on s390. r2 comes first so the status is returned in
// kReturnRegister0, as the assert pins down.
constexpr auto DynamicCheckMapsDescriptor::registers() {
STATIC_ASSERT(kReturnRegister0 == r2);
return RegisterArray(r2, r3, r4, r5, cp);
}
// static
// Same register assignment as DynamicCheckMapsDescriptor, used by the
// feedback-vector variant of the builtin.
constexpr auto DynamicCheckMapsWithFeedbackVectorDescriptor::registers() {
STATIC_ASSERT(kReturnRegister0 == r2);
return RegisterArray(r2, r3, r4, r5, cp);
}
// static
constexpr Register LoadDescriptor::ReceiverRegister() { return r3; }
// static
......
......@@ -4829,11 +4829,6 @@ void TurboAssembler::CallForDeoptimization(Builtin target, int, Label* exit,
(kind == DeoptimizeKind::kLazy)
? Deoptimizer::kLazyDeoptExitSize
: Deoptimizer::kNonLazyDeoptExitSize);
if (kind == DeoptimizeKind::kEagerWithResume) {
bc_long(Condition::al, ret);
DCHECK_EQ(SizeOfCodeGeneratedSince(exit),
Deoptimizer::kEagerWithResumeBeforeArgsSize);
}
}
// Emits an unconditional stop — execution halts if this instruction is hit.
void TurboAssembler::Trap() { stop(); }
......
......@@ -17,18 +17,11 @@ namespace internal {
ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Eager);
ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Lazy);
ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Soft);
ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Bailout);
#undef ASSERT_OFFSET
// Deopt exits on this port are emitted with a fixed size, expressed in
// kInstrSize multiples.
const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
const int Deoptimizer::kNonLazyDeoptExitSize = 3 * kInstrSize;
const int Deoptimizer::kLazyDeoptExitSize = 3 * kInstrSize;
// EagerWithResume exits additionally embed two pointer-sized immediate
// arguments after the instruction sequence; the *ImmedArgs*PcOffset values
// locate them relative to the exit's PC (read back by the DynamicCheckMaps
// trampoline).
const int Deoptimizer::kEagerWithResumeBeforeArgsSize = 4 * kInstrSize;
const int Deoptimizer::kEagerWithResumeDeoptExitSize =
kEagerWithResumeBeforeArgsSize + 2 * kSystemPointerSize;
const int Deoptimizer::kEagerWithResumeImmedArgs1PcOffset = kInstrSize;
const int Deoptimizer::kEagerWithResumeImmedArgs2PcOffset =
kInstrSize + kSystemPointerSize;
Float32 RegisterValues::GetFloatRegister(unsigned n) const {
float float_val = static_cast<float>(double_registers_[n].get_scalar());
......
......@@ -17,18 +17,11 @@ namespace internal {
ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Eager);
ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Lazy);
ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Soft);
ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Bailout);
#undef ASSERT_OFFSET
// Deopt exits on this port are emitted with a fixed size. Sizes here are raw
// byte counts (unlike the kInstrSize multiples on ppc).
// NOTE(review): presumably 6 + 2 = a 6-byte call plus a 2-byte instruction —
// confirm against the s390 deopt-exit emitter.
const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
const int Deoptimizer::kNonLazyDeoptExitSize = 6 + 2;
const int Deoptimizer::kLazyDeoptExitSize = 6 + 2;
// EagerWithResume exits additionally embed two pointer-sized immediate
// arguments after the instruction sequence; the *ImmedArgs*PcOffset values
// locate them relative to the exit's PC (read back by the DynamicCheckMaps
// trampoline).
const int Deoptimizer::kEagerWithResumeBeforeArgsSize = 6 + 2 + 6;
const int Deoptimizer::kEagerWithResumeDeoptExitSize =
kEagerWithResumeBeforeArgsSize + 2 * kSystemPointerSize;
const int Deoptimizer::kEagerWithResumeImmedArgs1PcOffset = 6;
const int Deoptimizer::kEagerWithResumeImmedArgs2PcOffset =
6 + kSystemPointerSize;
Float32 RegisterValues::GetFloatRegister(unsigned n) const {
return Float32::FromBits(
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment