Commit 0a110021 authored by Leszek Swirski, committed by V8 LUCI CQ

[deoptimizer] Remove non-fixed-size deopts

All architectures have kSupportsFixedDeoptExitSizes = true, so we can
remove kSupportsFixedDeoptExitSizes entirely and always have fixed-size
deopts.

Change-Id: Ib696f6d2431f60677cc7fa2193ee27b9b0f80bc8
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3550268
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Auto-Submit: Leszek Swirski <leszeks@chromium.org>
Commit-Queue: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/main@{#79654}
parent 0bfd4cc1
......@@ -3356,17 +3356,16 @@ void Generate_DeoptimizationEntry(MacroAssembler* masm,
static constexpr int kSavedRegistersAreaSize =
(kNumberOfRegisters * kPointerSize) + kDoubleRegsSize;
__ mov(r2, Operand(Deoptimizer::kFixedExitSizeMarker));
// Get the address of the location in the code object (r3) (return
// address for lazy deoptimization) and compute the fp-to-sp delta in
// register r4.
__ mov(r3, lr);
__ add(r4, sp, Operand(kSavedRegistersAreaSize));
__ sub(r4, fp, r4);
__ mov(r2, lr);
__ add(r3, sp, Operand(kSavedRegistersAreaSize));
__ sub(r3, fp, r3);
// Allocate a new deoptimizer object.
// Pass four arguments in r0 to r3 and fifth argument on stack.
__ PrepareCallCFunction(6);
__ PrepareCallCFunction(5);
__ mov(r0, Operand(0));
Label context_check;
__ ldr(r1, MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
......@@ -3374,15 +3373,14 @@ void Generate_DeoptimizationEntry(MacroAssembler* masm,
__ ldr(r0, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
__ bind(&context_check);
__ mov(r1, Operand(static_cast<int>(deopt_kind)));
// r2: bailout id already loaded.
// r3: code address or 0 already loaded.
__ str(r4, MemOperand(sp, 0 * kPointerSize)); // Fp-to-sp delta.
__ Move(r5, ExternalReference::isolate_address(isolate));
__ str(r5, MemOperand(sp, 1 * kPointerSize)); // Isolate.
// r2: code address or 0 already loaded.
// r3: Fp-to-sp delta already loaded.
__ Move(r4, ExternalReference::isolate_address(isolate));
__ str(r4, MemOperand(sp, 0 * kPointerSize)); // Isolate.
// Call Deoptimizer::New().
{
AllowExternalCallThatCantCauseGC scope(masm);
__ CallCFunction(ExternalReference::new_deoptimizer_function(), 6);
__ CallCFunction(ExternalReference::new_deoptimizer_function(), 5);
}
// Preserve "deoptimizer" object in register r0 and get the input
......
......@@ -3896,10 +3896,8 @@ void Generate_DeoptimizationEntry(MacroAssembler* masm,
// Floating point registers are saved on the stack above core registers.
const int kDoubleRegistersOffset = saved_registers.Count() * kXRegSize;
Register bailout_id = x2;
Register code_object = x3;
Register fp_to_sp = x4;
__ Mov(bailout_id, Deoptimizer::kFixedExitSizeMarker);
Register code_object = x2;
Register fp_to_sp = x3;
// Get the address of the location in the code object. This is the return
// address for lazy deoptimization.
__ Mov(code_object, lr);
......@@ -3920,15 +3918,14 @@ void Generate_DeoptimizationEntry(MacroAssembler* masm,
__ Mov(x1, static_cast<int>(deopt_kind));
// Following arguments are already loaded:
// - x2: bailout id
// - x3: code object address
// - x4: fp-to-sp delta
__ Mov(x5, ExternalReference::isolate_address(isolate));
// - x2: code object address
// - x3: fp-to-sp delta
__ Mov(x4, ExternalReference::isolate_address(isolate));
{
// Call Deoptimizer::New().
AllowExternalCallThatCantCauseGC scope(masm);
__ CallCFunction(ExternalReference::new_deoptimizer_function(), 6);
__ CallCFunction(ExternalReference::new_deoptimizer_function(), 5);
}
// Preserve "deoptimizer" object in register x0.
......
......@@ -4008,7 +4008,7 @@ void Generate_DeoptimizationEntry(MacroAssembler* masm,
__ neg(edx);
// Allocate a new deoptimizer object.
__ PrepareCallCFunction(6, eax);
__ PrepareCallCFunction(5, eax);
__ mov(eax, Immediate(0));
Label context_check;
__ mov(edi, Operand(ebp, CommonFrameConstants::kContextOrFrameTypeOffset));
......@@ -4018,15 +4018,13 @@ void Generate_DeoptimizationEntry(MacroAssembler* masm,
__ mov(Operand(esp, 0 * kSystemPointerSize), eax); // Function.
__ mov(Operand(esp, 1 * kSystemPointerSize),
Immediate(static_cast<int>(deopt_kind)));
__ mov(Operand(esp, 2 * kSystemPointerSize),
Immediate(Deoptimizer::kFixedExitSizeMarker)); // Bailout id.
__ mov(Operand(esp, 3 * kSystemPointerSize), ecx); // Code address or 0.
__ mov(Operand(esp, 4 * kSystemPointerSize), edx); // Fp-to-sp delta.
__ Move(Operand(esp, 5 * kSystemPointerSize),
__ mov(Operand(esp, 2 * kSystemPointerSize), ecx); // Code address or 0.
__ mov(Operand(esp, 3 * kSystemPointerSize), edx); // Fp-to-sp delta.
__ Move(Operand(esp, 4 * kSystemPointerSize),
Immediate(ExternalReference::isolate_address(masm->isolate())));
{
AllowExternalCallThatCantCauseGC scope(masm);
__ CallCFunction(ExternalReference::new_deoptimizer_function(), 6);
__ CallCFunction(ExternalReference::new_deoptimizer_function(), 5);
}
// Preserve deoptimizer object in register eax and get the input
......
......@@ -4816,22 +4816,16 @@ void Generate_DeoptimizationEntry(MacroAssembler* masm,
ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate),
rbp);
// We use this to keep the value of the fifth argument temporarily.
// Unfortunately we can't store it directly in r8 (used for passing
// this on linux), since it is another parameter passing register on windows.
Register arg5 = r15;
__ Move(arg_reg_3, Deoptimizer::kFixedExitSizeMarker);
// Get the address of the location in the code object
// and compute the fp-to-sp delta in register arg5.
__ movq(arg_reg_4, Operand(rsp, kCurrentOffsetToReturnAddress));
__ movq(arg_reg_3, Operand(rsp, kCurrentOffsetToReturnAddress));
// Load the fp-to-sp-delta.
__ leaq(arg5, Operand(rsp, kCurrentOffsetToParentSP));
__ subq(arg5, rbp);
__ negq(arg5);
__ leaq(arg_reg_4, Operand(rsp, kCurrentOffsetToParentSP));
__ subq(arg_reg_4, rbp);
__ negq(arg_reg_4);
// Allocate a new deoptimizer object.
__ PrepareCallCFunction(6);
__ PrepareCallCFunction(5);
__ Move(rax, 0);
Label context_check;
__ movq(rdi, Operand(rbp, CommonFrameConstants::kContextOrFrameTypeOffset));
......@@ -4843,19 +4837,19 @@ void Generate_DeoptimizationEntry(MacroAssembler* masm,
// Args 3 and 4 are already in the right registers.
// On windows put the arguments on the stack (PrepareCallCFunction
// has created space for this). On linux pass the arguments in r8 and r9.
// has created space for this). On linux pass the arguments in r8.
#ifdef V8_TARGET_OS_WIN
__ movq(Operand(rsp, 4 * kSystemPointerSize), arg5);
Register arg5 = r15;
__ LoadAddress(arg5, ExternalReference::isolate_address(isolate));
__ movq(Operand(rsp, 5 * kSystemPointerSize), arg5);
__ movq(Operand(rsp, 4 * kSystemPointerSize), arg5);
#else
__ movq(r8, arg5);
__ LoadAddress(r9, ExternalReference::isolate_address(isolate));
// r8 is arg_reg_5 on Linux
__ LoadAddress(r8, ExternalReference::isolate_address(isolate));
#endif
{
AllowExternalCallThatCantCauseGC scope(masm);
__ CallCFunction(ExternalReference::new_deoptimizer_function(), 6);
__ CallCFunction(ExternalReference::new_deoptimizer_function(), 5);
}
// Preserve deoptimizer object in register rax and get the input
// frame descriptor pointer.
......
......@@ -327,17 +327,13 @@ void CodeGenerator::AssembleCode() {
// emitted before emitting the deoptimization exits.
PrepareForDeoptimizationExits(&deoptimization_exits_);
if (Deoptimizer::kSupportsFixedDeoptExitSizes) {
deopt_exit_start_offset_ = tasm()->pc_offset();
}
deopt_exit_start_offset_ = tasm()->pc_offset();
// Assemble deoptimization exits.
offsets_info_.deoptimization_exits = tasm()->pc_offset();
int last_updated = 0;
// We sort the deoptimization exits here so that the lazy ones will
// be visited second last, and eagerwithresume last. We need this as on
// architectures where Deoptimizer::kSupportsFixedDeoptExitSizes is true,
// lazy deopts and eagerwithresume might need additional instructions.
// We sort the deoptimization exits here so that the lazy ones will be visited
// last. We need this as lazy deopts might need additional instructions.
auto cmp = [](const DeoptimizationExit* a, const DeoptimizationExit* b) {
// The deoptimization exits are sorted so that lazy deopt exits appear after
// eager deopts.
......@@ -349,9 +345,7 @@ void CodeGenerator::AssembleCode() {
}
return a->pc_offset() < b->pc_offset();
};
if (Deoptimizer::kSupportsFixedDeoptExitSizes) {
std::sort(deoptimization_exits_.begin(), deoptimization_exits_.end(), cmp);
}
std::sort(deoptimization_exits_.begin(), deoptimization_exits_.end(), cmp);
{
#ifdef V8_TARGET_ARCH_PPC64
......@@ -360,9 +354,7 @@ void CodeGenerator::AssembleCode() {
#endif
for (DeoptimizationExit* exit : deoptimization_exits_) {
if (exit->emitted()) continue;
if (Deoptimizer::kSupportsFixedDeoptExitSizes) {
exit->set_deoptimization_id(next_deoptimization_id_++);
}
exit->set_deoptimization_id(next_deoptimization_id_++);
result_ = AssembleDeoptimizerCall(exit);
if (result_ != kSuccess) return;
......@@ -1144,9 +1136,6 @@ DeoptimizationExit* CodeGenerator::BuildTranslation(
#else // DEBUG
0);
#endif // DEBUG
if (!Deoptimizer::kSupportsFixedDeoptExitSizes) {
exit->set_deoptimization_id(next_deoptimization_id_++);
}
if (immediate_args_count != 0) {
auto immediate_args = zone()->New<ZoneVector<ImmediateOperand*>>(zone());
InstructionOperandIterator imm_iter(
......
......@@ -4247,7 +4247,6 @@ void CodeGenerator::PrepareForDeoptimizationExits(
}
__ CheckTrampolinePoolQuick(total_size);
DCHECK(Deoptimizer::kSupportsFixedDeoptExitSizes);
}
void CodeGenerator::AssembleMove(InstructionOperand* source,
......
......@@ -4079,7 +4079,6 @@ void CodeGenerator::PrepareForDeoptimizationExits(
}
__ CheckTrampolinePoolQuick(total_size);
DCHECK(Deoptimizer::kSupportsFixedDeoptExitSizes);
}
void CodeGenerator::AssembleMove(InstructionOperand* source,
......
......@@ -19,7 +19,6 @@ ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Lazy);
ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Soft);
#undef ASSERT_OFFSET
const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
const int Deoptimizer::kNonLazyDeoptExitSize = 2 * kInstrSize;
const int Deoptimizer::kLazyDeoptExitSize = 2 * kInstrSize;
......
......@@ -9,7 +9,6 @@
namespace v8 {
namespace internal {
const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
const int Deoptimizer::kNonLazyDeoptExitSize = kInstrSize;
#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
const int Deoptimizer::kLazyDeoptExitSize = 2 * kInstrSize;
......
......@@ -193,11 +193,11 @@ Code Deoptimizer::FindDeoptimizingCode(Address addr) {
// We rely on this function not causing a GC. It is called from generated code
// without having a real stack frame in place.
Deoptimizer* Deoptimizer::New(Address raw_function, DeoptimizeKind kind,
unsigned deopt_exit_index, Address from,
int fp_to_sp_delta, Isolate* isolate) {
Address from, int fp_to_sp_delta,
Isolate* isolate) {
JSFunction function = JSFunction::cast(Object(raw_function));
Deoptimizer* deoptimizer = new Deoptimizer(
isolate, function, kind, deopt_exit_index, from, fp_to_sp_delta);
Deoptimizer* deoptimizer =
new Deoptimizer(isolate, function, kind, from, fp_to_sp_delta);
isolate->set_current_deoptimizer(deoptimizer);
return deoptimizer;
}
......@@ -472,11 +472,10 @@ const char* Deoptimizer::MessageFor(DeoptimizeKind kind) {
}
Deoptimizer::Deoptimizer(Isolate* isolate, JSFunction function,
DeoptimizeKind kind, unsigned deopt_exit_index,
Address from, int fp_to_sp_delta)
DeoptimizeKind kind, Address from, int fp_to_sp_delta)
: isolate_(isolate),
function_(function),
deopt_exit_index_(deopt_exit_index),
deopt_exit_index_(kFixedExitSizeMarker),
deopt_kind_(kind),
from_(from),
fp_to_sp_delta_(fp_to_sp_delta),
......@@ -500,9 +499,6 @@ Deoptimizer::Deoptimizer(Isolate* isolate, JSFunction function,
deoptimizing_throw_ = true;
}
DCHECK(deopt_exit_index_ == kFixedExitSizeMarker ||
deopt_exit_index_ < kMaxNumberOfEntries);
DCHECK_NE(from, kNullAddress);
compiled_code_ = FindOptimizedCode();
DCHECK(!compiled_code_.is_null());
......@@ -528,38 +524,35 @@ Deoptimizer::Deoptimizer(Isolate* isolate, JSFunction function,
function.shared().internal_formal_parameter_count_with_receiver();
input_ = new (size) FrameDescription(size, parameter_count);
if (kSupportsFixedDeoptExitSizes) {
DCHECK_EQ(deopt_exit_index_, kFixedExitSizeMarker);
// Calculate the deopt exit index from return address.
DCHECK_GT(kNonLazyDeoptExitSize, 0);
DCHECK_GT(kLazyDeoptExitSize, 0);
DeoptimizationData deopt_data =
DeoptimizationData::cast(compiled_code_.deoptimization_data());
Address deopt_start = compiled_code_.raw_instruction_start() +
deopt_data.DeoptExitStart().value();
int non_lazy_deopt_count = deopt_data.NonLazyDeoptCount().value();
Address lazy_deopt_start =
deopt_start + non_lazy_deopt_count * kNonLazyDeoptExitSize;
// The deoptimization exits are sorted so that lazy deopt exits appear after
// eager deopts.
static_assert(static_cast<int>(DeoptimizeKind::kLazy) ==
static_cast<int>(kLastDeoptimizeKind),
"lazy deopts are expected to be emitted last");
// from_ is the value of the link register after the call to the
// deoptimizer, so for the last lazy deopt, from_ points to the first
// non-lazy deopt, so we use <=, similarly for the last non-lazy deopt and
// the first deopt with resume entry.
if (from_ <= lazy_deopt_start) {
int offset =
static_cast<int>(from_ - kNonLazyDeoptExitSize - deopt_start);
DCHECK_EQ(0, offset % kNonLazyDeoptExitSize);
deopt_exit_index_ = offset / kNonLazyDeoptExitSize;
} else {
int offset =
static_cast<int>(from_ - kLazyDeoptExitSize - lazy_deopt_start);
DCHECK_EQ(0, offset % kLazyDeoptExitSize);
deopt_exit_index_ = non_lazy_deopt_count + (offset / kLazyDeoptExitSize);
}
DCHECK_EQ(deopt_exit_index_, kFixedExitSizeMarker);
// Calculate the deopt exit index from return address.
DCHECK_GT(kNonLazyDeoptExitSize, 0);
DCHECK_GT(kLazyDeoptExitSize, 0);
DeoptimizationData deopt_data =
DeoptimizationData::cast(compiled_code_.deoptimization_data());
Address deopt_start = compiled_code_.raw_instruction_start() +
deopt_data.DeoptExitStart().value();
int non_lazy_deopt_count = deopt_data.NonLazyDeoptCount().value();
Address lazy_deopt_start =
deopt_start + non_lazy_deopt_count * kNonLazyDeoptExitSize;
// The deoptimization exits are sorted so that lazy deopt exits appear after
// eager deopts.
static_assert(static_cast<int>(DeoptimizeKind::kLazy) ==
static_cast<int>(kLastDeoptimizeKind),
"lazy deopts are expected to be emitted last");
// from_ is the value of the link register after the call to the
// deoptimizer, so for the last lazy deopt, from_ points to the first
// non-lazy deopt, so we use <=, similarly for the last non-lazy deopt and
// the first deopt with resume entry.
if (from_ <= lazy_deopt_start) {
int offset = static_cast<int>(from_ - kNonLazyDeoptExitSize - deopt_start);
DCHECK_EQ(0, offset % kNonLazyDeoptExitSize);
deopt_exit_index_ = offset / kNonLazyDeoptExitSize;
} else {
int offset =
static_cast<int>(from_ - kLazyDeoptExitSize - lazy_deopt_start);
DCHECK_EQ(0, offset % kLazyDeoptExitSize);
deopt_exit_index_ = non_lazy_deopt_count + (offset / kLazyDeoptExitSize);
}
}
......
......@@ -56,8 +56,7 @@ class Deoptimizer : public Malloced {
DeoptimizeKind deopt_kind() const { return deopt_kind_; }
static Deoptimizer* New(Address raw_function, DeoptimizeKind kind,
unsigned deopt_exit_index, Address from,
int fp_to_sp_delta, Isolate* isolate);
Address from, int fp_to_sp_delta, Isolate* isolate);
static Deoptimizer* Grab(Isolate* isolate);
// The returned object with information on the optimized frame needs to be
......@@ -118,20 +117,11 @@ class Deoptimizer : public Malloced {
static constexpr int kMaxNumberOfEntries = 16384;
// This marker is passed to Deoptimizer::New as {deopt_exit_index} on
// platforms that have fixed deopt sizes (see also
// kSupportsFixedDeoptExitSizes). The actual deoptimization id is then
// platforms that have fixed deopt sizes. The actual deoptimization id is then
// calculated from the return address.
static constexpr unsigned kFixedExitSizeMarker = kMaxUInt32;
// Set to true when the architecture supports deoptimization exit sequences
// of a fixed size, that can be sorted so that the deoptimization index is
// deduced from the address of the deoptimization exit.
// TODO(jgruber): Remove this, and support for variable deopt exit sizes,
// once all architectures use fixed exit sizes.
V8_EXPORT_PRIVATE static const bool kSupportsFixedDeoptExitSizes;
// Size of deoptimization exit sequence. This is only meaningful when
// kSupportsFixedDeoptExitSizes is true.
// Size of deoptimization exit sequence.
V8_EXPORT_PRIVATE static const int kNonLazyDeoptExitSize;
V8_EXPORT_PRIVATE static const int kLazyDeoptExitSize;
......@@ -145,7 +135,7 @@ class Deoptimizer : public Malloced {
const TranslatedFrame::iterator& iterator);
Deoptimizer(Isolate* isolate, JSFunction function, DeoptimizeKind kind,
unsigned deopt_exit_index, Address from, int fp_to_sp_delta);
Address from, int fp_to_sp_delta);
Code FindOptimizedCode();
void DeleteFrameDescriptions();
......
......@@ -9,7 +9,6 @@
namespace v8 {
namespace internal {
const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
const int Deoptimizer::kNonLazyDeoptExitSize = 5;
const int Deoptimizer::kLazyDeoptExitSize = 5;
......
......@@ -7,7 +7,6 @@
namespace v8 {
namespace internal {
const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
const int Deoptimizer::kNonLazyDeoptExitSize = 2 * kInstrSize;
const int Deoptimizer::kLazyDeoptExitSize = 2 * kInstrSize;
......
......@@ -7,7 +7,6 @@
namespace v8 {
namespace internal {
const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
const int Deoptimizer::kNonLazyDeoptExitSize = 3 * kInstrSize;
const int Deoptimizer::kLazyDeoptExitSize = 3 * kInstrSize;
......
......@@ -7,7 +7,6 @@
namespace v8 {
namespace internal {
const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
const int Deoptimizer::kNonLazyDeoptExitSize = 3 * kInstrSize;
const int Deoptimizer::kLazyDeoptExitSize = 3 * kInstrSize;
......
......@@ -19,7 +19,6 @@ ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Lazy);
ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Soft);
#undef ASSERT_OFFSET
const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
const int Deoptimizer::kNonLazyDeoptExitSize = 3 * kInstrSize;
const int Deoptimizer::kLazyDeoptExitSize = 3 * kInstrSize;
......
......@@ -7,7 +7,6 @@
namespace v8 {
namespace internal {
const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
const int Deoptimizer::kNonLazyDeoptExitSize = 2 * kInstrSize;
const int Deoptimizer::kLazyDeoptExitSize = 2 * kInstrSize;
......
......@@ -19,7 +19,6 @@ ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Lazy);
ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Soft);
#undef ASSERT_OFFSET
const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
const int Deoptimizer::kNonLazyDeoptExitSize = 6 + 2;
const int Deoptimizer::kLazyDeoptExitSize = 6 + 2;
......
......@@ -21,7 +21,6 @@ ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Lazy);
ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Soft);
#undef ASSERT_OFFSET
const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
const int Deoptimizer::kNonLazyDeoptExitSize = 4;
const int Deoptimizer::kLazyDeoptExitSize = 4;
......
......@@ -1522,8 +1522,6 @@ TEST(Regress621926) {
}
TEST(DeoptExitSizeIsFixed) {
CHECK(Deoptimizer::kSupportsFixedDeoptExitSizes);
Isolate* isolate = CcTest::i_isolate();
HandleScope handles(isolate);
v8::internal::byte buffer[256];
......
......@@ -311,8 +311,6 @@ TEST(ReplaceLane) {
}
TEST(DeoptExitSizeIsFixed) {
CHECK(Deoptimizer::kSupportsFixedDeoptExitSizes);
Isolate* isolate = CcTest::i_isolate();
HandleScope handles(isolate);
auto buffer = AllocateAssemblerBuffer();
......
......@@ -94,8 +94,6 @@ TEST(EmbeddedObj) {
}
TEST(DeoptExitSizeIsFixed) {
CHECK(Deoptimizer::kSupportsFixedDeoptExitSizes);
Isolate* isolate = CcTest::i_isolate();
HandleScope handles(isolate);
auto buffer = AllocateAssemblerBuffer();
......
......@@ -2879,8 +2879,6 @@ TEST(Popcnt) {
}
TEST(DeoptExitSizeIsFixed) {
CHECK(Deoptimizer::kSupportsFixedDeoptExitSizes);
Isolate* isolate = CcTest::i_isolate();
HandleScope handles(isolate);
auto buffer = AllocateAssemblerBuffer();
......
......@@ -1337,8 +1337,6 @@ TEST(macro_float_minmax_f64) {
}
TEST(DeoptExitSizeIsFixed) {
CHECK(Deoptimizer::kSupportsFixedDeoptExitSizes);
Isolate* isolate = CcTest::i_isolate();
HandleScope handles(isolate);
auto buffer = AllocateAssemblerBuffer();
......
......@@ -1690,8 +1690,6 @@ TEST(macro_float_minmax_f64) {
}
TEST(DeoptExitSizeIsFixed) {
CHECK(Deoptimizer::kSupportsFixedDeoptExitSizes);
Isolate* isolate = CcTest::i_isolate();
HandleScope handles(isolate);
auto buffer = AllocateAssemblerBuffer();
......
......@@ -1519,8 +1519,6 @@ TEST(Move) {
}
TEST(DeoptExitSizeIsFixed) {
CHECK(Deoptimizer::kSupportsFixedDeoptExitSizes);
Isolate* isolate = CcTest::i_isolate();
HandleScope handles(isolate);
auto buffer = AllocateAssemblerBuffer();
......
......@@ -1050,8 +1050,6 @@ TEST(AreAliased) {
}
TEST(DeoptExitSizeIsFixed) {
CHECK(Deoptimizer::kSupportsFixedDeoptExitSizes);
Isolate* isolate = CcTest::i_isolate();
HandleScope handles(isolate);
auto buffer = AllocateAssemblerBuffer();
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment