Commit 56b55f3f authored by Michael Achenbach, committed by Commit Bot

Revert "[TurboProp] Avoid marking the output of a call live in its catch handler"

This reverts commit cdc8d9a5.

Reason for revert: The regression test is too slow:
https://ci.chromium.org/p/v8/builders/ci/V8%20Linux%20-%20gc%20stress/30454

Also gcc failures:
https://ci.chromium.org/p/v8/builders/ci/V8%20Linux64%20gcc%20-%20debug/9528

Original change's description:
> [TurboProp] Avoid marking the output of a call live in its catch handler
>
> The output of a call won't be live if an exception is thrown while the
> call is on the stack and we unwind to a catch handler.
>
> BUG=chromium:1138075,v8:9684
>
> Change-Id: I95bf535bac388940869eb213e25565d64fe96df1
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2476317
> Commit-Queue: Ross McIlroy <rmcilroy@chromium.org>
> Reviewed-by: Georg Neis <neis@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#70562}

TBR=rmcilroy@chromium.org,neis@chromium.org

Change-Id: I0f6b9378d516a70401fc429fb3612bbf962b0fb2
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: chromium:1138075
Bug: v8:9684
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2479007
Reviewed-by: Michael Achenbach <machenbach@chromium.org>
Commit-Queue: Michael Achenbach <machenbach@chromium.org>
Cr-Commit-Position: refs/heads/master@{#70564}
parent 8557840b
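
For context, a rough JavaScript-level sketch of the liveness observation that the reverted change relied on. This is illustrative only and not part of the commit; the function names are made up:

// Illustrative sketch only: the result of the call to mightThrow() is
// consumed solely on the non-throwing path. If mightThrow() throws while it
// is on the stack, execution unwinds directly into the catch block before the
// assignment to x completes, so the register or spill slot holding the call's
// output can never be observed in the catch handler and need not be marked
// live there.
function mightThrow() {
  if (Math.random() < 0.5) throw new Error("boom");
  return 42;
}

function caller() {
  try {
    const x = mightThrow();  // call with an attached catch handler
    return x + 1;            // the call's output is only used on this path
  } catch (e) {
    return -1;               // the handler never sees the call's output
  }
}

caller();

The regression tests at the end of this diff stress the same situation by recursing until the stack limit is reached and then calling into optimized code from the catch handler.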
@@ -755,7 +755,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       } else {
         Register reg = i.InputRegister(0);
         DCHECK_IMPLIES(
-            instr->HasCallDescriptorFlag(CallDescriptor::kFixedTargetRegister),
+            HasCallDescriptorFlag(instr, CallDescriptor::kFixedTargetRegister),
             reg == kJavaScriptCallCodeStartRegister);
         __ CallCodeObject(reg);
       }
@@ -797,7 +797,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       } else {
         Register reg = i.InputRegister(0);
         DCHECK_IMPLIES(
-            instr->HasCallDescriptorFlag(CallDescriptor::kFixedTargetRegister),
+            HasCallDescriptorFlag(instr, CallDescriptor::kFixedTargetRegister),
             reg == kJavaScriptCallCodeStartRegister);
         __ JumpCodeObject(reg);
       }
@@ -825,7 +825,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       CHECK(!instr->InputAt(0)->IsImmediate());
       Register reg = i.InputRegister(0);
       DCHECK_IMPLIES(
-          instr->HasCallDescriptorFlag(CallDescriptor::kFixedTargetRegister),
+          HasCallDescriptorFlag(instr, CallDescriptor::kFixedTargetRegister),
           reg == kJavaScriptCallCodeStartRegister);
       __ Jump(reg);
       unwinding_info_writer_.MarkBlockWillExit();
......
@@ -691,7 +691,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       } else {
         Register reg = i.InputRegister(0);
         DCHECK_IMPLIES(
-            instr->HasCallDescriptorFlag(CallDescriptor::kFixedTargetRegister),
+            HasCallDescriptorFlag(instr, CallDescriptor::kFixedTargetRegister),
             reg == kJavaScriptCallCodeStartRegister);
         __ CallCodeObject(reg);
       }
@@ -732,7 +732,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       } else {
         Register reg = i.InputRegister(0);
         DCHECK_IMPLIES(
-            instr->HasCallDescriptorFlag(CallDescriptor::kFixedTargetRegister),
+            HasCallDescriptorFlag(instr, CallDescriptor::kFixedTargetRegister),
             reg == kJavaScriptCallCodeStartRegister);
         __ JumpCodeObject(reg);
       }
@@ -762,7 +762,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       CHECK(!instr->InputAt(0)->IsImmediate());
       Register reg = i.InputRegister(0);
       DCHECK_IMPLIES(
-          instr->HasCallDescriptorFlag(CallDescriptor::kFixedTargetRegister),
+          HasCallDescriptorFlag(instr, CallDescriptor::kFixedTargetRegister),
           reg == kJavaScriptCallCodeStartRegister);
       UseScratchRegisterScope temps(tasm());
       temps.Exclude(x17);
......
@@ -257,6 +257,17 @@ class OutOfLineCode : public ZoneObject {
   OutOfLineCode* const next_;
 };

+inline bool HasCallDescriptorFlag(Instruction* instr,
+                                  CallDescriptor::Flag flag) {
+  STATIC_ASSERT(CallDescriptor::kFlagsBitsEncodedInInstructionCode == 10);
+#ifdef DEBUG
+  static constexpr int kInstructionCodeFlagsMask =
+      ((1 << CallDescriptor::kFlagsBitsEncodedInInstructionCode) - 1);
+  DCHECK_EQ(static_cast<int>(flag) & kInstructionCodeFlagsMask, flag);
+#endif
+  return MiscField::decode(instr->opcode()) & flag;
+}
+
 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8
......
@@ -974,12 +974,12 @@ Label* CodeGenerator::AddJumpTable(Label** targets, size_t target_count) {
 void CodeGenerator::RecordCallPosition(Instruction* instr) {
   const bool needs_frame_state =
-      instr->HasCallDescriptorFlag(CallDescriptor::kNeedsFrameState);
+      HasCallDescriptorFlag(instr, CallDescriptor::kNeedsFrameState);
   RecordSafepoint(instr->reference_map(), needs_frame_state
                                               ? Safepoint::kLazyDeopt
                                               : Safepoint::kNoLazyDeopt);

-  if (instr->HasCallDescriptorFlag(CallDescriptor::kHasExceptionHandler)) {
+  if (HasCallDescriptorFlag(instr, CallDescriptor::kHasExceptionHandler)) {
     InstructionOperandConverter i(this, instr);
     RpoNumber handler_rpo = i.InputRpo(instr->InputCount() - 1);
     DCHECK(instructions()->InstructionBlockAt(handler_rpo)->IsHandler());
......
@@ -695,10 +695,10 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       } else {
         Register reg = i.InputRegister(0);
         DCHECK_IMPLIES(
-            instr->HasCallDescriptorFlag(CallDescriptor::kFixedTargetRegister),
+            HasCallDescriptorFlag(instr, CallDescriptor::kFixedTargetRegister),
             reg == kJavaScriptCallCodeStartRegister);
         __ LoadCodeObjectEntry(reg, reg);
-        if (instr->HasCallDescriptorFlag(CallDescriptor::kRetpoline)) {
+        if (HasCallDescriptorFlag(instr, CallDescriptor::kRetpoline)) {
           __ RetpolineCall(reg);
         } else {
           __ call(reg);
@@ -723,7 +723,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
         if (DetermineStubCallMode() == StubCallMode::kCallWasmRuntimeStub) {
           __ wasm_call(wasm_code, constant.rmode());
         } else {
-          if (instr->HasCallDescriptorFlag(CallDescriptor::kRetpoline)) {
+          if (HasCallDescriptorFlag(instr, CallDescriptor::kRetpoline)) {
             __ RetpolineCall(wasm_code, constant.rmode());
           } else {
             __ call(wasm_code, constant.rmode());
@@ -731,7 +731,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
         }
       } else {
         Register reg = i.InputRegister(0);
-        if (instr->HasCallDescriptorFlag(CallDescriptor::kRetpoline)) {
+        if (HasCallDescriptorFlag(instr, CallDescriptor::kRetpoline)) {
           __ RetpolineCall(reg);
         } else {
           __ call(reg);
@@ -753,10 +753,10 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       } else {
         Register reg = i.InputRegister(0);
         DCHECK_IMPLIES(
-            instr->HasCallDescriptorFlag(CallDescriptor::kFixedTargetRegister),
+            HasCallDescriptorFlag(instr, CallDescriptor::kFixedTargetRegister),
             reg == kJavaScriptCallCodeStartRegister);
         __ LoadCodeObjectEntry(reg, reg);
-        if (instr->HasCallDescriptorFlag(CallDescriptor::kRetpoline)) {
+        if (HasCallDescriptorFlag(instr, CallDescriptor::kRetpoline)) {
           __ RetpolineJump(reg);
         } else {
           __ jmp(reg);
@@ -773,7 +773,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
         __ jmp(wasm_code, constant.rmode());
       } else {
         Register reg = i.InputRegister(0);
-        if (instr->HasCallDescriptorFlag(CallDescriptor::kRetpoline)) {
+        if (HasCallDescriptorFlag(instr, CallDescriptor::kRetpoline)) {
           __ RetpolineJump(reg);
         } else {
           __ jmp(reg);
@@ -787,9 +787,9 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       CHECK(!HasImmediateInput(instr, 0));
       Register reg = i.InputRegister(0);
       DCHECK_IMPLIES(
-          instr->HasCallDescriptorFlag(CallDescriptor::kFixedTargetRegister),
+          HasCallDescriptorFlag(instr, CallDescriptor::kFixedTargetRegister),
           reg == kJavaScriptCallCodeStartRegister);
-      if (instr->HasCallDescriptorFlag(CallDescriptor::kRetpoline)) {
+      if (HasCallDescriptorFlag(instr, CallDescriptor::kRetpoline)) {
         __ RetpolineJump(reg);
       } else {
         __ jmp(reg);
......
@@ -2731,7 +2731,6 @@ constexpr InstructionCode EncodeCallDescriptorFlags(
   // Note: Not all bits of `flags` are preserved.
   STATIC_ASSERT(CallDescriptor::kFlagsBitsEncodedInInstructionCode ==
                 MiscField::kSize);
-  DCHECK(Instruction::IsCallWithDescriptorFlags(opcode));
   return opcode | MiscField::encode(flags & MiscField::kMax);
 }
......
@@ -927,23 +927,6 @@ class V8_EXPORT_PRIVATE Instruction final {
     return arch_opcode() == ArchOpcode::kArchThrowTerminator;
   }

-  static constexpr bool IsCallWithDescriptorFlags(InstructionCode arch_opcode) {
-    return arch_opcode <= ArchOpcode::kArchCallBuiltinPointer;
-  }
-  bool IsCallWithDescriptorFlags() const {
-    return IsCallWithDescriptorFlags(arch_opcode());
-  }
-  bool HasCallDescriptorFlag(CallDescriptor::Flag flag) const {
-    DCHECK(IsCallWithDescriptorFlags());
-    STATIC_ASSERT(CallDescriptor::kFlagsBitsEncodedInInstructionCode == 10);
-#ifdef DEBUG
-    static constexpr int kInstructionCodeFlagsMask =
-        ((1 << CallDescriptor::kFlagsBitsEncodedInInstructionCode) - 1);
-    DCHECK_EQ(static_cast<int>(flag) & kInstructionCodeFlagsMask, flag);
-#endif
-    return MiscField::decode(opcode()) & flag;
-  }
-
   enum GapPosition {
     START,
     END,
......
@@ -168,7 +168,8 @@ const InstructionBlock* MidTierRegisterAllocationData::GetBlock(
 }

 const BitVector* MidTierRegisterAllocationData::GetBlocksDominatedBy(
-    const InstructionBlock* block) {
+    int instr_index) {
+  const InstructionBlock* block = GetBlock(instr_index);
   return block_state(block->rpo_number()).dominated_blocks();
 }
@@ -282,12 +283,10 @@ class VirtualRegisterData final {
   // Define VirtualRegisterData with the type of output that produces this
   // virtual register.
   void DefineAsUnallocatedOperand(int virtual_register, int instr_index,
-                                  bool is_deferred_block,
-                                  bool is_exceptional_call_output);
+                                  bool is_deferred_block);
   void DefineAsFixedSpillOperand(AllocatedOperand* operand,
                                  int virtual_register, int instr_index,
-                                 bool is_deferred_block,
-                                 bool is_exceptional_call_output);
+                                 bool is_deferred_block);
   void DefineAsConstantOperand(ConstantOperand* operand, int instr_index,
                                bool is_deferred_block);
   void DefineAsPhi(int virtual_register, int instr_index,
@@ -365,9 +364,6 @@ class VirtualRegisterData final {
   bool is_defined_in_deferred_block() const {
     return is_defined_in_deferred_block_;
   }
-  bool is_exceptional_call_output() const {
-    return is_exceptional_call_output_;
-  }

   struct DeferredSpillSlotOutput {
    public:
@@ -385,11 +381,9 @@ class VirtualRegisterData final {
   class SpillRange : public ZoneObject {
    public:
     // Defines a spill range for an output operand.
-    SpillRange(int definition_instr_index,
-               const InstructionBlock* definition_block,
-               MidTierRegisterAllocationData* data)
+    SpillRange(int definition_instr_index, MidTierRegisterAllocationData* data)
         : live_range_(definition_instr_index, definition_instr_index),
-          live_blocks_(data->GetBlocksDominatedBy(definition_block)),
+          live_blocks_(data->GetBlocksDominatedBy(definition_instr_index)),
           deferred_spill_outputs_(nullptr) {}

     // Defines a spill range for a Phi variable.
@@ -397,7 +391,8 @@ class VirtualRegisterData final {
                MidTierRegisterAllocationData* data)
         : live_range_(phi_block->first_instruction_index(),
                       phi_block->first_instruction_index()),
-          live_blocks_(data->GetBlocksDominatedBy(phi_block)),
+          live_blocks_(
+              data->GetBlocksDominatedBy(phi_block->first_instruction_index())),
           deferred_spill_outputs_(nullptr) {
       // For phis, add the gap move instructions in the predecssor blocks to
       // the live range.
@@ -474,8 +469,7 @@ class VirtualRegisterData final {
   private:
   void Initialize(int virtual_register, InstructionOperand* spill_operand,
                   int instr_index, bool is_phi, bool is_constant,
-                  bool is_defined_in_deferred_block,
-                  bool is_exceptional_call_output);
+                  bool is_defined_in_deferred_block);

   void AddSpillUse(int instr_index, MidTierRegisterAllocationData* data);
   void AddPendingSpillOperand(PendingOperand* pending_operand);
@@ -491,7 +485,6 @@ class VirtualRegisterData final {
   bool is_constant_ : 1;
   bool is_defined_in_deferred_block_ : 1;
   bool needs_spill_at_output_ : 1;
-  bool is_exceptional_call_output_ : 1;
 };

 VirtualRegisterData& MidTierRegisterAllocationData::VirtualRegisterDataFor(
@@ -505,8 +498,7 @@ void VirtualRegisterData::Initialize(int virtual_register,
                                      InstructionOperand* spill_operand,
                                      int instr_index, bool is_phi,
                                      bool is_constant,
-                                     bool is_defined_in_deferred_block,
-                                     bool is_exceptional_call_output) {
+                                     bool is_defined_in_deferred_block) {
   vreg_ = virtual_register;
   spill_operand_ = spill_operand;
   spill_range_ = nullptr;
@@ -515,34 +507,34 @@ void VirtualRegisterData::Initialize(int virtual_register,
   is_constant_ = is_constant;
   is_defined_in_deferred_block_ = is_defined_in_deferred_block;
   needs_spill_at_output_ = !is_constant_ && spill_operand_ != nullptr;
-  is_exceptional_call_output_ = is_exceptional_call_output;
 }

 void VirtualRegisterData::DefineAsConstantOperand(ConstantOperand* operand,
                                                   int instr_index,
                                                   bool is_deferred_block) {
   Initialize(operand->virtual_register(), operand, instr_index, false, true,
-             is_deferred_block, false);
+             is_deferred_block);
 }

-void VirtualRegisterData::DefineAsFixedSpillOperand(
-    AllocatedOperand* operand, int virtual_register, int instr_index,
-    bool is_deferred_block, bool is_exceptional_call_output) {
+void VirtualRegisterData::DefineAsFixedSpillOperand(AllocatedOperand* operand,
+                                                    int virtual_register,
+                                                    int instr_index,
+                                                    bool is_deferred_block) {
   Initialize(virtual_register, operand, instr_index, false, false,
-             is_deferred_block, is_exceptional_call_output);
+             is_deferred_block);
 }

-void VirtualRegisterData::DefineAsUnallocatedOperand(
-    int virtual_register, int instr_index, bool is_deferred_block,
-    bool is_exceptional_call_output) {
+void VirtualRegisterData::DefineAsUnallocatedOperand(int virtual_register,
+                                                     int instr_index,
+                                                     bool is_deferred_block) {
   Initialize(virtual_register, nullptr, instr_index, false, false,
-             is_deferred_block, is_exceptional_call_output);
+             is_deferred_block);
 }

 void VirtualRegisterData::DefineAsPhi(int virtual_register, int instr_index,
                                       bool is_deferred_block) {
   Initialize(virtual_register, nullptr, instr_index, true, false,
-             is_deferred_block, false);
+             is_deferred_block);
 }

 void VirtualRegisterData::EnsureSpillRange(
@@ -550,27 +542,16 @@ void VirtualRegisterData::EnsureSpillRange(
   DCHECK(!is_constant());
   if (HasSpillRange()) return;

-  const InstructionBlock* definition_block =
-      data->GetBlock(output_instr_index_);
   if (is_phi()) {
     // Define a spill slot that is defined for the phi's range.
+    const InstructionBlock* definition_block =
+        data->code()->InstructionAt(output_instr_index_)->block();
     spill_range_ =
         data->allocation_zone()->New<SpillRange>(definition_block, data);
   } else {
-    if (is_exceptional_call_output()) {
-      // If this virtual register is output by a call which has an exception
-      // catch handler, then the output will only be live in the IfSuccess
-      // successor block, not the IfException side, so make the definition block
-      // the IfSuccess successor block explicitly.
-      DCHECK_EQ(output_instr_index_,
-                definition_block->last_instruction_index() - 1);
-      DCHECK_EQ(definition_block->SuccessorCount(), 2);
-      DCHECK(data->GetBlock(definition_block->successors()[1])->IsHandler());
-      definition_block = data->GetBlock(definition_block->successors()[0]);
-    }
     // The spill slot will be defined after the instruction that outputs it.
-    spill_range_ = data->allocation_zone()->New<SpillRange>(
-        output_instr_index_ + 1, definition_block, data);
+    spill_range_ =
+        data->allocation_zone()->New<SpillRange>(output_instr_index_ + 1, data);
   }
   data->spilled_virtual_registers().Add(vreg());
 }
@@ -2594,7 +2575,6 @@ void MidTierOutputProcessor::InitializeBlockState(
 void MidTierOutputProcessor::DefineOutputs(const InstructionBlock* block) {
   int block_start = block->first_instruction_index();
   bool is_deferred = block->IsDeferred();
-
   for (int index = block->last_instruction_index(); index >= block_start;
        index--) {
     Instruction* instr = code()->InstructionAt(index);
@@ -2613,9 +2593,6 @@ void MidTierOutputProcessor::DefineOutputs(const InstructionBlock* block) {
         UnallocatedOperand* unallocated_operand =
             UnallocatedOperand::cast(output);
         int virtual_register = unallocated_operand->virtual_register();
-        bool is_exceptional_call_output =
-            instr->IsCallWithDescriptorFlags() &&
-            instr->HasCallDescriptorFlag(CallDescriptor::kHasExceptionHandler);
         if (unallocated_operand->HasFixedSlotPolicy()) {
           // If output has a fixed slot policy, allocate its spill operand now
           // so that the register allocator can use this knowledge.
@@ -2625,12 +2602,10 @@ void MidTierOutputProcessor::DefineOutputs(const InstructionBlock* block) {
               unallocated_operand->fixed_slot_index());
           VirtualRegisterDataFor(virtual_register)
               .DefineAsFixedSpillOperand(fixed_spill_operand, virtual_register,
-                                         index, is_deferred,
-                                         is_exceptional_call_output);
+                                         index, is_deferred);
         } else {
           VirtualRegisterDataFor(virtual_register)
-              .DefineAsUnallocatedOperand(virtual_register, index, is_deferred,
-                                          is_exceptional_call_output);
+              .DefineAsUnallocatedOperand(virtual_register, index, is_deferred);
         }
       }
     }
......
@@ -57,8 +57,8 @@ class MidTierRegisterAllocationData final : public RegisterAllocationData {
   const InstructionBlock* GetBlock(int instr_index);

   // Returns a bitvector representing all the blocks that are dominated by the
-  // output of the instruction in |block|.
-  const BitVector* GetBlocksDominatedBy(const InstructionBlock* block);
+  // output of the instruction at |instr_index|.
+  const BitVector* GetBlocksDominatedBy(int instr_index);

   // List of all instruction indexs that require a reference map.
   ZoneVector<int>& reference_map_instructions() {
......
@@ -711,7 +711,7 @@ void AdjustStackPointerForTailCall(Instruction* instr,
                                    int new_slot_above_sp,
                                    bool allow_shrinkage = true) {
   int stack_slot_delta;
-  if (instr->HasCallDescriptorFlag(CallDescriptor::kIsTailCallForTierUp)) {
+  if (HasCallDescriptorFlag(instr, CallDescriptor::kIsTailCallForTierUp)) {
     // For this special tail-call mode, the callee has the same arguments and
     // linkage as the caller, and arguments adapter frames must be preserved.
     // Thus we simply have reset the stack pointer register to its original
@@ -757,7 +757,7 @@ void CodeGenerator::AssembleTailCallBeforeGap(Instruction* instr,
   if (!pushes.empty() &&
       (LocationOperand::cast(pushes.back()->destination()).index() + 1 ==
        first_unused_stack_slot)) {
-    DCHECK(!instr->HasCallDescriptorFlag(CallDescriptor::kIsTailCallForTierUp));
+    DCHECK(!HasCallDescriptorFlag(instr, CallDescriptor::kIsTailCallForTierUp));
     X64OperandConverter g(this, instr);
     for (auto move : pushes) {
       LocationOperand destination_location(
@@ -847,10 +847,10 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       } else {
         Register reg = i.InputRegister(0);
         DCHECK_IMPLIES(
-            instr->HasCallDescriptorFlag(CallDescriptor::kFixedTargetRegister),
+            HasCallDescriptorFlag(instr, CallDescriptor::kFixedTargetRegister),
             reg == kJavaScriptCallCodeStartRegister);
         __ LoadCodeObjectEntry(reg, reg);
-        if (instr->HasCallDescriptorFlag(CallDescriptor::kRetpoline)) {
+        if (HasCallDescriptorFlag(instr, CallDescriptor::kRetpoline)) {
           __ RetpolineCall(reg);
         } else {
           __ call(reg);
@@ -875,7 +875,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
         if (DetermineStubCallMode() == StubCallMode::kCallWasmRuntimeStub) {
           __ near_call(wasm_code, constant.rmode());
         } else {
-          if (instr->HasCallDescriptorFlag(CallDescriptor::kRetpoline)) {
+          if (HasCallDescriptorFlag(instr, CallDescriptor::kRetpoline)) {
            __ RetpolineCall(wasm_code, constant.rmode());
           } else {
             __ Call(wasm_code, constant.rmode());
@@ -883,7 +883,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
         }
       } else {
         Register reg = i.InputRegister(0);
-        if (instr->HasCallDescriptorFlag(CallDescriptor::kRetpoline)) {
+        if (HasCallDescriptorFlag(instr, CallDescriptor::kRetpoline)) {
           __ RetpolineCall(reg);
         } else {
           __ call(reg);
@@ -894,7 +894,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       break;
     }
     case kArchTailCallCodeObjectFromJSFunction:
-      if (!instr->HasCallDescriptorFlag(CallDescriptor::kIsTailCallForTierUp)) {
+      if (!HasCallDescriptorFlag(instr, CallDescriptor::kIsTailCallForTierUp)) {
         AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
                                          i.TempRegister(0), i.TempRegister(1),
                                          i.TempRegister(2));
@@ -907,10 +907,10 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       } else {
         Register reg = i.InputRegister(0);
         DCHECK_IMPLIES(
-            instr->HasCallDescriptorFlag(CallDescriptor::kFixedTargetRegister),
+            HasCallDescriptorFlag(instr, CallDescriptor::kFixedTargetRegister),
             reg == kJavaScriptCallCodeStartRegister);
         __ LoadCodeObjectEntry(reg, reg);
-        if (instr->HasCallDescriptorFlag(CallDescriptor::kRetpoline)) {
+        if (HasCallDescriptorFlag(instr, CallDescriptor::kRetpoline)) {
           __ RetpolineJump(reg);
         } else {
           __ jmp(reg);
@@ -933,7 +933,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
         }
       } else {
         Register reg = i.InputRegister(0);
-        if (instr->HasCallDescriptorFlag(CallDescriptor::kRetpoline)) {
+        if (HasCallDescriptorFlag(instr, CallDescriptor::kRetpoline)) {
          __ RetpolineJump(reg);
         } else {
           __ jmp(reg);
@@ -948,9 +948,9 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       CHECK(!HasImmediateInput(instr, 0));
       Register reg = i.InputRegister(0);
       DCHECK_IMPLIES(
-          instr->HasCallDescriptorFlag(CallDescriptor::kFixedTargetRegister),
+          HasCallDescriptorFlag(instr, CallDescriptor::kFixedTargetRegister),
           reg == kJavaScriptCallCodeStartRegister);
-      if (instr->HasCallDescriptorFlag(CallDescriptor::kRetpoline)) {
+      if (HasCallDescriptorFlag(instr, CallDescriptor::kRetpoline)) {
         __ RetpolineJump(reg);
       } else {
         __ jmp(reg);
......
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Flags: --allow-natives-syntax --turboprop --max-semi-space-size=1

function runNearStackLimit(f) {
  function t() {
    try {
      return t();
    } catch (e) {
      return f();
    }
  }
  %PrepareFunctionForOptimization(t);
  %OptimizeFunctionOnNextCall(t);
  return t();
}

function foo(a) {}
function bar(a, b) {}

for (let i = 0; i < 150; i++) {
  runNearStackLimit(() => {
    return foo(bar(3, 4) === false);
  });
}
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Flags: --allow-natives-syntax --turboprop --gc-interval=1000

function runNearStackLimit(f) {
  function t() {
    try {
      return t();
    } catch (e) {
      return f();
    }
  }
  %PrepareFunctionForOptimization(t);
  %OptimizeFunctionOnNextCall(t);
  return t();
}

function foo() {
  runNearStackLimit(() => {});
}

(function () {
  var a = 42;
  var b = 153;
  try {
    Object.defineProperty({});
  } catch (e) {}
  foo();
  foo();
})();

runNearStackLimit(() => {});