Commit 7619bde8 authored by Michael Starzinger, committed by Commit Bot

[turbofan] Remove obsolete {BlockCodeTargetSharingScope}.

By now we no longer emit calls with {RelocInfo::CODE_TARGET} as part of
WebAssembly code. Hence the requirement to block sharing of code targets
disappeared and the support can be dropped.

R=jarin@chromium.org

Change-Id: I6df026cd05769ddaa6ea8df5a7b17b62e8a7c373
Reviewed-on: https://chromium-review.googlesource.com/1100889
Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
Commit-Queue: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#53782}
parent a5b5f8e9
...@@ -541,7 +541,6 @@ Assembler::Assembler(IsolateData isolate_data, void* buffer, int buffer_size) ...@@ -541,7 +541,6 @@ Assembler::Assembler(IsolateData isolate_data, void* buffer, int buffer_size)
pending_64_bit_constants_.reserve(kMinNumPendingConstants); pending_64_bit_constants_.reserve(kMinNumPendingConstants);
reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_); reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);
next_buffer_check_ = 0; next_buffer_check_ = 0;
code_target_sharing_blocked_nesting_ = 0;
const_pool_blocked_nesting_ = 0; const_pool_blocked_nesting_ = 0;
no_const_pool_before_ = 0; no_const_pool_before_ = 0;
first_const_pool_32_use_ = -1; first_const_pool_32_use_ = -1;
...@@ -564,7 +563,6 @@ Assembler::Assembler(IsolateData isolate_data, void* buffer, int buffer_size) ...@@ -564,7 +563,6 @@ Assembler::Assembler(IsolateData isolate_data, void* buffer, int buffer_size)
Assembler::~Assembler() { Assembler::~Assembler() {
DCHECK_EQ(const_pool_blocked_nesting_, 0); DCHECK_EQ(const_pool_blocked_nesting_, 0);
DCHECK_EQ(code_target_sharing_blocked_nesting_, 0);
} }
void Assembler::GetCode(Isolate* isolate, CodeDesc* desc) { void Assembler::GetCode(Isolate* isolate, CodeDesc* desc) {
...@@ -5153,10 +5151,8 @@ void Assembler::ConstantPoolAddEntry(int position, RelocInfo::Mode rmode, ...@@ -5153,10 +5151,8 @@ void Assembler::ConstantPoolAddEntry(int position, RelocInfo::Mode rmode,
if (pending_32_bit_constants_.empty()) { if (pending_32_bit_constants_.empty()) {
first_const_pool_32_use_ = position; first_const_pool_32_use_ = position;
} }
ConstantPoolEntry entry(position, value, ConstantPoolEntry entry(
sharing_ok || (rmode == RelocInfo::CODE_TARGET && position, value, sharing_ok || (rmode == RelocInfo::CODE_TARGET), rmode);
IsCodeTargetSharingAllowed()),
rmode);
bool shared = false; bool shared = false;
if (sharing_ok) { if (sharing_ok) {
...@@ -5175,8 +5171,7 @@ void Assembler::ConstantPoolAddEntry(int position, RelocInfo::Mode rmode, ...@@ -5175,8 +5171,7 @@ void Assembler::ConstantPoolAddEntry(int position, RelocInfo::Mode rmode,
// Share entries if allowed and possible. // Share entries if allowed and possible.
// Null-values are placeholders and must be ignored. // Null-values are placeholders and must be ignored.
if (rmode == RelocInfo::CODE_TARGET && IsCodeTargetSharingAllowed() && if (rmode == RelocInfo::CODE_TARGET && value != 0) {
value != 0) {
// Sharing entries here relies on canonicalized handles - without them, we // Sharing entries here relies on canonicalized handles - without them, we
// will miss the optimisation opportunity. // will miss the optimisation opportunity.
Address handle_address = static_cast<Address>(value); Address handle_address = static_cast<Address>(value);
......
...@@ -1427,36 +1427,6 @@ class Assembler : public AssemblerBase { ...@@ -1427,36 +1427,6 @@ class Assembler : public AssemblerBase {
DISALLOW_IMPLICIT_CONSTRUCTORS(BlockConstPoolScope); DISALLOW_IMPLICIT_CONSTRUCTORS(BlockConstPoolScope);
}; };
// Class for blocking sharing of code targets in constant pool.
class BlockCodeTargetSharingScope {
public:
explicit BlockCodeTargetSharingScope(Assembler* assem) : assem_(nullptr) {
Open(assem);
}
// This constructor does not initialize the scope. The user needs to
// explicitly call Open() before using it.
BlockCodeTargetSharingScope() : assem_(nullptr) {}
~BlockCodeTargetSharingScope() {
Close();
}
void Open(Assembler* assem) {
DCHECK_NULL(assem_);
DCHECK_NOT_NULL(assem);
assem_ = assem;
assem_->StartBlockCodeTargetSharing();
}
private:
void Close() {
if (assem_ != nullptr) {
assem_->EndBlockCodeTargetSharing();
}
}
Assembler* assem_;
DISALLOW_COPY_AND_ASSIGN(BlockCodeTargetSharingScope);
};
// Record a comment relocation entry that can be used by a disassembler. // Record a comment relocation entry that can be used by a disassembler.
// Use --code-comments to enable. // Use --code-comments to enable.
void RecordComment(const char* msg); void RecordComment(const char* msg);
...@@ -1588,20 +1558,6 @@ class Assembler : public AssemblerBase { ...@@ -1588,20 +1558,6 @@ class Assembler : public AssemblerBase {
// Patch branch instruction at pos to branch to given branch target pos // Patch branch instruction at pos to branch to given branch target pos
void target_at_put(int pos, int target_pos); void target_at_put(int pos, int target_pos);
// Prevent sharing of code target constant pool entries until
// EndBlockCodeTargetSharing is called. Calls to this function can be nested
// but must be followed by an equal number of call to
// EndBlockCodeTargetSharing.
void StartBlockCodeTargetSharing() {
++code_target_sharing_blocked_nesting_;
}
// Resume sharing of constant pool code target entries. Needs to be called
// as many times as StartBlockCodeTargetSharing to have an effect.
void EndBlockCodeTargetSharing() {
--code_target_sharing_blocked_nesting_;
}
// Prevent contant pool emission until EndBlockConstPool is called. // Prevent contant pool emission until EndBlockConstPool is called.
// Calls to this function can be nested but must be followed by an equal // Calls to this function can be nested but must be followed by an equal
// number of call to EndBlockConstpool. // number of call to EndBlockConstpool.
...@@ -1709,12 +1665,6 @@ class Assembler : public AssemblerBase { ...@@ -1709,12 +1665,6 @@ class Assembler : public AssemblerBase {
static constexpr int kCheckPoolIntervalInst = 32; static constexpr int kCheckPoolIntervalInst = 32;
static constexpr int kCheckPoolInterval = kCheckPoolIntervalInst * kInstrSize; static constexpr int kCheckPoolInterval = kCheckPoolIntervalInst * kInstrSize;
// Sharing of code target entries may be blocked in some code sequences.
int code_target_sharing_blocked_nesting_;
bool IsCodeTargetSharingAllowed() const {
return code_target_sharing_blocked_nesting_ == 0;
}
// Emission of the constant pool may be blocked in some code sequences. // Emission of the constant pool may be blocked in some code sequences.
int const_pool_blocked_nesting_; // Block emission if this is not zero. int const_pool_blocked_nesting_; // Block emission if this is not zero.
int no_const_pool_before_; // Block emission before this pc offset. int no_const_pool_before_; // Block emission before this pc offset.
...@@ -1761,7 +1711,6 @@ class Assembler : public AssemblerBase { ...@@ -1761,7 +1711,6 @@ class Assembler : public AssemblerBase {
friend class RelocInfo; friend class RelocInfo;
friend class BlockConstPoolScope; friend class BlockConstPoolScope;
friend class BlockCodeTargetSharingScope;
friend class EnsureSpace; friend class EnsureSpace;
friend class UseScratchRegisterScope; friend class UseScratchRegisterScope;
}; };
......
...@@ -338,8 +338,7 @@ bool ConstPool::RecordEntry(intptr_t data, RelocInfo::Mode mode) { ...@@ -338,8 +338,7 @@ bool ConstPool::RecordEntry(intptr_t data, RelocInfo::Mode mode) {
if (CanBeShared(mode)) { if (CanBeShared(mode)) {
write_reloc_info = AddSharedEntry(shared_entries_, raw_data, offset); write_reloc_info = AddSharedEntry(shared_entries_, raw_data, offset);
} else if (mode == RelocInfo::CODE_TARGET && } else if (mode == RelocInfo::CODE_TARGET && raw_data != 0) {
assm_->IsCodeTargetSharingAllowed() && raw_data != 0) {
// A zero data value is a placeholder and must not be shared. // A zero data value is a placeholder and must not be shared.
write_reloc_info = AddSharedEntry(handle_to_index_map_, raw_data, offset); write_reloc_info = AddSharedEntry(handle_to_index_map_, raw_data, offset);
} else { } else {
...@@ -548,7 +547,6 @@ Assembler::Assembler(IsolateData isolate_data, void* buffer, int buffer_size) ...@@ -548,7 +547,6 @@ Assembler::Assembler(IsolateData isolate_data, void* buffer, int buffer_size)
unresolved_branches_() { unresolved_branches_() {
const_pool_blocked_nesting_ = 0; const_pool_blocked_nesting_ = 0;
veneer_pool_blocked_nesting_ = 0; veneer_pool_blocked_nesting_ = 0;
code_target_sharing_blocked_nesting_ = 0;
Reset(); Reset();
} }
...@@ -557,7 +555,6 @@ Assembler::~Assembler() { ...@@ -557,7 +555,6 @@ Assembler::~Assembler() {
DCHECK(constpool_.IsEmpty()); DCHECK(constpool_.IsEmpty());
DCHECK_EQ(const_pool_blocked_nesting_, 0); DCHECK_EQ(const_pool_blocked_nesting_, 0);
DCHECK_EQ(veneer_pool_blocked_nesting_, 0); DCHECK_EQ(veneer_pool_blocked_nesting_, 0);
DCHECK_EQ(code_target_sharing_blocked_nesting_, 0);
} }
...@@ -566,7 +563,6 @@ void Assembler::Reset() { ...@@ -566,7 +563,6 @@ void Assembler::Reset() {
DCHECK((pc_ >= buffer_) && (pc_ < buffer_ + buffer_size_)); DCHECK((pc_ >= buffer_) && (pc_ < buffer_ + buffer_size_));
DCHECK_EQ(const_pool_blocked_nesting_, 0); DCHECK_EQ(const_pool_blocked_nesting_, 0);
DCHECK_EQ(veneer_pool_blocked_nesting_, 0); DCHECK_EQ(veneer_pool_blocked_nesting_, 0);
DCHECK_EQ(code_target_sharing_blocked_nesting_, 0);
DCHECK(unresolved_branches_.empty()); DCHECK(unresolved_branches_.empty());
memset(buffer_, 0, pc_ - buffer_); memset(buffer_, 0, pc_ - buffer_);
#endif #endif
......
...@@ -3229,34 +3229,6 @@ class Assembler : public AssemblerBase { ...@@ -3229,34 +3229,6 @@ class Assembler : public AssemblerBase {
DISALLOW_IMPLICIT_CONSTRUCTORS(BlockPoolsScope); DISALLOW_IMPLICIT_CONSTRUCTORS(BlockPoolsScope);
}; };
// Class for blocking sharing of code targets in constant pool.
class BlockCodeTargetSharingScope {
public:
explicit BlockCodeTargetSharingScope(Assembler* assem) : assem_(nullptr) {
Open(assem);
}
// This constructor does not initialize the scope. The user needs to
// explicitly call Open() before using it.
BlockCodeTargetSharingScope() : assem_(nullptr) {}
~BlockCodeTargetSharingScope() { Close(); }
void Open(Assembler* assem) {
DCHECK_NULL(assem_);
DCHECK_NOT_NULL(assem);
assem_ = assem;
assem_->StartBlockCodeTargetSharing();
}
private:
void Close() {
if (assem_ != nullptr) {
assem_->EndBlockCodeTargetSharing();
}
}
Assembler* assem_;
DISALLOW_COPY_AND_ASSIGN(BlockCodeTargetSharingScope);
};
protected: protected:
inline const Register& AppropriateZeroRegFor(const CPURegister& reg) const; inline const Register& AppropriateZeroRegFor(const CPURegister& reg) const;
...@@ -3341,16 +3313,6 @@ class Assembler : public AssemblerBase { ...@@ -3341,16 +3313,6 @@ class Assembler : public AssemblerBase {
void RemoveBranchFromLabelLinkChain(Instruction* branch, Label* label, void RemoveBranchFromLabelLinkChain(Instruction* branch, Label* label,
Instruction* label_veneer = nullptr); Instruction* label_veneer = nullptr);
// Prevent sharing of code target constant pool entries until
// EndBlockCodeTargetSharing is called. Calls to this function can be nested
// but must be followed by an equal number of call to
// EndBlockCodeTargetSharing.
void StartBlockCodeTargetSharing() { ++code_target_sharing_blocked_nesting_; }
// Resume sharing of constant pool code target entries. Needs to be called
// as many times as StartBlockCodeTargetSharing to have an effect.
void EndBlockCodeTargetSharing() { --code_target_sharing_blocked_nesting_; }
private: private:
static uint32_t FPToImm8(double imm); static uint32_t FPToImm8(double imm);
...@@ -3530,12 +3492,6 @@ class Assembler : public AssemblerBase { ...@@ -3530,12 +3492,6 @@ class Assembler : public AssemblerBase {
// Emission of the veneer pools may be blocked in some code sequences. // Emission of the veneer pools may be blocked in some code sequences.
int veneer_pool_blocked_nesting_; // Block emission if this is not zero. int veneer_pool_blocked_nesting_; // Block emission if this is not zero.
// Sharing of code target entries may be blocked in some code sequences.
int code_target_sharing_blocked_nesting_;
bool IsCodeTargetSharingAllowed() const {
return code_target_sharing_blocked_nesting_ == 0;
}
// Relocation info generation // Relocation info generation
// Each relocation is encoded as a variable size value // Each relocation is encoded as a variable size value
static constexpr int kMaxRelocSize = RelocInfoWriter::kMaxSize; static constexpr int kMaxRelocSize = RelocInfoWriter::kMaxSize;
......
...@@ -669,11 +669,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( ...@@ -669,11 +669,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode); ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);
switch (arch_opcode) { switch (arch_opcode) {
case kArchCallCodeObject: { case kArchCallCodeObject: {
// We must not share code targets for calls to builtins for wasm code, as
// they might need to be patched individually.
internal::Assembler::BlockCodeTargetSharingScope scope;
if (info()->IsWasm()) scope.Open(tasm());
if (instr->InputAt(0)->IsImmediate()) { if (instr->InputAt(0)->IsImmediate()) {
__ Call(i.InputCode(0), RelocInfo::CODE_TARGET); __ Call(i.InputCode(0), RelocInfo::CODE_TARGET);
} else { } else {
...@@ -690,11 +685,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( ...@@ -690,11 +685,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break; break;
} }
case kArchCallWasmFunction: { case kArchCallWasmFunction: {
// We must not share code targets for calls to builtins for wasm code, as
// they might need to be patched individually.
internal::Assembler::BlockCodeTargetSharingScope scope;
if (info()->IsWasm()) scope.Open(tasm());
if (instr->InputAt(0)->IsImmediate()) { if (instr->InputAt(0)->IsImmediate()) {
Constant constant = i.ToConstant(instr->InputAt(0)); Constant constant = i.ToConstant(instr->InputAt(0));
Address wasm_code = static_cast<Address>(constant.ToInt32()); Address wasm_code = static_cast<Address>(constant.ToInt32());
...@@ -709,11 +699,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( ...@@ -709,11 +699,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
} }
case kArchTailCallCodeObjectFromJSFunction: case kArchTailCallCodeObjectFromJSFunction:
case kArchTailCallCodeObject: { case kArchTailCallCodeObject: {
// We must not share code targets for calls to builtins for wasm code, as
// they might need to be patched individually.
internal::Assembler::BlockCodeTargetSharingScope scope;
if (info()->IsWasm()) scope.Open(tasm());
if (arch_opcode == kArchTailCallCodeObjectFromJSFunction) { if (arch_opcode == kArchTailCallCodeObjectFromJSFunction) {
AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister, AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
i.TempRegister(0), i.TempRegister(1), i.TempRegister(0), i.TempRegister(1),
...@@ -736,11 +721,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( ...@@ -736,11 +721,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break; break;
} }
case kArchTailCallWasm: { case kArchTailCallWasm: {
// We must not share code targets for calls to builtins for wasm code, as
// they might need to be patched individually.
internal::Assembler::BlockCodeTargetSharingScope scope;
if (info()->IsWasm()) scope.Open(tasm());
if (instr->InputAt(0)->IsImmediate()) { if (instr->InputAt(0)->IsImmediate()) {
Constant constant = i.ToConstant(instr->InputAt(0)); Constant constant = i.ToConstant(instr->InputAt(0));
Address wasm_code = static_cast<Address>(constant.ToInt32()); Address wasm_code = static_cast<Address>(constant.ToInt32());
......
...@@ -601,11 +601,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( ...@@ -601,11 +601,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode); ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);
switch (arch_opcode) { switch (arch_opcode) {
case kArchCallCodeObject: { case kArchCallCodeObject: {
// We must not share code targets for calls to builtins for wasm code, as
// they might need to be patched individually.
internal::Assembler::BlockCodeTargetSharingScope scope;
if (info()->IsWasm()) scope.Open(tasm());
if (instr->InputAt(0)->IsImmediate()) { if (instr->InputAt(0)->IsImmediate()) {
__ Call(i.InputCode(0), RelocInfo::CODE_TARGET); __ Call(i.InputCode(0), RelocInfo::CODE_TARGET);
} else { } else {
...@@ -635,11 +630,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( ...@@ -635,11 +630,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
} }
case kArchTailCallCodeObjectFromJSFunction: case kArchTailCallCodeObjectFromJSFunction:
case kArchTailCallCodeObject: { case kArchTailCallCodeObject: {
// We must not share code targets for calls to builtins for wasm code, as
// they might need to be patched individually.
internal::Assembler::BlockCodeTargetSharingScope scope;
if (info()->IsWasm()) scope.Open(tasm());
if (arch_opcode == kArchTailCallCodeObjectFromJSFunction) { if (arch_opcode == kArchTailCallCodeObjectFromJSFunction) {
AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister, AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
i.TempRegister(0), i.TempRegister(1), i.TempRegister(0), i.TempRegister(1),
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment