Commit 7619bde8 authored by Michael Starzinger, committed by Commit Bot

[turbofan] Remove obsolete {BlockCodeTargetSharingScope}.

By now we no longer emit calls with {RelocInfo::CODE_TARGET} as part of
WebAssembly code. Hence the requirement to block sharing of code targets
disappeared and the support can be dropped.

R=jarin@chromium.org

Change-Id: I6df026cd05769ddaa6ea8df5a7b17b62e8a7c373
Reviewed-on: https://chromium-review.googlesource.com/1100889
Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
Commit-Queue: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#53782}
parent a5b5f8e9
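
For context, the mechanism removed below was a plain nesting counter on the assembler: each StartBlockCodeTargetSharing() incremented it, each EndBlockCodeTargetSharing() decremented it, and sharing was permitted only while the counter was zero, so blocking regions could nest. A minimal sketch of that idea (member names mirror the deleted code below, but this is a simplification, not the exact V8 implementation):

class Assembler {
 public:
  // Calls may nest; every Start must be paired with exactly one End.
  void StartBlockCodeTargetSharing() { ++code_target_sharing_blocked_nesting_; }
  void EndBlockCodeTargetSharing() { --code_target_sharing_blocked_nesting_; }
  bool IsCodeTargetSharingAllowed() const {
    return code_target_sharing_blocked_nesting_ == 0;
  }

 private:
  int code_target_sharing_blocked_nesting_ = 0;
};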
@@ -541,7 +541,6 @@ Assembler::Assembler(IsolateData isolate_data, void* buffer, int buffer_size)
   pending_64_bit_constants_.reserve(kMinNumPendingConstants);
   reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);
   next_buffer_check_ = 0;
-  code_target_sharing_blocked_nesting_ = 0;
   const_pool_blocked_nesting_ = 0;
   no_const_pool_before_ = 0;
   first_const_pool_32_use_ = -1;
@@ -564,7 +563,6 @@ Assembler::Assembler(IsolateData isolate_data, void* buffer, int buffer_size)
 Assembler::~Assembler() {
   DCHECK_EQ(const_pool_blocked_nesting_, 0);
-  DCHECK_EQ(code_target_sharing_blocked_nesting_, 0);
 }

 void Assembler::GetCode(Isolate* isolate, CodeDesc* desc) {
@@ -5153,10 +5151,8 @@ void Assembler::ConstantPoolAddEntry(int position, RelocInfo::Mode rmode,
   if (pending_32_bit_constants_.empty()) {
     first_const_pool_32_use_ = position;
   }
-  ConstantPoolEntry entry(position, value,
-                          sharing_ok || (rmode == RelocInfo::CODE_TARGET &&
-                                         IsCodeTargetSharingAllowed()),
-                          rmode);
+  ConstantPoolEntry entry(
+      position, value, sharing_ok || (rmode == RelocInfo::CODE_TARGET), rmode);
   bool shared = false;
   if (sharing_ok) {
@@ -5175,8 +5171,7 @@ void Assembler::ConstantPoolAddEntry(int position, RelocInfo::Mode rmode,
     // Share entries if allowed and possible.
     // Null-values are placeholders and must be ignored.
-    if (rmode == RelocInfo::CODE_TARGET && IsCodeTargetSharingAllowed() &&
-        value != 0) {
+    if (rmode == RelocInfo::CODE_TARGET && value != 0) {
       // Sharing entries here relies on canonicalized handles - without them, we
       // will miss the optimisation opportunity.
       Address handle_address = static_cast<Address>(value);
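
The sharing path above keys constant-pool entries on the address of the canonicalized handle, so repeated calls to the same code object collapse into a single pool entry. A rough, self-contained sketch of that bookkeeping, assuming a simple map from handle address to pool index (ConstantPoolSketch and RecordCodeTarget are hypothetical names for illustration; only handle_to_index_map_ appears in the real code):

#include <cstdint>
#include <unordered_map>

using Address = std::uintptr_t;

class ConstantPoolSketch {
 public:
  // Returns the pool index for |handle_address|, reusing an existing entry
  // when the same canonicalized handle was recorded before.
  int RecordCodeTarget(Address handle_address) {
    auto it = handle_to_index_map_.find(handle_address);
    if (it != handle_to_index_map_.end()) return it->second;  // shared entry
    int index = next_index_++;
    handle_to_index_map_[handle_address] = index;
    return index;
  }

 private:
  std::unordered_map<Address, int> handle_to_index_map_;
  int next_index_ = 0;
};

Without canonicalized handles, two handles to the same Code object would have different addresses and the lookup above would never hit, which is exactly the missed optimisation the comment warns about.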
@@ -1427,36 +1427,6 @@ class Assembler : public AssemblerBase {
     DISALLOW_IMPLICIT_CONSTRUCTORS(BlockConstPoolScope);
   };

-  // Class for blocking sharing of code targets in constant pool.
-  class BlockCodeTargetSharingScope {
-   public:
-    explicit BlockCodeTargetSharingScope(Assembler* assem) : assem_(nullptr) {
-      Open(assem);
-    }
-    // This constructor does not initialize the scope. The user needs to
-    // explicitly call Open() before using it.
-    BlockCodeTargetSharingScope() : assem_(nullptr) {}
-    ~BlockCodeTargetSharingScope() {
-      Close();
-    }
-    void Open(Assembler* assem) {
-      DCHECK_NULL(assem_);
-      DCHECK_NOT_NULL(assem);
-      assem_ = assem;
-      assem_->StartBlockCodeTargetSharing();
-    }
-
-   private:
-    void Close() {
-      if (assem_ != nullptr) {
-        assem_->EndBlockCodeTargetSharing();
-      }
-    }
-
-    Assembler* assem_;
-
-    DISALLOW_COPY_AND_ASSIGN(BlockCodeTargetSharingScope);
-  };
-
   // Record a comment relocation entry that can be used by a disassembler.
   // Use --code-comments to enable.
   void RecordComment(const char* msg);
@@ -1588,20 +1558,6 @@ class Assembler : public AssemblerBase {
   // Patch branch instruction at pos to branch to given branch target pos
   void target_at_put(int pos, int target_pos);

-  // Prevent sharing of code target constant pool entries until
-  // EndBlockCodeTargetSharing is called. Calls to this function can be nested
-  // but must be followed by an equal number of call to
-  // EndBlockCodeTargetSharing.
-  void StartBlockCodeTargetSharing() {
-    ++code_target_sharing_blocked_nesting_;
-  }
-
-  // Resume sharing of constant pool code target entries. Needs to be called
-  // as many times as StartBlockCodeTargetSharing to have an effect.
-  void EndBlockCodeTargetSharing() {
-    --code_target_sharing_blocked_nesting_;
-  }
-
   // Prevent contant pool emission until EndBlockConstPool is called.
   // Calls to this function can be nested but must be followed by an equal
   // number of call to EndBlockConstpool.
@@ -1709,12 +1665,6 @@ class Assembler : public AssemblerBase {
   static constexpr int kCheckPoolIntervalInst = 32;
   static constexpr int kCheckPoolInterval = kCheckPoolIntervalInst * kInstrSize;

-  // Sharing of code target entries may be blocked in some code sequences.
-  int code_target_sharing_blocked_nesting_;
-  bool IsCodeTargetSharingAllowed() const {
-    return code_target_sharing_blocked_nesting_ == 0;
-  }
-
   // Emission of the constant pool may be blocked in some code sequences.
   int const_pool_blocked_nesting_;  // Block emission if this is not zero.
   int no_const_pool_before_;        // Block emission before this pc offset.
@@ -1761,7 +1711,6 @@ class Assembler : public AssemblerBase {
   friend class RelocInfo;
   friend class BlockConstPoolScope;
-  friend class BlockCodeTargetSharingScope;
   friend class EnsureSpace;
   friend class UseScratchRegisterScope;
 };
@@ -338,8 +338,7 @@ bool ConstPool::RecordEntry(intptr_t data, RelocInfo::Mode mode) {
   if (CanBeShared(mode)) {
     write_reloc_info = AddSharedEntry(shared_entries_, raw_data, offset);
-  } else if (mode == RelocInfo::CODE_TARGET &&
-             assm_->IsCodeTargetSharingAllowed() && raw_data != 0) {
+  } else if (mode == RelocInfo::CODE_TARGET && raw_data != 0) {
     // A zero data value is a placeholder and must not be shared.
     write_reloc_info = AddSharedEntry(handle_to_index_map_, raw_data, offset);
   } else {
@@ -548,7 +547,6 @@ Assembler::Assembler(IsolateData isolate_data, void* buffer, int buffer_size)
       unresolved_branches_() {
   const_pool_blocked_nesting_ = 0;
   veneer_pool_blocked_nesting_ = 0;
-  code_target_sharing_blocked_nesting_ = 0;
   Reset();
 }
@@ -557,7 +555,6 @@ Assembler::~Assembler() {
   DCHECK(constpool_.IsEmpty());
   DCHECK_EQ(const_pool_blocked_nesting_, 0);
   DCHECK_EQ(veneer_pool_blocked_nesting_, 0);
-  DCHECK_EQ(code_target_sharing_blocked_nesting_, 0);
 }
@@ -566,7 +563,6 @@ void Assembler::Reset() {
   DCHECK((pc_ >= buffer_) && (pc_ < buffer_ + buffer_size_));
   DCHECK_EQ(const_pool_blocked_nesting_, 0);
   DCHECK_EQ(veneer_pool_blocked_nesting_, 0);
-  DCHECK_EQ(code_target_sharing_blocked_nesting_, 0);
   DCHECK(unresolved_branches_.empty());
   memset(buffer_, 0, pc_ - buffer_);
 #endif
@@ -3229,34 +3229,6 @@ class Assembler : public AssemblerBase {
     DISALLOW_IMPLICIT_CONSTRUCTORS(BlockPoolsScope);
   };

-  // Class for blocking sharing of code targets in constant pool.
-  class BlockCodeTargetSharingScope {
-   public:
-    explicit BlockCodeTargetSharingScope(Assembler* assem) : assem_(nullptr) {
-      Open(assem);
-    }
-    // This constructor does not initialize the scope. The user needs to
-    // explicitly call Open() before using it.
-    BlockCodeTargetSharingScope() : assem_(nullptr) {}
-    ~BlockCodeTargetSharingScope() { Close(); }
-    void Open(Assembler* assem) {
-      DCHECK_NULL(assem_);
-      DCHECK_NOT_NULL(assem);
-      assem_ = assem;
-      assem_->StartBlockCodeTargetSharing();
-    }
-
-   private:
-    void Close() {
-      if (assem_ != nullptr) {
-        assem_->EndBlockCodeTargetSharing();
-      }
-    }
-
-    Assembler* assem_;
-
-    DISALLOW_COPY_AND_ASSIGN(BlockCodeTargetSharingScope);
-  };
-
  protected:
   inline const Register& AppropriateZeroRegFor(const CPURegister& reg) const;
@@ -3341,16 +3313,6 @@ class Assembler : public AssemblerBase {
   void RemoveBranchFromLabelLinkChain(Instruction* branch, Label* label,
                                       Instruction* label_veneer = nullptr);

-  // Prevent sharing of code target constant pool entries until
-  // EndBlockCodeTargetSharing is called. Calls to this function can be nested
-  // but must be followed by an equal number of call to
-  // EndBlockCodeTargetSharing.
-  void StartBlockCodeTargetSharing() { ++code_target_sharing_blocked_nesting_; }
-
-  // Resume sharing of constant pool code target entries. Needs to be called
-  // as many times as StartBlockCodeTargetSharing to have an effect.
-  void EndBlockCodeTargetSharing() { --code_target_sharing_blocked_nesting_; }
-
  private:
   static uint32_t FPToImm8(double imm);
@@ -3530,12 +3492,6 @@ class Assembler : public AssemblerBase {
   // Emission of the veneer pools may be blocked in some code sequences.
   int veneer_pool_blocked_nesting_;  // Block emission if this is not zero.

-  // Sharing of code target entries may be blocked in some code sequences.
-  int code_target_sharing_blocked_nesting_;
-  bool IsCodeTargetSharingAllowed() const {
-    return code_target_sharing_blocked_nesting_ == 0;
-  }
-
   // Relocation info generation
   // Each relocation is encoded as a variable size value
   static constexpr int kMaxRelocSize = RelocInfoWriter::kMaxSize;
@@ -669,11 +669,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
   ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);
   switch (arch_opcode) {
     case kArchCallCodeObject: {
-      // We must not share code targets for calls to builtins for wasm code, as
-      // they might need to be patched individually.
-      internal::Assembler::BlockCodeTargetSharingScope scope;
-      if (info()->IsWasm()) scope.Open(tasm());
-
       if (instr->InputAt(0)->IsImmediate()) {
         __ Call(i.InputCode(0), RelocInfo::CODE_TARGET);
       } else {
@@ -690,11 +685,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       break;
     }
     case kArchCallWasmFunction: {
-      // We must not share code targets for calls to builtins for wasm code, as
-      // they might need to be patched individually.
-      internal::Assembler::BlockCodeTargetSharingScope scope;
-      if (info()->IsWasm()) scope.Open(tasm());
-
       if (instr->InputAt(0)->IsImmediate()) {
         Constant constant = i.ToConstant(instr->InputAt(0));
         Address wasm_code = static_cast<Address>(constant.ToInt32());
@@ -709,11 +699,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
     }
     case kArchTailCallCodeObjectFromJSFunction:
     case kArchTailCallCodeObject: {
-      // We must not share code targets for calls to builtins for wasm code, as
-      // they might need to be patched individually.
-      internal::Assembler::BlockCodeTargetSharingScope scope;
-      if (info()->IsWasm()) scope.Open(tasm());
-
       if (arch_opcode == kArchTailCallCodeObjectFromJSFunction) {
         AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
                                          i.TempRegister(0), i.TempRegister(1),
@@ -736,11 +721,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       break;
     }
     case kArchTailCallWasm: {
-      // We must not share code targets for calls to builtins for wasm code, as
-      // they might need to be patched individually.
-      internal::Assembler::BlockCodeTargetSharingScope scope;
-      if (info()->IsWasm()) scope.Open(tasm());
-
       if (instr->InputAt(0)->IsImmediate()) {
         Constant constant = i.ToConstant(instr->InputAt(0));
         Address wasm_code = static_cast<Address>(constant.ToInt32());
@@ -601,11 +601,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
   ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);
   switch (arch_opcode) {
     case kArchCallCodeObject: {
-      // We must not share code targets for calls to builtins for wasm code, as
-      // they might need to be patched individually.
-      internal::Assembler::BlockCodeTargetSharingScope scope;
-      if (info()->IsWasm()) scope.Open(tasm());
-
       if (instr->InputAt(0)->IsImmediate()) {
         __ Call(i.InputCode(0), RelocInfo::CODE_TARGET);
       } else {
@@ -635,11 +630,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
     }
     case kArchTailCallCodeObjectFromJSFunction:
     case kArchTailCallCodeObject: {
-      // We must not share code targets for calls to builtins for wasm code, as
-      // they might need to be patched individually.
-      internal::Assembler::BlockCodeTargetSharingScope scope;
-      if (info()->IsWasm()) scope.Open(tasm());
-
       if (arch_opcode == kArchTailCallCodeObjectFromJSFunction) {
         AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
                                          i.TempRegister(0), i.TempRegister(1),
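
All of the deleted call sites above follow the same deferred-open RAII idiom: declare an unopened scope up front, then open it only when the compilation is WebAssembly, so the destructor unblocks sharing exactly when it was blocked. A self-contained sketch of that idiom with simplified stand-ins (MaybeBlockScope and the trimmed Assembler are illustrative, not the V8 classes):

#include <cassert>
#include <cstdio>

class Assembler {
 public:
  void StartBlock() { ++nesting_; }
  void EndBlock() { --nesting_; }
  bool blocked() const { return nesting_ > 0; }

 private:
  int nesting_ = 0;
};

// Scope that may be opened after construction; the destructor only
// unblocks if Open() was actually called.
class MaybeBlockScope {
 public:
  MaybeBlockScope() = default;
  MaybeBlockScope(const MaybeBlockScope&) = delete;
  MaybeBlockScope& operator=(const MaybeBlockScope&) = delete;
  ~MaybeBlockScope() {
    if (assem_ != nullptr) assem_->EndBlock();
  }
  void Open(Assembler* assem) {
    assert(assem_ == nullptr);
    assem_ = assem;
    assem_->StartBlock();
  }

 private:
  Assembler* assem_ = nullptr;
};

int main() {
  Assembler assm;
  bool is_wasm = true;  // stand-in for info()->IsWasm()
  {
    MaybeBlockScope scope;           // unopened: no effect yet
    if (is_wasm) scope.Open(&assm);  // block only for wasm compilations
    std::printf("blocked inside scope: %d\n", assm.blocked());  // prints 1
  }
  std::printf("blocked after scope: %d\n", assm.blocked());  // prints 0
  return 0;
}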