Commit 8003bbbe authored by Camillo Bruni, committed by Commit Bot

[sparkplug] Preallocate instruction buffer

We have roughly a 7x ratio (5x on ia32) between bytecode size and the sparkplug
code generated from it. Using this ratio to preallocate the buffer for the
emitted code, we can avoid a few buffer copies for larger functions (a
standalone sketch of the heuristic follows the commit metadata below).

Drive-by-fix: Make sure EnsureSpace is marked V8_INLINE (see the note on this macro after the diff).

Bug: v8:11420
Change-Id: I6ec48717d2e030c6118c59f5cdc286c952ec2843
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2835732
Commit-Queue: Camillo Bruni <cbruni@chromium.org>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/master@{#74085}
parent cc0d6a85
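
For illustration, here is a minimal standalone sketch of the sizing heuristic, assuming a hypothetical EstimateBufferSize helper and plain integer rounding in place of V8's RoundUp/KB utilities; the actual change adds an AllocateBuffer helper in the first hunk below.

// Sketch only, not V8 code: size the assembler buffer up front from the
// bytecode length, since Sparkplug output is roughly 7x the bytecode size
// (about 5x on ia32), so that GrowBuffer() copies become rare.
#ifdef V8_TARGET_ARCH_IA32
constexpr int kAverageBytecodeToInstructionRatio = 5;
#else
constexpr int kAverageBytecodeToInstructionRatio = 7;
#endif

// Hypothetical helper: estimated buffer size in bytes, rounded up to a
// 4 KB multiple (mirroring RoundUp(estimated_size, 4 * KB) in the diff).
constexpr int EstimateBufferSize(int bytecode_length) {
  constexpr int kAlignment = 4 * 1024;
  const int estimated_size =
      bytecode_length * kAverageBytecodeToInstructionRatio;
  return ((estimated_size + kAlignment - 1) / kAlignment) * kAlignment;
}

For example, a 10,000-byte bytecode array yields an estimate of 70,000 bytes, rounded up to 73,728 bytes (72 KB); the idea is that most functions then fit without growing the buffer mid-compile.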
@@ -230,6 +230,21 @@ void MoveArgumentsForBuiltin(BaselineAssembler* masm, Args... args) {
 }  // namespace detail
+namespace {
+// Rough upper-bound estimate. Copying the data is most likely more expensive
+// than pre-allocating a large enough buffer.
+#ifdef V8_TARGET_ARCH_IA32
+const int kAverageBytecodeToInstructionRatio = 5;
+#else
+const int kAverageBytecodeToInstructionRatio = 7;
+#endif
+std::unique_ptr<AssemblerBuffer> AllocateBuffer(
+    Handle<BytecodeArray> bytecodes) {
+  int estimated_size = bytecodes->length() * kAverageBytecodeToInstructionRatio;
+  return NewAssemblerBuffer(RoundUp(estimated_size, 4 * KB));
+}
+}  // namespace
 BaselineCompiler::BaselineCompiler(
     Isolate* isolate, Handle<SharedFunctionInfo> shared_function_info,
     Handle<BytecodeArray> bytecode)
@@ -237,7 +252,7 @@ BaselineCompiler::BaselineCompiler(
       stats_(isolate->counters()->runtime_call_stats()),
       shared_function_info_(shared_function_info),
       bytecode_(bytecode),
-      masm_(isolate, CodeObjectRequired::kNo),
+      masm_(isolate, CodeObjectRequired::kNo, AllocateBuffer(bytecode)),
       basm_(&masm_),
       iterator_(bytecode_),
       zone_(isolate->allocator(), ZONE_NAME),
...
@@ -206,7 +206,7 @@ Operand::Operand(Smi value) : rmode_(RelocInfo::NONE) {
 Operand::Operand(Register rm) : rm_(rm), shift_op_(LSL), shift_imm_(0) {}
 void Assembler::CheckBuffer() {
-  if (buffer_space() <= kGap) {
+  if (V8_UNLIKELY(buffer_space() <= kGap)) {
     GrowBuffer();
   }
   MaybeCheckConstPool();
...
@@ -1165,8 +1165,8 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
   // Check if is time to emit a constant pool.
   void CheckConstPool(bool force_emit, bool require_jump);
-  void MaybeCheckConstPool() {
-    if (pc_offset() >= next_buffer_check_) {
+  V8_INLINE void MaybeCheckConstPool() {
+    if (V8_UNLIKELY(pc_offset() >= next_buffer_check_)) {
       CheckConstPool(false, true);
     }
   }
@@ -1298,7 +1298,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
   // The bound position, before this we cannot do instruction elimination.
   int last_bound_pos_;
-  inline void CheckBuffer();
+  V8_INLINE void CheckBuffer();
   void GrowBuffer();
   // Instruction generation
...
@@ -1072,12 +1072,12 @@ const Register& Assembler::AppropriateZeroRegFor(const CPURegister& reg) const {
 inline void Assembler::CheckBufferSpace() {
   DCHECK_LT(pc_, buffer_start_ + buffer_->size());
-  if (buffer_space() < kGap) {
+  if (V8_UNLIKELY(buffer_space() < kGap)) {
     GrowBuffer();
   }
 }
-inline void Assembler::CheckBuffer() {
+V8_INLINE void Assembler::CheckBuffer() {
   CheckBufferSpace();
   if (pc_offset() >= next_veneer_pool_check_) {
     CheckVeneerPool(false, true);
@@ -1085,6 +1085,10 @@ inline void Assembler::CheckBuffer() {
   constpool_.MaybeCheck();
 }
+EnsureSpace::EnsureSpace(Assembler* assembler) : block_pools_scope_(assembler) {
+  assembler->CheckBufferSpace();
+}
 }  // namespace internal
 }  // namespace v8
...
@@ -2634,7 +2634,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
   }
   void GrowBuffer();
-  void CheckBufferSpace();
+  V8_INLINE void CheckBufferSpace();
   void CheckBuffer();
   // Emission of the veneer pools may be blocked in some code sequences.
@@ -2786,9 +2786,7 @@ class PatchingAssembler : public Assembler {
 class EnsureSpace {
  public:
-  explicit EnsureSpace(Assembler* assembler) : block_pools_scope_(assembler) {
-    assembler->CheckBufferSpace();
-  }
+  explicit V8_INLINE EnsureSpace(Assembler* assembler);
  private:
   Assembler::BlockPoolsScope block_pools_scope_;
...
@@ -1936,10 +1936,10 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
 // checks that we did not generate too much.
 class EnsureSpace {
  public:
-  explicit EnsureSpace(Assembler* assembler) : assembler_(assembler) {
-    if (assembler_->buffer_overflow()) assembler_->GrowBuffer();
+  explicit V8_INLINE EnsureSpace(Assembler* assembler) : assembler_(assembler) {
+    if (V8_UNLIKELY(assembler_->buffer_overflow())) assembler_->GrowBuffer();
 #ifdef DEBUG
-    space_before_ = assembler_->available_space();
+    space_before_ = assembler->available_space();
 #endif
   }
@@ -1951,7 +1951,7 @@ class EnsureSpace {
 #endif
  private:
-  Assembler* assembler_;
+  Assembler* const assembler_;
 #ifdef DEBUG
   int space_before_;
 #endif
...
@@ -1907,7 +1907,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
 class EnsureSpace {
  public:
-  explicit inline EnsureSpace(Assembler* assembler);
+  explicit V8_INLINE EnsureSpace(Assembler* assembler);
 };
 class V8_EXPORT_PRIVATE V8_NODISCARD UseScratchRegisterScope {
...
@@ -2376,8 +2376,8 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
 // checks that we did not generate too much.
 class EnsureSpace {
  public:
-  explicit EnsureSpace(Assembler* assembler) : assembler_(assembler) {
-    if (assembler_->buffer_overflow()) assembler_->GrowBuffer();
+  explicit V8_INLINE EnsureSpace(Assembler* assembler) : assembler_(assembler) {
+    if (V8_UNLIKELY(assembler_->buffer_overflow())) assembler_->GrowBuffer();
 #ifdef DEBUG
     space_before_ = assembler_->available_space();
 #endif
@@ -2391,7 +2391,7 @@ class EnsureSpace {
 #endif
  private:
-  Assembler* assembler_;
+  Assembler* const assembler_;
 #ifdef DEBUG
   int space_before_;
 #endif
...
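
A side note on the two macros that recur in the hunks above: V8_INLINE and V8_UNLIKELY come from include/v8config.h. A simplified sketch of what they expand to on GCC/Clang (the real definitions go through feature detection and also cover other compilers such as MSVC):

// Simplified sketch of the compiler-hint macros; see include/v8config.h
// for the actual, feature-detected definitions.
#if defined(__GNUC__) || defined(__clang__)
#define V8_INLINE inline __attribute__((always_inline))
#define V8_UNLIKELY(condition) (__builtin_expect(!!(condition), 0))
#else
#define V8_INLINE inline
#define V8_UNLIKELY(condition) (condition)
#endif

This is why the drive-by fix is worthwhile: EnsureSpace sits on the hot code-emission path, so force-inlining its constructor and marking the GrowBuffer() branches as unlikely keeps the common "buffer still has space" case cheap.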