Commit 37ecee6a authored by Dominik Inführ, committed by V8 LUCI CQ

[heap] Cache regular code object size

MemoryChunkLayout::MaxRegularCodeObjectSize() can be computed once during
heap initialization and cached. This should improve code object allocation
performance, since the method was previously called on every code object
allocation.

Bug: v8:11891
Change-Id: I870bd37202370aec89ef2db24264e363099bf8a0
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2966387
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Maya Lekova <mslekova@chromium.org>
Cr-Commit-Position: refs/heads/master@{#75215}
parent f420a264
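
The diff below moves the size limit behind a cached field. As a minimal standalone sketch of that pattern (simplified names and a placeholder size computation, not the actual V8 definitions): the value is computed once when the heap is constructed, and the per-allocation limit check only reads the cached field.

// Sketch only: simplified stand-ins for the V8 types; the placeholder
// computation takes the place of MemoryChunkLayout::MaxRegularCodeObjectSize().
#include <cassert>

namespace sketch {

constexpr int kMaxRegularHeapObjectSize = 128 * 1024;  // placeholder constant

enum class AllocationType { kYoung, kOld, kCode };

// Assume this is the comparatively expensive, page-layout-dependent computation.
int MaxRegularCodeObjectSize() { return kMaxRegularHeapObjectSize / 2; }

class Heap {
 public:
  // Computed once at heap initialization instead of on every allocation.
  Heap() : max_regular_code_object_size_(MaxRegularCodeObjectSize()) {}

  // Hot path: called for every allocation, so it only reads the cached field.
  int MaxRegularHeapObjectSize(AllocationType allocation) const {
    if (allocation == AllocationType::kCode) {
      return max_regular_code_object_size_;
    }
    return kMaxRegularHeapObjectSize;
  }

 private:
  int max_regular_code_object_size_;
};

}  // namespace sketch

int main() {
  sketch::Heap heap;
  assert(heap.MaxRegularHeapObjectSize(sketch::AllocationType::kCode) ==
         sketch::MaxRegularCodeObjectSize());
  return 0;
}

In the actual change, the cached value lives in the private Heap field max_regular_code_object_size_, set in the Heap constructor; Heap::MaxRegularHeapObjectSize becomes a non-static inline method, and Heap::AsHeap() / LocalHeap::AsHeap() are added so templated factory code can reach a Heap* from either isolate type.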
@@ -7,6 +7,7 @@
 #include "src/compiler/access-builder.h"
 #include "src/compiler/allocation-builder.h"
+#include "src/heap/heap-inl.h"
 #include "src/objects/arguments-inl.h"
 #include "src/objects/map-inl.h"
@@ -14,6 +15,16 @@ namespace v8 {
 namespace internal {
 namespace compiler {
 
+void AllocationBuilder::Allocate(int size, AllocationType allocation,
+                                 Type type) {
+  DCHECK_LE(size, isolate()->heap()->MaxRegularHeapObjectSize(allocation));
+  effect_ = graph()->NewNode(
+      common()->BeginRegion(RegionObservability::kNotObservable), effect_);
+  allocation_ = graph()->NewNode(simplified()->Allocate(type, allocation),
+                                 jsgraph()->Constant(size), effect_, control_);
+  effect_ = allocation_;
+}
+
 void AllocationBuilder::AllocateContext(int variadic_part_length, MapRef map) {
   DCHECK(base::IsInRange(map.instance_type(), FIRST_CONTEXT_TYPE,
                          LAST_CONTEXT_TYPE));
@@ -27,7 +38,6 @@ void AllocationBuilder::AllocateContext(int variadic_part_length, MapRef map) {
         jsgraph()->Constant(variadic_part_length));
 }
 
-// static
 bool AllocationBuilder::CanAllocateArray(int length, MapRef map,
                                          AllocationType allocation) {
   DCHECK(map.instance_type() == FIXED_ARRAY_TYPE ||
@@ -35,7 +45,7 @@ bool AllocationBuilder::CanAllocateArray(int length, MapRef map,
   int const size = (map.instance_type() == FIXED_ARRAY_TYPE)
                        ? FixedArray::SizeFor(length)
                        : FixedDoubleArray::SizeFor(length);
-  return size <= Heap::MaxRegularHeapObjectSize(allocation);
+  return size <= isolate()->heap()->MaxRegularHeapObjectSize(allocation);
 }
 
 // Compound allocation of a FixedArray.
@@ -50,11 +60,10 @@ void AllocationBuilder::AllocateArray(int length, MapRef map,
   Store(AccessBuilder::ForFixedArrayLength(), jsgraph()->Constant(length));
 }
 
-// static
 bool AllocationBuilder::CanAllocateSloppyArgumentElements(
     int length, MapRef map, AllocationType allocation) {
   int const size = SloppyArgumentsElements::SizeFor(length);
-  return size <= Heap::MaxRegularHeapObjectSize(allocation);
+  return size <= isolate()->heap()->MaxRegularHeapObjectSize(allocation);
 }
 
 void AllocationBuilder::AllocateSloppyArgumentElements(
...
@@ -25,16 +25,9 @@ class AllocationBuilder final {
         control_(control) {}
 
   // Primitive allocation of static size.
-  void Allocate(int size, AllocationType allocation = AllocationType::kYoung,
-                Type type = Type::Any()) {
-    DCHECK_LE(size, Heap::MaxRegularHeapObjectSize(allocation));
-    effect_ = graph()->NewNode(
-        common()->BeginRegion(RegionObservability::kNotObservable), effect_);
-    allocation_ =
-        graph()->NewNode(simplified()->Allocate(type, allocation),
-                         jsgraph()->Constant(size), effect_, control_);
-    effect_ = allocation_;
-  }
+  inline void Allocate(int size,
+                       AllocationType allocation = AllocationType::kYoung,
+                       Type type = Type::Any());
 
   // Primitive store into a field.
   void Store(const FieldAccess& access, Node* value) {
@@ -52,14 +45,14 @@ class AllocationBuilder final {
   inline void AllocateContext(int variadic_part_length, MapRef map);
 
   // Compound allocation of a FixedArray.
-  inline static bool CanAllocateArray(
+  inline bool CanAllocateArray(
       int length, MapRef map,
      AllocationType allocation = AllocationType::kYoung);
   inline void AllocateArray(int length, MapRef map,
                             AllocationType allocation = AllocationType::kYoung);
 
   // Compound allocation of a SloppyArgumentsElements
-  static inline bool CanAllocateSloppyArgumentElements(
+  inline bool CanAllocateSloppyArgumentElements(
      int length, MapRef map,
      AllocationType allocation = AllocationType::kYoung);
   inline void AllocateSloppyArgumentElements(
...
@@ -1501,13 +1501,15 @@ Node* JSCreateLowering::TryAllocateAliasedArguments(
     MapRef sloppy_arguments_elements_map =
         MakeRef(broker(), factory()->sloppy_arguments_elements_map());
-    if (!AllocationBuilder::CanAllocateSloppyArgumentElements(
-            mapped_count, sloppy_arguments_elements_map)) {
+    AllocationBuilder ab(jsgraph(), effect, control);
+
+    if (!ab.CanAllocateSloppyArgumentElements(mapped_count,
+                                              sloppy_arguments_elements_map)) {
       return nullptr;
     }
     MapRef fixed_array_map = MakeRef(broker(), factory()->fixed_array_map());
-    if (!AllocationBuilder::CanAllocateArray(argument_count, fixed_array_map)) {
+    if (!ab.CanAllocateArray(argument_count, fixed_array_map)) {
       return nullptr;
     }
@@ -1520,7 +1522,6 @@ Node* JSCreateLowering::TryAllocateAliasedArguments(
     // The unmapped argument values recorded in the frame state are stored yet
     // another indirection away and then linked into the parameter map below,
     // whereas mapped argument values are replaced with a hole instead.
-    AllocationBuilder ab(jsgraph(), effect, control);
     ab.AllocateArray(argument_count, fixed_array_map);
     for (int i = 0; i < mapped_count; ++i) {
       ab.Store(AccessBuilder::ForFixedArrayElement(), jsgraph()->Constant(i),
@@ -1566,10 +1567,14 @@ Node* JSCreateLowering::TryAllocateAliasedArguments(
     int mapped_count = parameter_count;
     MapRef sloppy_arguments_elements_map =
         MakeRef(broker(), factory()->sloppy_arguments_elements_map());
-    if (!AllocationBuilder::CanAllocateSloppyArgumentElements(
-            mapped_count, sloppy_arguments_elements_map)) {
+
+    {
+      AllocationBuilder ab(jsgraph(), effect, control);
+      if (!ab.CanAllocateSloppyArgumentElements(mapped_count,
+                                                sloppy_arguments_elements_map)) {
      return nullptr;
    }
+    }
 
     // From here on we are going to allocate a mapped (aka. aliased) elements
     // backing store. We do not statically know how many arguments exist, but
...
@@ -805,7 +805,8 @@ HeapObject FactoryBase<Impl>::AllocateRawArray(int size,
                                                AllocationType allocation) {
   HeapObject result = AllocateRaw(size, allocation);
   if (!V8_ENABLE_THIRD_PARTY_HEAP_BOOL &&
-      (size > Heap::MaxRegularHeapObjectSize(allocation)) &&
+      (size >
+       isolate()->heap()->AsHeap()->MaxRegularHeapObjectSize(allocation)) &&
       FLAG_use_marking_progress_bar) {
     BasicMemoryChunk* chunk = BasicMemoryChunk::FromHeapObject(result);
     chunk->SetFlag<AccessMode::ATOMIC>(MemoryChunk::HAS_PROGRESS_BAR);
...
@@ -409,7 +409,7 @@ MaybeHandle<FixedArray> Factory::TryNewFixedArray(
   AllocationResult allocation = heap->AllocateRaw(size, allocation_type);
   HeapObject result;
   if (!allocation.To(&result)) return MaybeHandle<FixedArray>();
-  if ((size > Heap::MaxRegularHeapObjectSize(allocation_type)) &&
+  if ((size > heap->MaxRegularHeapObjectSize(allocation_type)) &&
       FLAG_use_marking_progress_bar) {
     BasicMemoryChunk* chunk = BasicMemoryChunk::FromHeapObject(result);
     chunk->SetFlag<AccessMode::ATOMIC>(MemoryChunk::HAS_PROGRESS_BAR);
...
@@ -24,6 +24,7 @@
 #include "src/heap/heap.h"
 #include "src/heap/large-spaces.h"
 #include "src/heap/memory-allocator.h"
+#include "src/heap/memory-chunk-layout.h"
 #include "src/heap/memory-chunk.h"
 #include "src/heap/new-spaces-inl.h"
 #include "src/heap/paged-spaces-inl.h"
@@ -178,6 +179,16 @@ inline const base::AddressRegion& Heap::code_region() {
 #endif
 }
 
+int Heap::MaxRegularHeapObjectSize(AllocationType allocation) {
+  if (!V8_ENABLE_THIRD_PARTY_HEAP_BOOL &&
+      (allocation == AllocationType::kCode)) {
+    DCHECK_EQ(MemoryChunkLayout::MaxRegularCodeObjectSize(),
+              max_regular_code_object_size_);
+    return max_regular_code_object_size_;
+  }
+  return kMaxRegularHeapObjectSize;
+}
+
 AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationType type,
                                    AllocationOrigin origin,
                                    AllocationAlignment alignment) {
...
@@ -59,6 +59,7 @@
 #include "src/heap/marking-barrier-inl.h"
 #include "src/heap/marking-barrier.h"
 #include "src/heap/memory-chunk-inl.h"
+#include "src/heap/memory-chunk-layout.h"
 #include "src/heap/memory-measurement.h"
 #include "src/heap/memory-reducer.h"
 #include "src/heap/object-stats.h"
@@ -218,6 +219,8 @@ Heap::Heap()
   // Ensure old_generation_size_ is a multiple of kPageSize.
   DCHECK_EQ(0, max_old_generation_size() & (Page::kPageSize - 1));
 
+  max_regular_code_object_size_ = MemoryChunkLayout::MaxRegularCodeObjectSize();
+
   set_native_contexts_list(Smi::zero());
   set_allocation_sites_list(Smi::zero());
   set_dirty_js_finalization_registries_list(Smi::zero());
@@ -5064,15 +5067,6 @@ bool Heap::AllocationLimitOvershotByLargeMargin() {
   return v8_overshoot >= v8_margin || global_overshoot >= global_margin;
 }
 
-// static
-int Heap::MaxRegularHeapObjectSize(AllocationType allocation) {
-  if (!V8_ENABLE_THIRD_PARTY_HEAP_BOOL &&
-      (allocation == AllocationType::kCode)) {
-    return MemoryChunkLayout::MaxRegularCodeObjectSize();
-  }
-  return kMaxRegularHeapObjectSize;
-}
-
 bool Heap::ShouldOptimizeForLoadTime() {
   return isolate()->rail_mode() == PERFORMANCE_LOAD &&
          !AllocationLimitOvershotByLargeMargin() &&
...
@@ -920,6 +920,7 @@ class Heap {
   CodeRange* code_range() { return code_range_.get(); }
 
   LocalHeap* main_thread_local_heap() { return main_thread_local_heap_; }
+  Heap* AsHeap() { return this; }
 
   // ===========================================================================
   // Root set access. ==========================================================
@@ -1459,7 +1460,7 @@ class Heap {
   // which the size of the allocatable space per V8 page may depend on the OS
   // page size at runtime. You may use kMaxRegularHeapObjectSize as a constant
   // instead if you know the allocation isn't in the code spaces.
-  V8_EXPORT_PRIVATE static int MaxRegularHeapObjectSize(
+  inline V8_EXPORT_PRIVATE int MaxRegularHeapObjectSize(
       AllocationType allocation);
 
   // ===========================================================================
@@ -2418,6 +2419,8 @@ class Heap {
   bool deserialization_complete_ = false;
 
+  int max_regular_code_object_size_ = 0;
+
   bool fast_promotion_mode_ = false;
 
   // Used for testing purposes.
...
@@ -34,7 +34,7 @@ AllocationResult LocalHeap::AllocateRaw(int size_in_bytes, AllocationType type,
   // Each allocation is supposed to be a safepoint.
   Safepoint();
 
-  bool large_object = size_in_bytes > Heap::MaxRegularHeapObjectSize(type);
+  bool large_object = size_in_bytes > heap_->MaxRegularHeapObjectSize(type);
 
   CHECK_EQ(type, AllocationType::kOld);
   if (large_object)
...
@@ -90,6 +90,7 @@ class V8_EXPORT_PRIVATE LocalHeap {
   bool IsParked();
 
   Heap* heap() { return heap_; }
+  Heap* AsHeap() { return heap(); }
 
   MarkingBarrier* marking_barrier() { return marking_barrier_.get(); }
   ConcurrentAllocator* old_space_allocator() { return &old_space_allocator_; }
...
@@ -7123,7 +7123,7 @@ class TestAllocationTracker : public HeapObjectAllocationTracker {
 HEAP_TEST(CodeLargeObjectSpace) {
   Heap* heap = CcTest::heap();
   int size_in_bytes =
-      Heap::MaxRegularHeapObjectSize(AllocationType::kCode) + kTaggedSize;
+      heap->MaxRegularHeapObjectSize(AllocationType::kCode) + kTaggedSize;
   TestAllocationTracker allocation_tracker{size_in_bytes};
   heap->AddHeapObjectAllocationTracker(&allocation_tracker);
@@ -7157,7 +7157,7 @@ UNINITIALIZED_HEAP_TEST(CodeLargeObjectSpace64k) {
   // Allocate a regular code object.
   {
     int size_in_bytes =
-        Heap::MaxRegularHeapObjectSize(AllocationType::kCode) - kTaggedSize;
+        heap->MaxRegularHeapObjectSize(AllocationType::kCode) - kTaggedSize;
     TestAllocationTracker allocation_tracker{size_in_bytes};
     heap->AddHeapObjectAllocationTracker(&allocation_tracker);
@@ -7179,7 +7179,7 @@ UNINITIALIZED_HEAP_TEST(CodeLargeObjectSpace64k) {
   // Allocate a large code object.
   {
     int size_in_bytes =
-        Heap::MaxRegularHeapObjectSize(AllocationType::kCode) + kTaggedSize;
+        heap->MaxRegularHeapObjectSize(AllocationType::kCode) + kTaggedSize;
     TestAllocationTracker allocation_tracker{size_in_bytes};
     heap->AddHeapObjectAllocationTracker(&allocation_tracker);
...
@@ -5,10 +5,10 @@
 #include <memory>
 
 #include "include/v8.h"
 #include "src/codegen/code-desc.h"
 #include "src/execution/isolate.h"
 #include "src/handles/handles-inl.h"
+#include "src/heap/heap-inl.h"
 #include "test/cctest/cctest.h"
 
 namespace v8 {
@@ -61,7 +61,7 @@ TEST(Factory_CodeBuilder) {
   // Create a big function that ends up in CODE_LO_SPACE.
   const int instruction_size =
-      Heap::MaxRegularHeapObjectSize(AllocationType::kCode) + 1;
+      isolate->heap()->MaxRegularHeapObjectSize(AllocationType::kCode) + 1;
   std::unique_ptr<byte[]> instructions(new byte[instruction_size]);
   CodeDesc desc;
...