Commit 8318fcfb authored by Darius Mercadier, committed by Commit Bot

[heap] Add tracing of allocations origins

Bug: v8:9329
Change-Id: Id92ab58179a5b5765560f22beefef842055d7e28
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1715461
Commit-Queue: Darius Mercadier <dmercadier@google.com>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#62950}
parent eb3935f4
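
What the change does: each space now keeps per-origin allocation counters (generated code, runtime, GC), and the new --trace-allocations-origins flag prints them when sweeping completes. A minimal usage sketch, e.g. with V8's d8 shell; the invocation and counter values below are illustrative, only the line format follows the PrintIsolate format string added to spaces.cc:

  d8 --trace-allocations-origins --no-inline-new test.js
  ...
  Allocations Origins for old_space: GeneratedCode:0 - Runtime:4821 - GC:137

As the flag help text notes, --no-inline-new matters because allocations inlined into generated code never reach the C++ allocation path where the counters are updated; disabling inline allocation routes them through Runtime_AllocateInYoungGeneration / Runtime_AllocateInOldGeneration, which now pass AllocationOrigin::kGeneratedCode.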
@@ -802,6 +802,10 @@ DEFINE_BOOL(trace_gc_freelists_verbose, false,
 DEFINE_IMPLICATION(trace_gc_freelists_verbose, trace_gc_freelists)
 DEFINE_BOOL(trace_evacuation_candidates, false,
             "Show statistics about the pages evacuation by the compaction")
+DEFINE_BOOL(
+    trace_allocations_origins, false,
+    "Show statistics about the origins of allocations. "
+    "Combine with --no-inline-new to track allocations from generated code")
 DEFINE_INT(gc_freelist_strategy, 0,
            "Freelist strategy to use: "
            "1=FreeListFastAlloc. 2=FreeListMany. Anything else=FreeListLegacy")
...
@@ -285,11 +285,12 @@ HeapObject Factory::New(Handle<Map> map, AllocationType allocation) {
 }
 
 Handle<HeapObject> Factory::NewFillerObject(int size, bool double_align,
-                                            AllocationType allocation) {
+                                            AllocationType allocation,
+                                            AllocationOrigin origin) {
   AllocationAlignment alignment = double_align ? kDoubleAligned : kWordAligned;
   Heap* heap = isolate()->heap();
   HeapObject result =
-      heap->AllocateRawWithRetryOrFail(size, allocation, alignment);
+      heap->AllocateRawWithRetryOrFail(size, allocation, origin, alignment);
   heap->CreateFillerObjectAt(result.address(), size, ClearRecordedSlots::kNo);
   return Handle<HeapObject>(result, isolate());
 }
...
@@ -521,8 +521,9 @@ class V8_EXPORT_PRIVATE Factory {
   // Allocate a block of memory of the given AllocationType (filled with a
   // filler). Used as a fall-back for generated code when the space is full.
-  Handle<HeapObject> NewFillerObject(int size, bool double_align,
-                                     AllocationType allocation);
+  Handle<HeapObject> NewFillerObject(
+      int size, bool double_align, AllocationType allocation,
+      AllocationOrigin origin = AllocationOrigin::kRuntime);
 
   Handle<JSObject> NewFunctionPrototype(Handle<JSFunction> function);
...
@@ -391,6 +391,12 @@ void GCTracer::NotifySweepingCompleted() {
                  "FreeLists statistics after sweeping completed:\n");
     heap_->PrintFreeListsStats();
   }
+  if (FLAG_trace_allocations_origins) {
+    heap_->new_space()->PrintAllocationsOrigins();
+    heap_->old_space()->PrintAllocationsOrigins();
+    heap_->code_space()->PrintAllocationsOrigins();
+    heap_->map_space()->PrintAllocationsOrigins();
+  }
 }
 
 void GCTracer::SampleAllocation(double current_ms,
...
@@ -159,6 +159,7 @@ size_t Heap::NewSpaceAllocationCounter() {
 }
 
 AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationType type,
+                                   AllocationOrigin origin,
                                    AllocationAlignment alignment) {
   DCHECK(AllowHandleAllocation::IsAllowed());
   DCHECK(AllowHeapAllocation::IsAllowed());
@@ -194,13 +195,13 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationType type,
         allocation = lo_space_->AllocateRaw(size_in_bytes);
       }
     } else {
-      allocation = new_space_->AllocateRaw(size_in_bytes, alignment);
+      allocation = new_space_->AllocateRaw(size_in_bytes, alignment, origin);
     }
   } else if (AllocationType::kOld == type) {
     if (large_object) {
       allocation = lo_space_->AllocateRaw(size_in_bytes);
     } else {
-      allocation = old_space_->AllocateRaw(size_in_bytes, alignment);
+      allocation = old_space_->AllocateRaw(size_in_bytes, alignment, origin);
     }
   } else if (AllocationType::kCode == type) {
     if (size_in_bytes <= code_space()->AreaSize() && !large_object) {
@@ -216,7 +217,9 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationType type,
 #endif
     DCHECK(!large_object);
     DCHECK(CanAllocateInReadOnlySpace());
-    allocation = read_only_space_->AllocateRaw(size_in_bytes, alignment);
+    DCHECK_EQ(AllocationOrigin::kRuntime, origin);
+    allocation =
+        read_only_space_->AllocateRaw(size_in_bytes, alignment, origin);
   } else {
     UNREACHABLE();
   }
...
@@ -4875,9 +4875,10 @@ HeapObject Heap::EnsureImmovableCode(HeapObject heap_object, int object_size) {
 }
 
 HeapObject Heap::AllocateRawWithLightRetry(int size, AllocationType allocation,
+                                           AllocationOrigin origin,
                                            AllocationAlignment alignment) {
   HeapObject result;
-  AllocationResult alloc = AllocateRaw(size, allocation, alignment);
+  AllocationResult alloc = AllocateRaw(size, allocation, origin, alignment);
   if (alloc.To(&result)) {
     DCHECK(result != ReadOnlyRoots(this).exception());
     return result;
@@ -4886,7 +4887,7 @@ HeapObject Heap::AllocateRawWithLightRetry(int size, AllocationType allocation,
   for (int i = 0; i < 2; i++) {
     CollectGarbage(alloc.RetrySpace(),
                    GarbageCollectionReason::kAllocationFailure);
-    alloc = AllocateRaw(size, allocation, alignment);
+    alloc = AllocateRaw(size, allocation, origin, alignment);
     if (alloc.To(&result)) {
       DCHECK(result != ReadOnlyRoots(this).exception());
       return result;
@@ -4896,16 +4897,18 @@ HeapObject Heap::AllocateRawWithLightRetry(int size, AllocationType allocation,
 }
 
 HeapObject Heap::AllocateRawWithRetryOrFail(int size, AllocationType allocation,
+                                            AllocationOrigin origin,
                                             AllocationAlignment alignment) {
   AllocationResult alloc;
-  HeapObject result = AllocateRawWithLightRetry(size, allocation, alignment);
+  HeapObject result =
+      AllocateRawWithLightRetry(size, allocation, origin, alignment);
   if (!result.is_null()) return result;
 
   isolate()->counters()->gc_last_resort_from_handles()->Increment();
   CollectAllAvailableGarbage(GarbageCollectionReason::kLastResort);
   {
     AlwaysAllocateScope scope(isolate());
-    alloc = AllocateRaw(size, allocation, alignment);
+    alloc = AllocateRaw(size, allocation, origin, alignment);
   }
   if (alloc.To(&result)) {
     DCHECK(result != ReadOnlyRoots(this).exception());
...
@@ -96,6 +96,15 @@ enum class TraceRetainingPathMode { kEnabled, kDisabled };
 
 enum class RetainingPathOption { kDefault, kTrackEphemeronPath };
 
+enum class AllocationOrigin {
+  kGeneratedCode = 0,
+  kRuntime = 1,
+  kGC = 2,
+  kFirstAllocationOrigin = kGeneratedCode,
+  kLastAllocationOrigin = kGC,
+  kNumberOfAllocationOrigins = kLastAllocationOrigin + 1
+};
+
 enum class GarbageCollectionReason {
   kUnknown = 0,
   kAllocationFailure = 1,
@@ -1729,7 +1738,8 @@ class Heap {
   // inlined allocations, use the Heap::DisableInlineAllocation() support).
   V8_WARN_UNUSED_RESULT inline AllocationResult AllocateRaw(
       int size_in_bytes, AllocationType allocation,
-      AllocationAlignment aligment = kWordAligned);
+      AllocationOrigin origin = AllocationOrigin::kRuntime,
+      AllocationAlignment alignment = kWordAligned);
 
   // This method will try to perform an allocation of a given size of a given
   // AllocationType. If the allocation fails, a regular full garbage collection
@@ -1737,8 +1747,14 @@ class Heap {
   // times. If after that retry procedure the allocation still fails nullptr is
   // returned.
   HeapObject AllocateRawWithLightRetry(
-      int size, AllocationType allocation,
+      int size, AllocationType allocation, AllocationOrigin origin,
       AllocationAlignment alignment = kWordAligned);
+  HeapObject AllocateRawWithLightRetry(
+      int size, AllocationType allocation,
+      AllocationAlignment alignment = kWordAligned) {
+    return AllocateRawWithLightRetry(size, allocation,
+                                     AllocationOrigin::kRuntime, alignment);
+  }
 
   // This method will try to perform an allocation of a given size of a given
   // AllocationType. If the allocation fails, a regular full garbage collection
@@ -1747,8 +1763,15 @@ class Heap {
   // garbage collection is triggered which tries to significantly reduce memory.
   // If the allocation still fails after that a fatal error is thrown.
   HeapObject AllocateRawWithRetryOrFail(
-      int size, AllocationType allocation,
+      int size, AllocationType allocation, AllocationOrigin origin,
       AllocationAlignment alignment = kWordAligned);
+  HeapObject AllocateRawWithRetryOrFail(
+      int size, AllocationType allocation,
+      AllocationAlignment alignment = kWordAligned) {
+    return AllocateRawWithRetryOrFail(size, allocation,
+                                      AllocationOrigin::kRuntime, alignment);
+  }
 
   HeapObject AllocateRawCodeInLargeObjectSpace(int size);
 
   // Allocates a heap object based on the map.
...
@@ -14,16 +14,17 @@ namespace internal {
 
 AllocationResult LocalAllocator::Allocate(AllocationSpace space,
                                           int object_size,
+                                          AllocationOrigin origin,
                                           AllocationAlignment alignment) {
   switch (space) {
     case NEW_SPACE:
-      return AllocateInNewSpace(object_size, alignment);
+      return AllocateInNewSpace(object_size, origin, alignment);
     case OLD_SPACE:
       return compaction_spaces_.Get(OLD_SPACE)->AllocateRaw(object_size,
-                                                            alignment);
+                                                            alignment, origin);
     case CODE_SPACE:
       return compaction_spaces_.Get(CODE_SPACE)
-          ->AllocateRaw(object_size, alignment);
+          ->AllocateRaw(object_size, alignment, origin);
     default:
       UNREACHABLE();
   }
@@ -94,9 +95,9 @@ bool LocalAllocator::NewLocalAllocationBuffer() {
 }
 
 AllocationResult LocalAllocator::AllocateInNewSpace(
-    int object_size, AllocationAlignment alignment) {
+    int object_size, AllocationOrigin origin, AllocationAlignment alignment) {
   if (object_size > kMaxLabObjectSize) {
-    return new_space_->AllocateRawSynchronized(object_size, alignment);
+    return new_space_->AllocateRawSynchronized(object_size, alignment, origin);
   }
   return AllocateInLAB(object_size, alignment);
 }
...
@@ -42,12 +42,14 @@ class LocalAllocator {
   }
 
   inline AllocationResult Allocate(AllocationSpace space, int object_size,
+                                   AllocationOrigin origin,
                                    AllocationAlignment alignment);
   inline void FreeLast(AllocationSpace space, HeapObject object,
                        int object_size);
 
  private:
   inline AllocationResult AllocateInNewSpace(int object_size,
+                                             AllocationOrigin origin,
                                              AllocationAlignment alignment);
   inline bool NewLocalAllocationBuffer();
   inline AllocationResult AllocateInLAB(int object_size,
...
@@ -1291,8 +1291,8 @@ class EvacuateVisitorBase : public HeapObjectVisitor {
     if (AbortCompactionForTesting(object)) return false;
 #endif  // VERIFY_HEAP
     AllocationAlignment alignment = HeapObject::RequiredAlignment(object.map());
-    AllocationResult allocation =
-        local_allocator_->Allocate(target_space, size, alignment);
+    AllocationResult allocation = local_allocator_->Allocate(
+        target_space, size, AllocationOrigin::kGC, alignment);
     if (allocation.To(target_object)) {
       MigrateObject(*target_object, object, size, target_space);
       if (target_space == CODE_SPACE)
@@ -1398,8 +1398,8 @@ class EvacuateNewSpaceVisitor final : public EvacuateVisitorBase {
     AllocationAlignment alignment =
         HeapObject::RequiredAlignment(old_object.map());
     AllocationSpace space_allocated_in = NEW_SPACE;
-    AllocationResult allocation =
-        local_allocator_->Allocate(NEW_SPACE, size, alignment);
+    AllocationResult allocation = local_allocator_->Allocate(
+        NEW_SPACE, size, AllocationOrigin::kGC, alignment);
     if (allocation.IsRetry()) {
       allocation = AllocateInOldSpace(size, alignment);
       space_allocated_in = OLD_SPACE;
@@ -1412,8 +1412,8 @@ class EvacuateNewSpaceVisitor final : public EvacuateVisitorBase {
   inline AllocationResult AllocateInOldSpace(int size_in_bytes,
                                              AllocationAlignment alignment) {
-    AllocationResult allocation =
-        local_allocator_->Allocate(OLD_SPACE, size_in_bytes, alignment);
+    AllocationResult allocation = local_allocator_->Allocate(
+        OLD_SPACE, size_in_bytes, AllocationOrigin::kGC, alignment);
     if (allocation.IsRetry()) {
       heap_->FatalProcessOutOfMemory(
           "MarkCompactCollector: semi-space copy, fallback in old gen");
...
@@ -135,8 +135,8 @@ CopyAndForwardResult Scavenger::SemiSpaceCopyObject(
                 "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
   DCHECK(heap()->AllowedToBeMigrated(map, object, NEW_SPACE));
   AllocationAlignment alignment = HeapObject::RequiredAlignment(map);
-  AllocationResult allocation =
-      allocator_.Allocate(NEW_SPACE, object_size, alignment);
+  AllocationResult allocation = allocator_.Allocate(
+      NEW_SPACE, object_size, AllocationOrigin::kGC, alignment);
   HeapObject target;
   if (allocation.To(&target)) {
@@ -171,8 +171,8 @@ CopyAndForwardResult Scavenger::PromoteObject(Map map, THeapObjectSlot slot,
                 std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
                 "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
   AllocationAlignment alignment = HeapObject::RequiredAlignment(map);
-  AllocationResult allocation =
-      allocator_.Allocate(OLD_SPACE, object_size, alignment);
+  AllocationResult allocation = allocator_.Allocate(
+      OLD_SPACE, object_size, AllocationOrigin::kGC, alignment);
   HeapObject target;
   if (allocation.To(&target)) {
...
@@ -371,7 +371,8 @@ HeapObject PagedSpace::TryAllocateLinearlyAligned(
   return HeapObject::FromAddress(current_top);
 }
 
-AllocationResult PagedSpace::AllocateRawUnaligned(int size_in_bytes) {
+AllocationResult PagedSpace::AllocateRawUnaligned(int size_in_bytes,
+                                                  AllocationOrigin origin) {
   DCHECK_IMPLIES(identity() == RO_SPACE, !IsDetached());
   if (!EnsureLinearAllocationArea(size_in_bytes)) {
     return AllocationResult::Retry(identity());
@@ -379,11 +380,17 @@ AllocationResult PagedSpace::AllocateRawUnaligned(int size_in_bytes) {
   HeapObject object = AllocateLinearly(size_in_bytes);
   DCHECK(!object.is_null());
   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object.address(), size_in_bytes);
+
+  if (FLAG_trace_allocations_origins) {
+    UpdateAllocationOrigins(origin);
+  }
+
   return object;
 }
 
 AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
-                                                AllocationAlignment alignment) {
+                                                AllocationAlignment alignment,
+                                                AllocationOrigin origin) {
   DCHECK(identity() == OLD_SPACE || identity() == RO_SPACE);
   DCHECK_IMPLIES(identity() == RO_SPACE, !IsDetached());
   int allocation_size = size_in_bytes;
@@ -401,12 +408,17 @@ AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
     DCHECK(!object.is_null());
   }
   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object.address(), size_in_bytes);
+
+  if (FLAG_trace_allocations_origins) {
+    UpdateAllocationOrigins(origin);
+  }
+
   return object;
 }
 
 AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
-                                         AllocationAlignment alignment) {
+                                         AllocationAlignment alignment,
+                                         AllocationOrigin origin) {
   if (top_on_previous_step_ && top() < top_on_previous_step_ &&
       SupportsInlineAllocation()) {
     // Generated code decreased the top() pointer to do folded allocations.
@@ -421,11 +433,12 @@ AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
   DCHECK_IMPLIES(!SupportsInlineAllocation(), bytes_since_last == 0);
 #ifdef V8_HOST_ARCH_32_BIT
-  AllocationResult result = alignment != kWordAligned
-                                ? AllocateRawAligned(size_in_bytes, alignment)
-                                : AllocateRawUnaligned(size_in_bytes);
+  AllocationResult result =
+      alignment != kWordAligned
+          ? AllocateRawAligned(size_in_bytes, alignment, origin)
+          : AllocateRawUnaligned(size_in_bytes, origin);
 #else
-  AllocationResult result = AllocateRawUnaligned(size_in_bytes);
+  AllocationResult result = AllocateRawUnaligned(size_in_bytes, origin);
 #endif
   HeapObject heap_obj;
   if (!result.IsRetry() && result.To(&heap_obj) && !is_local()) {
@@ -439,13 +452,12 @@ AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
   return result;
 }
 
 // -----------------------------------------------------------------------------
 // NewSpace
 
 AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
-                                              AllocationAlignment alignment) {
+                                              AllocationAlignment alignment,
+                                              AllocationOrigin origin) {
   Address top = allocation_info_.top();
   int filler_size = Heap::GetFillToAlign(top, alignment);
   int aligned_size_in_bytes = size_in_bytes + filler_size;
@@ -472,11 +484,15 @@ AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj.address(), size_in_bytes);
 
+  if (FLAG_trace_allocations_origins) {
+    UpdateAllocationOrigins(origin);
+  }
+
   return obj;
 }
 
-AllocationResult NewSpace::AllocateRawUnaligned(int size_in_bytes) {
+AllocationResult NewSpace::AllocateRawUnaligned(int size_in_bytes,
+                                                AllocationOrigin origin) {
   Address top = allocation_info_.top();
   if (allocation_info_.limit() < top + size_in_bytes) {
     // See if we can create room.
@@ -493,12 +509,16 @@ AllocationResult NewSpace::AllocateRawUnaligned(int size_in_bytes) {
   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj.address(), size_in_bytes);
 
+  if (FLAG_trace_allocations_origins) {
+    UpdateAllocationOrigins(origin);
+  }
+
   return obj;
 }
 
 AllocationResult NewSpace::AllocateRaw(int size_in_bytes,
-                                       AllocationAlignment alignment) {
+                                       AllocationAlignment alignment,
+                                       AllocationOrigin origin) {
   if (top() < top_on_previous_step_) {
     // Generated code decreased the top() pointer to do folded allocations
     DCHECK_EQ(Page::FromAllocationAreaAddress(top()),
@@ -507,8 +527,8 @@ AllocationResult NewSpace::AllocateRaw(int size_in_bytes,
   }
 #ifdef V8_HOST_ARCH_32_BIT
   return alignment != kWordAligned
-             ? AllocateRawAligned(size_in_bytes, alignment)
-             : AllocateRawUnaligned(size_in_bytes);
+             ? AllocateRawAligned(size_in_bytes, alignment, origin)
+             : AllocateRawUnaligned(size_in_bytes, origin);
 #else
 #ifdef V8_COMPRESS_POINTERS
   // TODO(ishell, v8:8875): Consider using aligned allocations once the
@@ -516,14 +536,14 @@ AllocationResult NewSpace::AllocateRaw(int size_in_bytes,
   // unaligned access since both x64 and arm64 architectures (where pointer
   // compression is supported) allow unaligned access to doubles and full words.
 #endif  // V8_COMPRESS_POINTERS
-  return AllocateRawUnaligned(size_in_bytes);
+  return AllocateRawUnaligned(size_in_bytes, origin);
 #endif
 }
 
 V8_WARN_UNUSED_RESULT inline AllocationResult NewSpace::AllocateRawSynchronized(
-    int size_in_bytes, AllocationAlignment alignment) {
+    int size_in_bytes, AllocationAlignment alignment, AllocationOrigin origin) {
   base::MutexGuard guard(&mutex_);
-  return AllocateRaw(size_in_bytes, alignment);
+  return AllocateRaw(size_in_bytes, alignment, origin);
 }
 
 LocalAllocationBuffer LocalAllocationBuffer::FromResult(Heap* heap,
...
@@ -1641,6 +1641,11 @@ void PagedSpace::MergeCompactionSpace(CompactionSpace* other) {
   // area_size_
   other->FreeLinearAllocationArea();
 
+  for (int i = static_cast<int>(AllocationOrigin::kFirstAllocationOrigin);
+       i <= static_cast<int>(AllocationOrigin::kLastAllocationOrigin); i++) {
+    allocations_origins_[i] += other->allocations_origins_[i];
+  }
+
   // The linear allocation area of {other} should be destroyed now.
   DCHECK_EQ(kNullAddress, other->top());
   DCHECK_EQ(kNullAddress, other->limit());
@@ -1842,6 +1847,20 @@ Address SpaceWithLinearArea::ComputeLimit(Address start, Address end,
   }
 }
 
+void SpaceWithLinearArea::UpdateAllocationOrigins(AllocationOrigin origin) {
+  DCHECK(!((origin != AllocationOrigin::kGC) &&
+           (heap()->isolate()->current_vm_state() == GC)));
+  allocations_origins_[static_cast<int>(origin)]++;
+}
+
+void SpaceWithLinearArea::PrintAllocationsOrigins() {
+  PrintIsolate(
+      heap()->isolate(),
+      "Allocations Origins for %s: GeneratedCode:%zu - Runtime:%zu - GC:%zu\n",
+      name(), allocations_origins_[0], allocations_origins_[1],
+      allocations_origins_[2]);
+}
+
 void PagedSpace::MarkLinearAllocationAreaBlack() {
   DCHECK(heap()->incremental_marking()->black_allocation());
   Address current_top = top();
...
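For readers who want the bookkeeping in one place rather than spread across hunks, here is a small standalone C++ sketch of the pattern the CL uses (type and member names are simplified stand-ins, not the actual V8 declarations): a counter array sized by the enum, an increment per allocation, a merge when a compaction space is folded back into its owner, and a print at the end of sweeping.

// Illustrative stand-alone model of the counters added by this CL.
// Names are simplified stand-ins, not the real V8 declarations.
#include <cstddef>
#include <cstdio>

enum class Origin { kGeneratedCode = 0, kRuntime = 1, kGC = 2, kCount = 3 };

class SpaceStats {
 public:
  // Called on every allocation that reaches C++, cf. UpdateAllocationOrigins().
  void Record(Origin origin) { counters_[static_cast<int>(origin)]++; }

  // Compaction spaces keep private counters and fold them back into the
  // owning space, cf. PagedSpace::MergeCompactionSpace().
  void Merge(const SpaceStats& other) {
    for (int i = 0; i < static_cast<int>(Origin::kCount); i++) {
      counters_[i] += other.counters_[i];
    }
  }

  // Printed once sweeping completes, cf. PrintAllocationsOrigins().
  void Print(const char* space_name) const {
    std::printf(
        "Allocations Origins for %s: GeneratedCode:%zu - Runtime:%zu - GC:%zu\n",
        space_name, counters_[0], counters_[1], counters_[2]);
  }

 private:
  size_t counters_[static_cast<int>(Origin::kCount)] = {0};
};

int main() {
  SpaceStats old_space;
  old_space.Record(Origin::kRuntime);
  old_space.Record(Origin::kGC);
  old_space.Print("old_space");  // GeneratedCode:0 - Runtime:1 - GC:1
}

The kFirstAllocationOrigin/kLastAllocationOrigin/kNumberOfAllocationOrigins markers in the real AllocationOrigin enum serve the same purpose as kCount above: loops like the merge and the counter array size stay correct if another origin is ever added.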
@@ -2103,6 +2103,10 @@ class SpaceWithLinearArea : public Space {
   V8_EXPORT_PRIVATE virtual void UpdateInlineAllocationLimit(
       size_t min_size) = 0;
 
+  V8_EXPORT_PRIVATE void UpdateAllocationOrigins(AllocationOrigin origin);
+
+  void PrintAllocationsOrigins();
+
  protected:
   // If we are doing inline allocation in steps, this method performs the 'step'
   // operation. top is the memory address of the bump pointer at the last
@@ -2120,6 +2124,9 @@ class SpaceWithLinearArea : public Space {
   // TODO(ofrobots): make these private after refactoring is complete.
   LinearAllocationArea allocation_info_;
   Address top_on_previous_step_;
+
+  size_t allocations_origins_[static_cast<int>(
+      AllocationOrigin::kNumberOfAllocationOrigins)] = {0};
 };
 
 class V8_EXPORT_PRIVATE PagedSpace
@@ -2185,17 +2192,19 @@ class V8_EXPORT_PRIVATE PagedSpace
   // Allocate the requested number of bytes in the space if possible, return a
   // failure object if not.
   V8_WARN_UNUSED_RESULT inline AllocationResult AllocateRawUnaligned(
-      int size_in_bytes);
+      int size_in_bytes, AllocationOrigin origin = AllocationOrigin::kRuntime);
 
   // Allocate the requested number of bytes in the space double aligned if
   // possible, return a failure object if not.
   V8_WARN_UNUSED_RESULT inline AllocationResult AllocateRawAligned(
-      int size_in_bytes, AllocationAlignment alignment);
+      int size_in_bytes, AllocationAlignment alignment,
+      AllocationOrigin origin = AllocationOrigin::kRuntime);
 
   // Allocate the requested number of bytes in the space and consider allocation
   // alignment if needed.
   V8_WARN_UNUSED_RESULT inline AllocationResult AllocateRaw(
-      int size_in_bytes, AllocationAlignment alignment);
+      int size_in_bytes, AllocationAlignment alignment,
+      AllocationOrigin origin = AllocationOrigin::kRuntime);
 
   size_t Free(Address start, size_t size_in_bytes, SpaceAccountingMode mode) {
     if (size_in_bytes == 0) return 0;
@@ -2768,16 +2777,19 @@ class V8_EXPORT_PRIVATE NewSpace
   void set_age_mark(Address mark) { to_space_.set_age_mark(mark); }
 
   V8_WARN_UNUSED_RESULT V8_INLINE AllocationResult
-  AllocateRawAligned(int size_in_bytes, AllocationAlignment alignment);
+  AllocateRawAligned(int size_in_bytes, AllocationAlignment alignment,
+                     AllocationOrigin origin = AllocationOrigin::kRuntime);
 
-  V8_WARN_UNUSED_RESULT V8_INLINE AllocationResult
-  AllocateRawUnaligned(int size_in_bytes);
+  V8_WARN_UNUSED_RESULT V8_INLINE AllocationResult AllocateRawUnaligned(
+      int size_in_bytes, AllocationOrigin origin = AllocationOrigin::kRuntime);
 
   V8_WARN_UNUSED_RESULT V8_INLINE AllocationResult
-  AllocateRaw(int size_in_bytes, AllocationAlignment alignment);
+  AllocateRaw(int size_in_bytes, AllocationAlignment alignment,
+              AllocationOrigin origin = AllocationOrigin::kRuntime);
 
   V8_WARN_UNUSED_RESULT inline AllocationResult AllocateRawSynchronized(
-      int size_in_bytes, AllocationAlignment alignment);
+      int size_in_bytes, AllocationAlignment alignment,
+      AllocationOrigin origin = AllocationOrigin::kRuntime);
 
   // Reset the allocation pointer to the beginning of the active semispace.
   void ResetLinearAllocationArea();
...
@@ -322,7 +322,8 @@ RUNTIME_FUNCTION(Runtime_AllocateInYoungGeneration) {
   double_align = false;
 
   return *isolate->factory()->NewFillerObject(size, double_align,
-                                              AllocationType::kYoung);
+                                              AllocationType::kYoung,
+                                              AllocationOrigin::kGeneratedCode);
 }
 
 RUNTIME_FUNCTION(Runtime_AllocateInOldGeneration) {
@@ -339,7 +340,8 @@ RUNTIME_FUNCTION(Runtime_AllocateInOldGeneration) {
     CHECK(size <= kMaxRegularHeapObjectSize);
   }
   return *isolate->factory()->NewFillerObject(size, double_align,
-                                              AllocationType::kOld);
+                                              AllocationType::kOld,
+                                              AllocationOrigin::kGeneratedCode);
 }
 
 RUNTIME_FUNCTION(Runtime_AllocateByteArray) {
...