Commit 3d31f991 authored by JianxiaoLuIntel's avatar JianxiaoLuIntel Committed by V8 LUCI CQ

[heap] Optimize OnMoveEvent checking

LeftTrimFixedArray calls OnMoveEvent every time. Even when profiling is not enabled in user mode, it still needs to perform some checks, and the function call itself has a certain overhead. This patch removes the unnecessary per-call check: the condition only needs to be re-evaluated when the logging status changes.

Change-Id: I0e957860616a18415398f7753ed21caab5a4361f
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3751964
Reviewed-by: Camillo Bruni <cbruni@chromium.org>
Commit-Queue: Jianxiao Lu <jianxiao.lu@intel.com>
Reviewed-by: Toon Verwaest <verwaest@chromium.org>
Cr-Commit-Position: refs/heads/main@{#81794}
parent 82a876b0
......@@ -3406,12 +3406,15 @@ void Isolate::ClearSerializerData() {
external_reference_map_ = nullptr;
}
bool Isolate::LogObjectRelocation() {
return FLAG_verify_predictable || v8_file_logger()->is_logging() ||
is_profiling() || v8_file_logger()->is_listening_to_code_events() ||
(heap_profiler() != nullptr &&
heap_profiler()->is_tracking_object_moves()) ||
heap()->has_heap_object_allocation_tracker();
// When profiling status changes, call this function to update the single bool
// cache.
void Isolate::UpdateLogObjectRelocation() {
log_object_relocation_ = FLAG_verify_predictable ||
v8_file_logger()->is_logging() || is_profiling() ||
v8_file_logger()->is_listening_to_code_events() ||
(heap_profiler() != nullptr &&
heap_profiler()->is_tracking_object_moves()) ||
heap()->has_heap_object_allocation_tracker();
}
void Isolate::Deinit() {
......
......@@ -694,7 +694,7 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
void ClearSerializerData();
bool LogObjectRelocation();
void UpdateLogObjectRelocation();
// Initializes the current thread to run this Isolate.
// Not thread-safe. Multiple threads should not Enter/Exit the same isolate
......@@ -1359,6 +1359,7 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
CollectSourcePositionsForAllBytecodeArrays();
}
is_profiling_.store(enabled, std::memory_order_relaxed);
UpdateLogObjectRelocation();
}
Logger* logger() const { return logger_; }
......@@ -2007,6 +2008,9 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
GlobalSafepoint* global_safepoint() const { return global_safepoint_.get(); }
bool owns_shareable_data() { return owns_shareable_data_; }
bool log_object_relocation() const { return log_object_relocation_; }
// TODO(pthier): Unify with owns_shareable_data() once the flag
// --shared-string-table is removed.
bool OwnsStringTables() { return !FLAG_shared_string_table || is_shared(); }
......@@ -2278,6 +2282,8 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
// Only false for client isolates attached to a shared isolate.
bool owns_shareable_data_ = true;
bool log_object_relocation_ = false;
#ifdef V8_EXTERNAL_CODE_SPACE
// Base address of the pointer compression cage containing external code
// space, when external code space is enabled.
......
......@@ -829,6 +829,9 @@ void Heap::AddHeapObjectAllocationTracker(
DisableInlineAllocation();
}
allocation_trackers_.push_back(tracker);
if (allocation_trackers_.size() == 1) {
isolate_->UpdateLogObjectRelocation();
}
}
void Heap::RemoveHeapObjectAllocationTracker(
......@@ -836,6 +839,9 @@ void Heap::RemoveHeapObjectAllocationTracker(
allocation_trackers_.erase(std::remove(allocation_trackers_.begin(),
allocation_trackers_.end(), tracker),
allocation_trackers_.end());
if (allocation_trackers_.empty()) {
isolate_->UpdateLogObjectRelocation();
}
if (allocation_trackers_.empty() && FLAG_inline_new) {
EnableInlineAllocation();
}
......@@ -3485,8 +3491,10 @@ FixedArrayBase Heap::LeftTrimFixedArray(FixedArrayBase object,
FixedArrayBase new_object =
FixedArrayBase::cast(HeapObject::FromAddress(new_start));
// Notify the heap profiler of change in object layout.
OnMoveEvent(new_object, object, new_object.Size());
if (isolate()->log_object_relocation()) {
// Notify the heap profiler of change in object layout.
OnMoveEvent(new_object, object, new_object.Size());
}
#ifdef ENABLE_SLOW_DCHECKS
if (FLAG_enable_slow_asserts) {
......
......@@ -4182,7 +4182,7 @@ size_t CreateAndExecuteEvacuationTasks(
std::vector<std::pair<ParallelWorkItem, MemoryChunk*>> evacuation_items,
MigrationObserver* migration_observer) {
base::Optional<ProfilingMigrationObserver> profiling_observer;
if (collector->isolate()->LogObjectRelocation()) {
if (collector->isolate()->log_object_relocation()) {
profiling_observer.emplace(collector->heap());
}
std::vector<std::unique_ptr<v8::internal::Evacuator>> evacuators;
......
......@@ -329,7 +329,7 @@ void ScavengerCollector::CollectGarbage() {
filter_scope.FilterOldSpaceSweepingPages(
[](Page* page) { return !page->ContainsSlots<OLD_TO_NEW>(); });
const bool is_logging = isolate_->LogObjectRelocation();
const bool is_logging = isolate_->log_object_relocation();
for (int i = 0; i < num_scavenge_tasks; ++i) {
scavengers.emplace_back(
new Scavenger(this, heap_, is_logging, &empty_chunks, &copied_list,
......
......@@ -2153,6 +2153,7 @@ void V8FileLogger::SetCodeEventHandler(uint32_t options,
if (jit_logger_) {
RemoveLogEventListener(jit_logger_.get());
jit_logger_.reset();
isolate_->UpdateLogObjectRelocation();
}
if (event_handler) {
......@@ -2160,6 +2161,7 @@ void V8FileLogger::SetCodeEventHandler(uint32_t options,
wasm::GetWasmEngine()->EnableCodeLogging(isolate_);
#endif // V8_ENABLE_WEBASSEMBLY
jit_logger_ = std::make_unique<JitLogger>(isolate_, event_handler);
isolate_->UpdateLogObjectRelocation();
AddLogEventListener(jit_logger_.get());
if (options & kJitCodeEventEnumExisting) {
HandleScope scope(isolate_);
......@@ -2211,6 +2213,7 @@ FILE* V8FileLogger::TearDownAndGetLogFile() {
if (jit_logger_) {
RemoveLogEventListener(jit_logger_.get());
jit_logger_.reset();
isolate_->UpdateLogObjectRelocation();
}
return log_->Close();
......@@ -2224,6 +2227,7 @@ void V8FileLogger::UpdateIsLogging(bool value) {
// Relaxed atomic to avoid locking the mutex for the most common case: when
// logging is disabled.
is_logging_.store(value, std::memory_order_relaxed);
isolate_->UpdateLogObjectRelocation();
}
void ExistingCodeLogger::LogCodeObject(Object object) {
......
......@@ -102,6 +102,7 @@ HeapSnapshot* HeapProfiler::TakeSnapshot(
}
ids_->RemoveDeadEntries();
is_tracking_object_moves_ = true;
heap()->isolate()->UpdateLogObjectRelocation();
is_taking_snapshot_ = false;
heap()->isolate()->debug()->feature_tracker()->Track(
......@@ -140,6 +141,7 @@ v8::AllocationProfile* HeapProfiler::GetAllocationProfile() {
void HeapProfiler::StartHeapObjectsTracking(bool track_allocations) {
ids_->UpdateHeapObjectsMap();
is_tracking_object_moves_ = true;
heap()->isolate()->UpdateLogObjectRelocation();
DCHECK(!allocation_tracker_);
if (track_allocations) {
allocation_tracker_.reset(new AllocationTracker(ids_.get(), names_.get()));
......@@ -231,7 +233,10 @@ Handle<HeapObject> HeapProfiler::FindHeapObjectById(SnapshotObjectId id) {
// Drops the heap-object id map and rebuilds it empty. Object-move tracking
// stays enabled only while an allocation tracker is active; otherwise it is
// switched off, and the isolate's cached log_object_relocation_ flag is
// refreshed so move events stop being dispatched.
void HeapProfiler::ClearHeapObjectMap() {
  ids_.reset(new HeapObjectsMap(heap()));
  if (!allocation_tracker_) {
    is_tracking_object_moves_ = false;
    // Keep the isolate's cached relocation-logging flag in sync with the
    // tracking state we just changed.
    heap()->isolate()->UpdateLogObjectRelocation();
  }
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment