Commit f124b28d authored by Hannes Payer, committed by V8 LUCI CQ

[heap] Only start incremental marking when V8 is not in GC VM state.

Bug: v8:12503
Change-Id: Icda291d9770c46c7fee3c70dd4df97f320b1956a
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3398113
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Hannes Payer <hpayer@chromium.org>
Cr-Commit-Position: refs/heads/main@{#79623}
parent 6819f75d
...@@ -1326,9 +1326,15 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory { ...@@ -1326,9 +1326,15 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
THREAD_LOCAL_TOP_ACCESSOR(ExternalCallbackScope*, external_callback_scope) THREAD_LOCAL_TOP_ACCESSOR(ExternalCallbackScope*, external_callback_scope)
THREAD_LOCAL_TOP_ACCESSOR(StateTag, current_vm_state)
THREAD_LOCAL_TOP_ACCESSOR(EmbedderState*, current_embedder_state) THREAD_LOCAL_TOP_ACCESSOR(EmbedderState*, current_embedder_state)
  // Records the current VM state tag (e.g. EXTERNAL, GC, JS).
  // Stored in a std::atomic with relaxed ordering so other threads (and the
  // heap, when deciding whether incremental marking may start) can read it
  // without synchronization; relaxed is sufficient because readers only need
  // an eventually-visible snapshot, not ordering with other memory accesses.
  inline void set_current_vm_state(StateTag state_tag) {
  current_vm_state_.store(state_tag, std::memory_order_relaxed);
  }
  // Returns the most recently recorded VM state tag. Relaxed atomic load:
  // callers (e.g. IncrementalMarking::CanBeActivated checking for the GC
  // state) tolerate a slightly stale value; no ordering guarantees needed.
  inline StateTag current_vm_state() const {
  return current_vm_state_.load(std::memory_order_relaxed);
  }
void SetData(uint32_t slot, void* data) { void SetData(uint32_t slot, void* data) {
DCHECK_LT(slot, Internals::kNumIsolateDataSlots); DCHECK_LT(slot, Internals::kNumIsolateDataSlots);
isolate_data_.embedder_data_[slot] = data; isolate_data_.embedder_data_[slot] = data;
...@@ -2072,6 +2078,7 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory { ...@@ -2072,6 +2078,7 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
ReadOnlyHeap* read_only_heap_ = nullptr; ReadOnlyHeap* read_only_heap_ = nullptr;
std::shared_ptr<ReadOnlyArtifacts> artifacts_; std::shared_ptr<ReadOnlyArtifacts> artifacts_;
std::shared_ptr<StringTable> string_table_; std::shared_ptr<StringTable> string_table_;
std::atomic<StateTag> current_vm_state_{EXTERNAL};
const int id_; const int id_;
EntryStackItem* entry_stack_ = nullptr; EntryStackItem* entry_stack_ = nullptr;
......
...@@ -30,7 +30,6 @@ void ThreadLocalTop::Clear() { ...@@ -30,7 +30,6 @@ void ThreadLocalTop::Clear() {
simulator_ = nullptr; simulator_ = nullptr;
js_entry_sp_ = kNullAddress; js_entry_sp_ = kNullAddress;
external_callback_scope_ = nullptr; external_callback_scope_ = nullptr;
current_vm_state_ = EXTERNAL;
current_embedder_state_ = nullptr; current_embedder_state_ = nullptr;
failed_access_check_callback_ = nullptr; failed_access_check_callback_ = nullptr;
thread_in_wasm_flag_address_ = kNullAddress; thread_in_wasm_flag_address_ = kNullAddress;
......
...@@ -34,9 +34,9 @@ class ThreadLocalTop { ...@@ -34,9 +34,9 @@ class ThreadLocalTop {
// refactor this to really consist of just Addresses and 32-bit // refactor this to really consist of just Addresses and 32-bit
// integer fields. // integer fields.
#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING #ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
static constexpr uint32_t kSizeInBytes = 26 * kSystemPointerSize;
#else
static constexpr uint32_t kSizeInBytes = 25 * kSystemPointerSize; static constexpr uint32_t kSizeInBytes = 25 * kSystemPointerSize;
#else
static constexpr uint32_t kSizeInBytes = 24 * kSystemPointerSize;
#endif #endif
// Does early low-level initialization that does not depend on the // Does early low-level initialization that does not depend on the
...@@ -146,7 +146,6 @@ class ThreadLocalTop { ...@@ -146,7 +146,6 @@ class ThreadLocalTop {
Address js_entry_sp_; Address js_entry_sp_;
// The external callback we're currently in. // The external callback we're currently in.
ExternalCallbackScope* external_callback_scope_; ExternalCallbackScope* external_callback_scope_;
StateTag current_vm_state_;
EmbedderState* current_embedder_state_; EmbedderState* current_embedder_state_;
// Call back function to report unsafe JS accesses. // Call back function to report unsafe JS accesses.
......
...@@ -1698,11 +1698,10 @@ void Heap::ReportExternalMemoryPressure() { ...@@ -1698,11 +1698,10 @@ void Heap::ReportExternalMemoryPressure() {
return; return;
} }
if (incremental_marking()->IsStopped()) { if (incremental_marking()->IsStopped()) {
if (incremental_marking()->CanBeActivated()) { if (!StartIncrementalMarking(
StartIncrementalMarking(GCFlagsForIncrementalMarking(), GCFlagsForIncrementalMarking(),
GarbageCollectionReason::kExternalMemoryPressure, GarbageCollectionReason::kExternalMemoryPressure,
kGCCallbackFlagsForExternalMemory); kGCCallbackFlagsForExternalMemory)) {
} else {
CollectAllGarbage(i::Heap::kNoGCFlags, CollectAllGarbage(i::Heap::kNoGCFlags,
GarbageCollectionReason::kExternalMemoryPressure, GarbageCollectionReason::kExternalMemoryPressure,
kGCCallbackFlagsForExternalMemory); kGCCallbackFlagsForExternalMemory);
...@@ -1986,10 +1985,11 @@ int Heap::NotifyContextDisposed(bool dependant_context) { ...@@ -1986,10 +1985,11 @@ int Heap::NotifyContextDisposed(bool dependant_context) {
return ++contexts_disposed_; return ++contexts_disposed_;
} }
void Heap::StartIncrementalMarking(int gc_flags, bool Heap::StartIncrementalMarking(int gc_flags,
GarbageCollectionReason gc_reason, GarbageCollectionReason gc_reason,
GCCallbackFlags gc_callback_flags) { GCCallbackFlags gc_callback_flags) {
DCHECK(incremental_marking()->IsStopped()); DCHECK(incremental_marking()->IsStopped());
if (!incremental_marking()->CanBeActivated()) return false;
// Sweeping needs to be completed such that markbits are all cleared before // Sweeping needs to be completed such that markbits are all cleared before
// starting marking again. // starting marking again.
...@@ -2014,6 +2014,7 @@ void Heap::StartIncrementalMarking(int gc_flags, ...@@ -2014,6 +2014,7 @@ void Heap::StartIncrementalMarking(int gc_flags,
set_current_gc_flags(gc_flags); set_current_gc_flags(gc_flags);
current_gc_callback_flags_ = gc_callback_flags; current_gc_callback_flags_ = gc_callback_flags;
incremental_marking()->Start(gc_reason); incremental_marking()->Start(gc_reason);
return true;
} }
void Heap::CompleteSweepingFull() { void Heap::CompleteSweepingFull() {
...@@ -4168,7 +4169,7 @@ void Heap::CheckMemoryPressure() { ...@@ -4168,7 +4169,7 @@ void Heap::CheckMemoryPressure() {
TRACE_EVENT0("devtools.timeline,v8", "V8.CheckMemoryPressure"); TRACE_EVENT0("devtools.timeline,v8", "V8.CheckMemoryPressure");
CollectGarbageOnMemoryPressure(); CollectGarbageOnMemoryPressure();
} else if (memory_pressure_level == MemoryPressureLevel::kModerate) { } else if (memory_pressure_level == MemoryPressureLevel::kModerate) {
if (FLAG_incremental_marking && incremental_marking()->IsStopped()) { if (incremental_marking()->IsStopped()) {
TRACE_EVENT0("devtools.timeline,v8", "V8.CheckMemoryPressure"); TRACE_EVENT0("devtools.timeline,v8", "V8.CheckMemoryPressure");
StartIncrementalMarking(kReduceMemoryFootprintMask, StartIncrementalMarking(kReduceMemoryFootprintMask,
GarbageCollectionReason::kMemoryPressure); GarbageCollectionReason::kMemoryPressure);
......
...@@ -1064,7 +1064,9 @@ class Heap { ...@@ -1064,7 +1064,9 @@ class Heap {
// Starts incremental marking assuming incremental marking is currently // Starts incremental marking assuming incremental marking is currently
// stopped. // stopped.
V8_EXPORT_PRIVATE void StartIncrementalMarking( // Returns true if starting was successful. Returns false if marking cannot be
// activated.
V8_EXPORT_PRIVATE bool StartIncrementalMarking(
int gc_flags, GarbageCollectionReason gc_reason, int gc_flags, GarbageCollectionReason gc_reason,
GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags); GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags);
...@@ -1309,7 +1311,7 @@ class Heap { ...@@ -1309,7 +1311,7 @@ class Heap {
size_t CommittedMemoryOfUnmapper(); size_t CommittedMemoryOfUnmapper();
// Returns the amount of memory currently committed for the heap. // Returns the amount of memory currently committed for the heap.
size_t CommittedMemory(); V8_EXPORT_PRIVATE size_t CommittedMemory();
// Returns the amount of memory currently committed for the old space. // Returns the amount of memory currently committed for the old space.
size_t CommittedOldGenerationMemory(); size_t CommittedOldGenerationMemory();
......
...@@ -146,7 +146,8 @@ bool IncrementalMarking::CanBeActivated() { ...@@ -146,7 +146,8 @@ bool IncrementalMarking::CanBeActivated() {
// 2) when we are currently not in a GC, and // 2) when we are currently not in a GC, and
// 3) when we are currently not serializing or deserializing the heap, and // 3) when we are currently not serializing or deserializing the heap, and
// 4) not a shared heap. // 4) not a shared heap.
return FLAG_incremental_marking && heap_->gc_state() == Heap::NOT_IN_GC && return FLAG_incremental_marking &&
heap_->isolate()->current_vm_state() != GC &&
heap_->deserialization_complete() && heap_->deserialization_complete() &&
!heap_->isolate()->serializer_enabled() && !heap_->IsShared(); !heap_->isolate()->serializer_enabled() && !heap_->IsShared();
} }
......
...@@ -6506,7 +6506,7 @@ void OOMCallback(const char* location, bool is_heap_oom) { ...@@ -6506,7 +6506,7 @@ void OOMCallback(const char* location, bool is_heap_oom) {
Heap* heap = oom_isolate->heap(); Heap* heap = oom_isolate->heap();
size_t kSlack = heap->new_space() ? heap->new_space()->Capacity() : 0; size_t kSlack = heap->new_space() ? heap->new_space()->Capacity() : 0;
CHECK_LE(heap->OldGenerationCapacity(), kHeapLimit + kSlack); CHECK_LE(heap->OldGenerationCapacity(), kHeapLimit + kSlack);
CHECK_LE(heap->memory_allocator()->Size(), heap->MaxReserved() + kSlack); CHECK_LE(heap->memory_allocator()->Size(), heap->CommittedMemory());
base::OS::ExitProcess(0); base::OS::ExitProcess(0);
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.