Commit e71ce309 authored by Tobias Tebbi, committed by V8 LUCI CQ

Revert "[heap] Only start incremental marking when V8 is not in GC VM state."

This reverts commit f124b28d.

Reason for revert: https://logs.chromium.org/logs/v8/buildbucket/cr-buildbucket/8818719400214419665/+/u/Check_-_stress_concurrent_allocation__flakes_/flush-baseline-code

Original change's description:
> [heap] Only start incremental marking when V8 is not in GC VM state.
>
> Bug: v8:12503
> Change-Id: Icda291d9770c46c7fee3c70dd4df97f320b1956a
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3398113
> Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
> Commit-Queue: Hannes Payer <hpayer@chromium.org>
> Cr-Commit-Position: refs/heads/main@{#79623}

Bug: v8:12503
Change-Id: I067b308cfc4511d89144d2bb65a1dba24db62179
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3553104
Bot-Commit: Rubber Stamper <rubber-stamper@appspot.gserviceaccount.com>
Commit-Queue: Tobias Tebbi <tebbi@chromium.org>
Owners-Override: Tobias Tebbi <tebbi@chromium.org>
Cr-Commit-Position: refs/heads/main@{#79629}
parent 0a0ad98a
@@ -1334,15 +1334,9 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
   THREAD_LOCAL_TOP_ACCESSOR(ExternalCallbackScope*, external_callback_scope)
+  THREAD_LOCAL_TOP_ACCESSOR(StateTag, current_vm_state)
   THREAD_LOCAL_TOP_ACCESSOR(EmbedderState*, current_embedder_state)
-  inline void set_current_vm_state(StateTag state_tag) {
-    current_vm_state_.store(state_tag, std::memory_order_relaxed);
-  }
-  inline StateTag current_vm_state() const {
-    return current_vm_state_.load(std::memory_order_relaxed);
-  }
   void SetData(uint32_t slot, void* data) {
     DCHECK_LT(slot, Internals::kNumIsolateDataSlots);
     isolate_data_.embedder_data_[slot] = data;
@@ -2086,7 +2080,6 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
   ReadOnlyHeap* read_only_heap_ = nullptr;
   std::shared_ptr<ReadOnlyArtifacts> artifacts_;
   std::shared_ptr<StringTable> string_table_;
-  std::atomic<StateTag> current_vm_state_{EXTERNAL};
   const int id_;
   EntryStackItem* entry_stack_ = nullptr;
@@ -30,6 +30,7 @@ void ThreadLocalTop::Clear() {
   simulator_ = nullptr;
   js_entry_sp_ = kNullAddress;
   external_callback_scope_ = nullptr;
+  current_vm_state_ = EXTERNAL;
   current_embedder_state_ = nullptr;
   failed_access_check_callback_ = nullptr;
   thread_in_wasm_flag_address_ = kNullAddress;
@@ -34,9 +34,9 @@ class ThreadLocalTop {
   // refactor this to really consist of just Addresses and 32-bit
   // integer fields.
 #ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
-  static constexpr uint32_t kSizeInBytes = 25 * kSystemPointerSize;
+  static constexpr uint32_t kSizeInBytes = 26 * kSystemPointerSize;
 #else
-  static constexpr uint32_t kSizeInBytes = 24 * kSystemPointerSize;
+  static constexpr uint32_t kSizeInBytes = 25 * kSystemPointerSize;
 #endif

   // Does early low-level initialization that does not depend on the
@@ -146,6 +146,7 @@ class ThreadLocalTop {
   Address js_entry_sp_;
   // The external callback we're currently in.
   ExternalCallbackScope* external_callback_scope_;
+  StateTag current_vm_state_;
   EmbedderState* current_embedder_state_;
   // Call back function to report unsafe JS accesses.
@@ -1698,10 +1698,11 @@ void Heap::ReportExternalMemoryPressure() {
     return;
   }
   if (incremental_marking()->IsStopped()) {
-    if (!StartIncrementalMarking(
-            GCFlagsForIncrementalMarking(),
-            GarbageCollectionReason::kExternalMemoryPressure,
-            kGCCallbackFlagsForExternalMemory)) {
+    if (incremental_marking()->CanBeActivated()) {
+      StartIncrementalMarking(GCFlagsForIncrementalMarking(),
+                              GarbageCollectionReason::kExternalMemoryPressure,
+                              kGCCallbackFlagsForExternalMemory);
+    } else {
       CollectAllGarbage(i::Heap::kNoGCFlags,
                         GarbageCollectionReason::kExternalMemoryPressure,
                         kGCCallbackFlagsForExternalMemory);
@@ -1985,11 +1986,10 @@ int Heap::NotifyContextDisposed(bool dependant_context) {
   return ++contexts_disposed_;
 }

-bool Heap::StartIncrementalMarking(int gc_flags,
+void Heap::StartIncrementalMarking(int gc_flags,
                                    GarbageCollectionReason gc_reason,
                                    GCCallbackFlags gc_callback_flags) {
   DCHECK(incremental_marking()->IsStopped());
-  if (!incremental_marking()->CanBeActivated()) return false;

   // Sweeping needs to be completed such that markbits are all cleared before
   // starting marking again.
@@ -2014,7 +2014,6 @@ bool Heap::StartIncrementalMarking(int gc_flags,
   set_current_gc_flags(gc_flags);
   current_gc_callback_flags_ = gc_callback_flags;
   incremental_marking()->Start(gc_reason);
-  return true;
 }

 void Heap::CompleteSweepingFull() {
@@ -4169,7 +4168,7 @@ void Heap::CheckMemoryPressure() {
     TRACE_EVENT0("devtools.timeline,v8", "V8.CheckMemoryPressure");
     CollectGarbageOnMemoryPressure();
   } else if (memory_pressure_level == MemoryPressureLevel::kModerate) {
-    if (incremental_marking()->IsStopped()) {
+    if (FLAG_incremental_marking && incremental_marking()->IsStopped()) {
       TRACE_EVENT0("devtools.timeline,v8", "V8.CheckMemoryPressure");
       StartIncrementalMarking(kReduceMemoryFootprintMask,
                               GarbageCollectionReason::kMemoryPressure);
@@ -1064,9 +1064,7 @@ class Heap {
   // Starts incremental marking assuming incremental marking is currently
   // stopped.
-  // Returns true if starting was successful. Returns false if marking cannot be
-  // activated.
-  V8_EXPORT_PRIVATE bool StartIncrementalMarking(
+  V8_EXPORT_PRIVATE void StartIncrementalMarking(
       int gc_flags, GarbageCollectionReason gc_reason,
       GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags);
@@ -1311,7 +1309,7 @@ class Heap {
   size_t CommittedMemoryOfUnmapper();

   // Returns the amount of memory currently committed for the heap.
-  V8_EXPORT_PRIVATE size_t CommittedMemory();
+  size_t CommittedMemory();

   // Returns the amount of memory currently committed for the old space.
   size_t CommittedOldGenerationMemory();
@@ -146,8 +146,7 @@ bool IncrementalMarking::CanBeActivated() {
   //  2) when we are currently not in a GC, and
   //  3) when we are currently not serializing or deserializing the heap, and
   //  4) not a shared heap.
-  return FLAG_incremental_marking &&
-         heap_->isolate()->current_vm_state() != GC &&
+  return FLAG_incremental_marking && heap_->gc_state() == Heap::NOT_IN_GC &&
          heap_->deserialization_complete() &&
          !heap_->isolate()->serializer_enabled() && !heap_->IsShared();
 }
@@ -6506,7 +6506,7 @@ void OOMCallback(const char* location, bool is_heap_oom) {
   Heap* heap = oom_isolate->heap();
   size_t kSlack = heap->new_space() ? heap->new_space()->Capacity() : 0;
   CHECK_LE(heap->OldGenerationCapacity(), kHeapLimit + kSlack);
-  CHECK_LE(heap->memory_allocator()->Size(), heap->CommittedMemory());
+  CHECK_LE(heap->memory_allocator()->Size(), heap->MaxReserved() + kSlack);
   base::OS::ExitProcess(0);
 }
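For context on what the revert restores: callers of Heap::StartIncrementalMarking are again responsible for checking IncrementalMarking::CanBeActivated() themselves and falling back to a full GC when marking cannot start, instead of relying on a bool return value. Below is a minimal standalone sketch of that calling convention; it uses stub types rather than the real V8 classes, and only the method names are taken from the diff above.

// Minimal standalone sketch (not V8 source): models the calling convention
// this revert restores, with stub types in place of the real Heap and
// IncrementalMarking classes. Method names mirror the diff above.
#include <iostream>

struct IncrementalMarkingStub {
  bool stopped = true;
  bool can_be_activated = false;  // e.g. false while already inside a GC
  bool IsStopped() const { return stopped; }
  bool CanBeActivated() const { return can_be_activated; }
};

struct HeapStub {
  IncrementalMarkingStub marking;

  // Restored contract: void return; the caller checks CanBeActivated() first.
  void StartIncrementalMarking() { std::cout << "incremental marking started\n"; }
  void CollectAllGarbage() { std::cout << "full GC instead\n"; }

  // Caller-side pattern restored in Heap::ReportExternalMemoryPressure.
  void ReportExternalMemoryPressure() {
    if (marking.IsStopped()) {
      if (marking.CanBeActivated()) {
        StartIncrementalMarking();
      } else {
        CollectAllGarbage();
      }
    }
  }
};

int main() {
  HeapStub heap;
  heap.ReportExternalMemoryPressure();  // marking cannot start -> full GC
  heap.marking.can_be_activated = true;
  heap.ReportExternalMemoryPressure();  // marking can start -> incremental
  return 0;
}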