Commit e67caa3b authored by Michael Lippautz, committed by V8 LUCI CQ

[heap] Incremental marking simplifications

- Remove dead code.
- Remove `was_activated_`.

Bug: v8:12775
Change-Id: Ie54b24f21a8789dc815ab7a96ce4a074e3644342
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3726300
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#81423}
parent a1da1458
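Taken together, the two bullets mean callers no longer consult a separate activation flag: `CanBeActivated()` becomes `CanBeStarted()`, and the `WasActivated()` checks below are replaced by the state-derived `IsRunning()`. A minimal, self-contained sketch of the resulting pattern (toy types and `main()` are illustrative stand-ins, not V8 code):

  #include <cstdio>

  // Toy stand-in for IncrementalMarking's state machine after this change:
  // the separate was_activated_ flag is gone; everything derives from state_.
  enum State { STOPPED, MARKING, COMPLETE };

  struct ToyIncrementalMarking {
    State state_ = STOPPED;

    bool IsStopped() const { return state_ == STOPPED; }
    bool IsRunning() const { return !IsStopped(); }  // replaces WasActivated()
    bool IsMarking() const { return state_ >= MARKING; }

    void Start() { state_ = MARKING; }  // no "was_activated_ = true"
    void Stop() { state_ = STOPPED; }   // no Epilogue() to reset a flag
  };

  int main() {
    ToyIncrementalMarking im;
    std::printf("running before Start: %d\n", im.IsRunning());  // 0
    im.Start();
    std::printf("running after Start: %d\n", im.IsRunning());   // 1
    im.Stop();
    std::printf("running after Stop: %d\n", im.IsRunning());    // 0
  }

The design point is that `state_` alone becomes the single source of truth, so there is no flag left to reset in an epilogue.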
src/heap/heap.cc:

@@ -1701,7 +1701,7 @@ void Heap::ReportExternalMemoryPressure() {
     return;
   }
   if (incremental_marking()->IsStopped()) {
-    if (incremental_marking()->CanBeActivated()) {
+    if (incremental_marking()->CanBeStarted()) {
       StartIncrementalMarking(GCFlagsForIncrementalMarking(),
                               GarbageCollectionReason::kExternalMemoryPressure,
                               kGCCallbackFlagsForExternalMemory);
@@ -2058,8 +2058,8 @@ void Heap::StartIncrementalMarkingIfAllocationLimitIsReached(
 }

 void Heap::StartIncrementalMarkingIfAllocationLimitIsReachedBackground() {
-  if (!incremental_marking()->IsStopped() ||
-      !incremental_marking()->CanBeActivated()) {
+  if (incremental_marking()->IsRunning() ||
+      !incremental_marking()->CanBeStarted()) {
     return;
   }
@@ -2248,7 +2248,7 @@ size_t Heap::PerformGarbageCollection(
   // If incremental marking has been activated, the full GC cycle has already
   // started, so don't start a new one.
-  if (!incremental_marking_->WasActivated()) {
+  if (!incremental_marking_->IsRunning()) {
     tracer()->StartCycle(collector, gc_reason, collector_reason,
                          GCTracer::MarkingType::kAtomic);
   }
@@ -2256,7 +2256,7 @@ size_t Heap::PerformGarbageCollection(
   tracer()->StartAtomicPause();
   if (!Heap::IsYoungGenerationCollector(collector) &&
-      incremental_marking_->WasActivated()) {
+      incremental_marking_->IsRunning()) {
     tracer()->UpdateCurrentEvent(gc_reason, collector_reason);
   }
@@ -2396,7 +2396,7 @@ void Heap::PerformSharedGarbageCollection(Isolate* initiator,
   v8::Isolate::Scope isolate_scope(reinterpret_cast<v8::Isolate*>(isolate()));
   tracer()->StartObservablePause();
-  DCHECK(!incremental_marking_->WasActivated());
+  DCHECK(incremental_marking_->IsStopped());
   DCHECK_NOT_NULL(isolate()->global_safepoint());
   isolate()->global_safepoint()->IterateClientIsolates([](Isolate* client) {
@@ -2649,8 +2649,6 @@ void Heap::MarkCompactEpilogue() {
   SetGCState(NOT_IN_GC);
   isolate_->counters()->objs_since_last_full()->Set(0);
-
-  incremental_marking()->Epilogue();
 }

 void Heap::MarkCompactPrologue() {
@@ -3459,10 +3457,8 @@ FixedArrayBase Heap::LeftTrimFixedArray(FixedArrayBase object,
   Address old_start = object.address();
   Address new_start = old_start + bytes_to_trim;
-  if (incremental_marking()->IsMarking()) {
-    incremental_marking()->NotifyLeftTrimming(
-        object, HeapObject::FromAddress(new_start));
-  }
+  incremental_marking()->NotifyLeftTrimming(object,
+                                            HeapObject::FromAddress(new_start));

 #ifdef DEBUG
   if (MayContainRecordedSlots(object)) {
@@ -5525,7 +5521,7 @@ double Heap::PercentToGlobalMemoryLimit() {
 Heap::IncrementalMarkingLimit Heap::IncrementalMarkingLimitReached() {
   // Code using an AlwaysAllocateScope assumes that the GC state does not
   // change; that implies that no marking steps must be performed.
-  if (!incremental_marking()->CanBeActivated() || always_allocate()) {
+  if (!incremental_marking()->CanBeStarted() || always_allocate()) {
     // Incremental marking is disabled or it is too early to start.
     return IncrementalMarkingLimit::kNoLimit;
   }
@@ -6028,12 +6024,9 @@ void Heap::RegisterExternallyReferencedObject(Address* location) {
   }
   HeapObject heap_object = HeapObject::cast(object);
   DCHECK(IsValidHeapObject(this, heap_object));
-  if (FLAG_incremental_marking_wrappers && incremental_marking()->IsMarking()) {
-    incremental_marking()->WhiteToGreyAndPush(heap_object);
-  } else {
-    DCHECK(mark_compact_collector()->in_use());
-    mark_compact_collector()->MarkExternallyReferencedObject(heap_object);
-  }
+  DCHECK(incremental_marking()->IsMarking() ||
+         mark_compact_collector()->in_use());
+  mark_compact_collector()->MarkExternallyReferencedObject(heap_object);
 }

 void Heap::StartTearDown() {
src/heap/incremental-marking-inl.h:

@@ -5,12 +5,9 @@
 #ifndef V8_HEAP_INCREMENTAL_MARKING_INL_H_
 #define V8_HEAP_INCREMENTAL_MARKING_INL_H_

-#include "src/heap/incremental-marking.h"
-
 #include "src/execution/isolate.h"
-#include "src/heap/mark-compact-inl.h"
-#include "src/objects/maybe-object.h"
-#include "src/objects/objects-inl.h"
+#include "src/heap/heap-inl.h"
+#include "src/heap/incremental-marking.h"

 namespace v8 {
 namespace internal {
@@ -33,14 +30,6 @@ void IncrementalMarking::TransferColor(HeapObject from, HeapObject to) {
   }
 }

-bool IncrementalMarking::WhiteToGreyAndPush(HeapObject obj) {
-  if (marking_state()->WhiteToGrey(obj)) {
-    local_marking_worklists()->Push(obj);
-    return true;
-  }
-  return false;
-}
-
 void IncrementalMarking::RestartIfNotMarking() {
   if (state_ == COMPLETE) {
     state_ = MARKING;
src/heap/incremental-marking.cc:

@@ -79,14 +79,16 @@ void IncrementalMarking::MarkBlackBackground(HeapObject obj, int object_size) {
 }

 void IncrementalMarking::NotifyLeftTrimming(HeapObject from, HeapObject to) {
-  DCHECK(IsMarking());
+  if (!IsMarking()) return;
   DCHECK(MemoryChunk::FromHeapObject(from)->SweepingDone());
   DCHECK_EQ(MemoryChunk::FromHeapObject(from), MemoryChunk::FromHeapObject(to));
   DCHECK_NE(from, to);

   MarkBit new_mark_bit = marking_state()->MarkBitFrom(to);
-  if (black_allocation() && Marking::IsBlack<kAtomicity>(new_mark_bit)) {
+  if (black_allocation() &&
+      Marking::IsBlack<AccessMode::ATOMIC>(new_mark_bit)) {
     // Nothing to do if the object is in black area.
     return;
   }
@@ -96,19 +98,17 @@ void IncrementalMarking::NotifyLeftTrimming(HeapObject from, HeapObject to) {
   if (from.address() + kTaggedSize == to.address()) {
     // The old and the new markbits overlap. The |to| object has the
     // grey color. To make it black, we need to set the second bit.
-    DCHECK(new_mark_bit.Get<kAtomicity>());
-    new_mark_bit.Next().Set<kAtomicity>();
+    DCHECK(new_mark_bit.Get<AccessMode::ATOMIC>());
+    new_mark_bit.Next().Set<AccessMode::ATOMIC>();
   } else {
-    bool success = Marking::WhiteToBlack<kAtomicity>(new_mark_bit);
+    bool success = Marking::WhiteToBlack<AccessMode::ATOMIC>(new_mark_bit);
     DCHECK(success);
     USE(success);
   }
   DCHECK(marking_state()->IsBlack(to));
 }

-bool IncrementalMarking::WasActivated() { return was_activated_; }
-
-bool IncrementalMarking::CanBeActivated() {
+bool IncrementalMarking::CanBeStarted() const {
   // Only start incremental marking in a safe state:
   // 1) when incremental marking is turned on
   // 2) when we are currently not in a GC, and
@@ -173,7 +173,6 @@ void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
   scheduled_bytes_to_mark_ = 0;
   schedule_update_time_ms_ = start_time_ms_;
   bytes_marked_concurrently_ = 0;
-  was_activated_ = true;

   StartMarking();
@@ -182,7 +181,16 @@ void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
   incremental_marking_job()->Start(heap_);
 }

-class IncrementalMarkingRootMarkingVisitor final : public RootVisitor {
+bool IncrementalMarking::WhiteToGreyAndPush(HeapObject obj) {
+  if (marking_state()->WhiteToGrey(obj)) {
+    local_marking_worklists()->Push(obj);
+    return true;
+  }
+  return false;
+}
+
+class IncrementalMarking::IncrementalMarkingRootMarkingVisitor final
+    : public RootVisitor {
  public:
   explicit IncrementalMarkingRootMarkingVisitor(Heap* heap)
       : heap_(heap), incremental_marking_(heap->incremental_marking()) {}
@@ -221,22 +229,18 @@ class IncrementalMarkingRootMarkingVisitor final : public RootVisitor {
   IncrementalMarking* const incremental_marking_;
 };

-namespace {
-
-void MarkRoots(Heap* heap) {
-  IncrementalMarkingRootMarkingVisitor visitor(heap);
+void IncrementalMarking::MarkRoots() {
+  IncrementalMarkingRootMarkingVisitor visitor(heap_);
   CodePageHeaderModificationScope rwx_write_scope(
       "Marking of builtins table entries require write access to Code page "
       "header");
-  heap->IterateRoots(
+  heap_->IterateRoots(
       &visitor,
       base::EnumSet<SkipRoot>{SkipRoot::kStack, SkipRoot::kMainThreadHandles,
                               SkipRoot::kWeak});
 }

-}  // namespace
-
-void IncrementalMarking::MarkRootsForTesting() { MarkRoots(heap_); }
+void IncrementalMarking::MarkRootsForTesting() { MarkRoots(); }

 void IncrementalMarking::StartMarking() {
   if (heap_->isolate()->serializer_enabled()) {
@@ -281,7 +285,7 @@ void IncrementalMarking::StartMarking() {
   {
     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_ROOTS);
-    MarkRoots(heap_);
+    MarkRoots();
   }

   if (FLAG_concurrent_marking && !heap_->IsTearingDown()) {
@@ -618,12 +622,6 @@ void IncrementalMarking::MarkingComplete(CompletionAction action) {
   }
 }

-void IncrementalMarking::Epilogue() {
-  DCHECK(IsStopped());
-  was_activated_ = false;
-}
-
 bool IncrementalMarking::ShouldDoEmbedderStep() {
   return state_ == MARKING && FLAG_incremental_marking_wrappers &&
          heap_->local_embedder_heap_tracer()->InUse();
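The markbit comments in the `NotifyLeftTrimming` hunks above compress a subtle invariant: colors are encoded in two consecutive bitmap bits, one bit per tagged word (00 = white, 10 = grey, 11 = black), so trimming an object by exactly one word makes the new object's bit pair overlap the old one's. A toy model of that encoding (not V8's actual `Bitmap`/`MarkBit` classes; word indices stand in for addresses):

  #include <bitset>
  #include <cstdio>

  // Toy model of the overlap: an object's color is the pair of bits
  // starting at the bitmap index of its address.
  struct ToyBitmap {
    std::bitset<64> bits;
    bool IsBlack(int word) const { return bits[word] && bits[word + 1]; }
    void MarkBlack(int word) { bits[word] = bits[word + 1] = true; }
  };

  int main() {
    ToyBitmap bitmap;
    int from = 0;       // old object start, in tagged words
    int to = from + 1;  // left-trimmed by exactly one tagged word

    bitmap.MarkBlack(from);  // |from| is first colored black (11)
    // |to|'s first bit is |from|'s second bit, already set, so |to|
    // currently reads as grey (10) ...
    std::printf("to black yet: %d\n", bitmap.IsBlack(to));  // 0
    bitmap.bits[to + 1] = true;  // ... set the second bit -> black
    std::printf("to black now: %d\n", bitmap.IsBlack(to));  // 1
  }

Because `from` has just been marked black, the overlapped first bit of `to` is guaranteed set, which is exactly why the real code only needs `new_mark_bit.Next().Set()`.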
src/heap/incremental-marking.h:

@@ -46,7 +46,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
   class V8_NODISCARD PauseBlackAllocationScope {
    public:
     explicit PauseBlackAllocationScope(IncrementalMarking* marking)
-        : marking_(marking), paused_(false) {
+        : marking_(marking) {
       if (marking_->black_allocation()) {
         paused_ = true;
         marking_->PauseBlackAllocation();
@@ -61,7 +61,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
    private:
     IncrementalMarking* marking_;
-    bool paused_;
+    bool paused_ = false;
   };

   // It's hard to know how much work the incremental marker should do to make
@@ -85,30 +85,28 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
   static constexpr size_t kEmbedderActivationThreshold = 0;
 #endif

-  static const AccessMode kAtomicity = AccessMode::ATOMIC;
+  V8_INLINE void TransferColor(HeapObject from, HeapObject to);
+  V8_INLINE void RestartIfNotMarking();

   IncrementalMarking(Heap* heap, WeakObjects* weak_objects);

   MarkingState* marking_state() { return &marking_state_; }
   AtomicMarkingState* atomic_marking_state() { return &atomic_marking_state_; }
   NonAtomicMarkingState* non_atomic_marking_state() {
     return &non_atomic_marking_state_;
   }

   void NotifyLeftTrimming(HeapObject from, HeapObject to);
-  V8_INLINE void TransferColor(HeapObject from, HeapObject to);

   bool IsStopped() const { return state() == STOPPED; }
+  bool IsRunning() const { return !IsStopped(); }
   bool IsMarking() const { return state() >= MARKING; }
   bool IsComplete() const { return state() == COMPLETE; }

   bool CollectionRequested() const { return collection_requested_; }

-  bool CanBeActivated();
-
-  bool WasActivated();
+  bool CanBeStarted() const;

   void Start(GarbageCollectionReason gc_reason);

   // Returns true if incremental marking was running and false otherwise.
@@ -117,10 +115,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
   void UpdateMarkingWorklistAfterYoungGenGC();
   void UpdateMarkedBytesAfterScavenge(size_t dead_bytes_in_new_space);

-  void MarkingComplete(CompletionAction action);
-
-  void Epilogue();
-
   // Performs incremental marking steps and returns before the deadline_in_ms is
   // reached. It may return earlier if the marker is already ahead of the
   // marking schedule, which is indicated with StepResult::kDone.
@@ -128,22 +122,9 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
                            CompletionAction completion_action,
                            StepOrigin step_origin);

-  void FinalizeSweeping();
-  bool ContinueConcurrentSweeping();
-  void SupportConcurrentSweeping();
-
   StepResult Step(double max_step_size_in_ms, CompletionAction action,
                   StepOrigin step_origin);

-  bool ShouldDoEmbedderStep();
-  StepResult EmbedderStep(double expected_duration_ms, double* duration_ms);
-
-  V8_INLINE void RestartIfNotMarking();
-
-  // Returns true if the function succeeds in transitioning the object
-  // from white to grey.
-  V8_INLINE bool WhiteToGreyAndPush(HeapObject obj);
-
   // This function is used to color the object black before it undergoes an
   // unsafe layout change. This is a part of synchronization protocol with
   // the concurrent marker.
@@ -173,8 +154,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
     return collector_->local_marking_worklists();
   }

-  void Deactivate();
-
   // Ensures that the given region is black allocated if it is in the old
   // generation.
   void EnsureBlackAllocated(Address allocated, size_t size);
@@ -189,6 +168,8 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
   void MarkRootsForTesting();

  private:
+  class IncrementalMarkingRootMarkingVisitor;
+
   class Observer : public AllocationObserver {
    public:
     Observer(IncrementalMarking* incremental_marking, intptr_t step_size)
@@ -203,6 +184,9 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
   void StartMarking();

+  bool ShouldDoEmbedderStep();
+  StepResult EmbedderStep(double expected_duration_ms, double* duration_ms);
+
   void StartBlackAllocation();
   void PauseBlackAllocation();
   void FinishBlackAllocation();
@@ -232,8 +216,15 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
   // bytes and already marked bytes.
   size_t ComputeStepSizeInBytes(StepOrigin step_origin);

+  void MarkingComplete(CompletionAction action);
+
+  void MarkRoots();
+
   void AdvanceOnAllocation();

+  // Returns true if the function succeeds in transitioning the object
+  // from white to grey.
+  bool WhiteToGreyAndPush(HeapObject obj);
+
   State state() const {
     DCHECK_IMPLIES(state_ != STOPPED, FLAG_incremental_marking);
     return state_;
@@ -269,7 +260,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
   std::atomic<State> state_;
   bool is_compacting_ = false;
-  bool was_activated_ = false;
   bool black_allocation_ = false;
   bool collection_requested_ = false;
   IncrementalMarkingJob incremental_marking_job_;
src/heap/memory-reducer.cc:

@@ -58,7 +58,7 @@ void MemoryReducer::TimerTask::RunInternal() {
       low_allocation_rate || optimize_for_memory;
   event.can_start_incremental_gc =
       heap->incremental_marking()->IsStopped() &&
-      (heap->incremental_marking()->CanBeActivated() || optimize_for_memory);
+      (heap->incremental_marking()->CanBeStarted() || optimize_for_memory);
   event.committed_memory = heap->CommittedOldGenerationMemory();
   memory_reducer_->NotifyTimer(event);
 }
src/heap/scavenger-inl.h:

@@ -5,12 +5,15 @@
 #ifndef V8_HEAP_SCAVENGER_INL_H_
 #define V8_HEAP_SCAVENGER_INL_H_

+#include "src/codegen/assembler-inl.h"
 #include "src/heap/evacuation-allocator-inl.h"
 #include "src/heap/incremental-marking-inl.h"
 #include "src/heap/memory-chunk.h"
 #include "src/heap/new-spaces.h"
+#include "src/heap/objects-visiting-inl.h"
 #include "src/heap/scavenger.h"
 #include "src/objects/map.h"
+#include "src/objects/objects-body-descriptors-inl.h"
 #include "src/objects/objects-inl.h"
 #include "src/objects/slots-inl.h"