Commit 66e9152f authored by Ulan Degenbaev, committed by Commit Bot

[heap] Rename MarkingDeque to MarkingWorklist.

This prepares ground for switching mark-compactor to use
Worklist data-structure instead of the existing marking deque.

BUG=chromium:694255

Change-Id: I0ac4c563018a9619962fb4bf388b5f3cceffb86d
Reviewed-on: https://chromium-review.googlesource.com/544933
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#46178}
parent ef1a80d6
......@@ -13,7 +13,7 @@ void LocalEmbedderHeapTracer::TracePrologue() {
if (!InUse()) return;
CHECK(cached_wrappers_to_trace_.empty());
num_v8_marking_deque_was_empty_ = 0;
num_v8_marking_worklist_was_empty_ = 0;
remote_tracer_->TracePrologue();
}
......
......@@ -19,7 +19,7 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
typedef std::pair<void*, void*> WrapperInfo;
LocalEmbedderHeapTracer()
: remote_tracer_(nullptr), num_v8_marking_deque_was_empty_(0) {}
: remote_tracer_(nullptr), num_v8_marking_worklist_was_empty_(0) {}
void SetRemoteTracer(EmbedderHeapTracer* tracer) { remote_tracer_ = tracer; }
bool InUse() { return remote_tracer_ != nullptr; }
......@@ -45,12 +45,14 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
// are too many of them.
bool RequiresImmediateWrapperProcessing();
void NotifyV8MarkingDequeWasEmpty() { num_v8_marking_deque_was_empty_++; }
void NotifyV8MarkingWorklistWasEmpty() {
num_v8_marking_worklist_was_empty_++;
}
bool ShouldFinalizeIncrementalMarking() {
static const size_t kMaxIncrementalFixpointRounds = 3;
return !FLAG_incremental_marking_wrappers || !InUse() ||
NumberOfWrappersToTrace() == 0 ||
num_v8_marking_deque_was_empty_ > kMaxIncrementalFixpointRounds;
num_v8_marking_worklist_was_empty_ > kMaxIncrementalFixpointRounds;
}
private:
......@@ -58,7 +60,7 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
EmbedderHeapTracer* remote_tracer_;
WrapperCache cached_wrappers_to_trace_;
size_t num_v8_marking_deque_was_empty_;
size_t num_v8_marking_worklist_was_empty_;
};
} // namespace internal
......
......@@ -1536,7 +1536,7 @@ void Heap::MarkCompactEpilogue() {
PreprocessStackTraces();
DCHECK(incremental_marking()->IsStopped());
mark_compact_collector()->marking_deque()->StopUsing();
mark_compact_collector()->marking_worklist()->StopUsing();
}
......@@ -1790,7 +1790,7 @@ void Heap::Scavenge() {
promotion_queue_.Destroy();
incremental_marking()->UpdateMarkingDequeAfterScavenge();
incremental_marking()->UpdateMarkingWorklistAfterScavenge();
ScavengeWeakObjectRetainer weak_object_retainer(this);
ProcessYoungWeakReferences(&weak_object_retainer);
......@@ -4254,11 +4254,11 @@ void Heap::FinalizeIncrementalMarkingIfComplete(
if (incremental_marking()->IsMarking() &&
(incremental_marking()->IsReadyToOverApproximateWeakClosure() ||
(!incremental_marking()->finalize_marking_completed() &&
mark_compact_collector()->marking_deque()->IsEmpty() &&
mark_compact_collector()->marking_worklist()->IsEmpty() &&
local_embedder_heap_tracer()->ShouldFinalizeIncrementalMarking()))) {
FinalizeIncrementalMarking(gc_reason);
} else if (incremental_marking()->IsComplete() ||
(mark_compact_collector()->marking_deque()->IsEmpty() &&
(mark_compact_collector()->marking_worklist()->IsEmpty() &&
local_embedder_heap_tracer()
->ShouldFinalizeIncrementalMarking())) {
CollectAllGarbage(current_gc_flags_, gc_reason, current_gc_callback_flags_);
......@@ -5760,11 +5760,11 @@ bool Heap::SetUp() {
tracer_ = new GCTracer(this);
scavenge_collector_ = new Scavenger(this);
mark_compact_collector_ = new MarkCompactCollector(this);
incremental_marking_->set_marking_deque(
mark_compact_collector_->marking_deque());
incremental_marking_->set_marking_worklist(
mark_compact_collector_->marking_worklist());
#ifdef V8_CONCURRENT_MARKING
concurrent_marking_ =
new ConcurrentMarking(this, mark_compact_collector_->marking_deque());
new ConcurrentMarking(this, mark_compact_collector_->marking_worklist());
#else
concurrent_marking_ = new ConcurrentMarking(this, nullptr);
#endif
......
......@@ -33,7 +33,7 @@ void IncrementalMarking::Observer::Step(int bytes_allocated, Address, size_t) {
IncrementalMarking::IncrementalMarking(Heap* heap)
: heap_(heap),
marking_deque_(nullptr),
marking_worklist_(nullptr),
initial_old_generation_size_(0),
bytes_marked_ahead_of_schedule_(0),
unscanned_bytes_of_large_object_(0),
......@@ -132,7 +132,7 @@ void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo,
bool IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) {
if (ObjectMarking::WhiteToGrey<kAtomicity>(obj, marking_state(obj))) {
marking_deque()->Push(obj);
marking_worklist()->Push(obj);
return true;
}
return false;
......@@ -143,9 +143,9 @@ void IncrementalMarking::MarkBlackAndPush(HeapObject* obj) {
ObjectMarking::WhiteToGrey<kAtomicity>(obj, marking_state(obj));
if (ObjectMarking::GreyToBlack<kAtomicity>(obj, marking_state(obj))) {
#ifdef V8_CONCURRENT_MARKING
marking_deque()->Push(obj, MarkingThread::kMain, TargetDeque::kBailout);
marking_worklist()->Push(obj, MarkingThread::kMain, TargetDeque::kBailout);
#else
if (!marking_deque()->Push(obj)) {
if (!marking_worklist()->Push(obj)) {
ObjectMarking::BlackToGrey<kAtomicity>(obj, marking_state(obj));
}
#endif
......@@ -208,7 +208,7 @@ void IncrementalMarking::NotifyLeftTrimming(HeapObject* from, HeapObject* to) {
DCHECK(success);
USE(success);
}
marking_deque()->Push(to);
marking_worklist()->Push(to);
RestartIfNotMarking();
}
}
......@@ -246,13 +246,14 @@ class IncrementalMarkingMarkingVisitor
HeapObject::RawField(object, end_offset));
start_offset = end_offset;
end_offset = Min(object_size, end_offset + kProgressBarScanningChunk);
scan_until_end = heap->incremental_marking()->marking_deque()->IsFull();
scan_until_end =
heap->incremental_marking()->marking_worklist()->IsFull();
} while (scan_until_end && start_offset < object_size);
chunk->set_progress_bar(start_offset);
if (start_offset < object_size) {
if (ObjectMarking::IsGrey<IncrementalMarking::kAtomicity>(
object, heap->incremental_marking()->marking_state(object))) {
heap->incremental_marking()->marking_deque()->Unshift(object);
heap->incremental_marking()->marking_worklist()->Unshift(object);
} else {
DCHECK(ObjectMarking::IsBlack<IncrementalMarking::kAtomicity>(
object, heap->incremental_marking()->marking_state(object)));
......@@ -571,7 +572,7 @@ void IncrementalMarking::StartMarking() {
PatchIncrementalMarkingRecordWriteStubs(heap_, mode);
marking_deque()->StartUsing();
marking_worklist()->StartUsing();
ActivateIncrementalWriteBarrier();
......@@ -782,7 +783,7 @@ void IncrementalMarking::FinalizeIncrementally() {
ProcessWeakCells();
int marking_progress =
heap_->mark_compact_collector()->marking_deque()->Size() +
heap_->mark_compact_collector()->marking_worklist()->Size() +
static_cast<int>(
heap_->local_embedder_heap_tracer()->NumberOfCachedWrappersToTrace());
......@@ -812,13 +813,13 @@ void IncrementalMarking::FinalizeIncrementally() {
}
}
void IncrementalMarking::UpdateMarkingDequeAfterScavenge() {
void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
if (!IsMarking()) return;
Map* filler_map = heap_->one_pointer_filler_map();
marking_deque()->Update([this, filler_map](HeapObject* obj) -> HeapObject* {
marking_worklist()->Update([this,
filler_map](HeapObject* obj) -> HeapObject* {
DCHECK(obj->IsHeapObject());
// Only pointers to from space have to be updated.
if (heap_->InFromSpace(obj)) {
......@@ -908,11 +909,11 @@ void IncrementalMarking::RevisitObject(HeapObject* obj) {
IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
}
intptr_t IncrementalMarking::ProcessMarkingDeque(
intptr_t IncrementalMarking::ProcessMarkingWorklist(
intptr_t bytes_to_process, ForceCompletionAction completion) {
intptr_t bytes_processed = 0;
while (bytes_processed < bytes_to_process || completion == FORCE_COMPLETION) {
HeapObject* obj = marking_deque()->Pop();
HeapObject* obj = marking_worklist()->Pop();
if (obj == nullptr) break;
// Left trimming may result in white, grey, or black filler objects on the
// marking deque. Ignore these objects.
......@@ -942,7 +943,7 @@ void IncrementalMarking::Hurry() {
// forced e.g. in tests. It should not happen when COMPLETE was set when
// incremental marking finished and a regular GC was triggered after that
// because should_hurry_ will force a full GC.
if (!marking_deque()->IsEmpty()) {
if (!marking_worklist()->IsEmpty()) {
double start = 0.0;
if (FLAG_trace_incremental_marking) {
start = heap_->MonotonicallyIncreasingTimeInMs();
......@@ -952,7 +953,7 @@ void IncrementalMarking::Hurry() {
}
// TODO(gc) hurry can mark objects it encounters black as mutator
// was stopped.
ProcessMarkingDeque(0, FORCE_COMPLETION);
ProcessMarkingWorklist(0, FORCE_COMPLETION);
state_ = COMPLETE;
if (FLAG_trace_incremental_marking) {
double end = heap_->MonotonicallyIncreasingTimeInMs();
......@@ -1104,7 +1105,7 @@ double IncrementalMarking::AdvanceIncrementalMarking(
remaining_time_in_ms =
deadline_in_ms - heap()->MonotonicallyIncreasingTimeInMs();
} while (remaining_time_in_ms >= kStepSizeInMs && !IsComplete() &&
!marking_deque()->IsEmpty());
!marking_worklist()->IsEmpty());
return remaining_time_in_ms;
}
......@@ -1201,12 +1202,12 @@ size_t IncrementalMarking::Step(size_t bytes_to_process,
size_t bytes_processed = 0;
if (state_ == MARKING) {
bytes_processed = ProcessMarkingDeque(bytes_to_process);
bytes_processed = ProcessMarkingWorklist(bytes_to_process);
if (step_origin == StepOrigin::kTask) {
bytes_marked_ahead_of_schedule_ += bytes_processed;
}
if (marking_deque()->IsEmpty()) {
if (marking_worklist()->IsEmpty()) {
if (heap_->local_embedder_heap_tracer()
->ShouldFinalizeIncrementalMarking()) {
if (completion == FORCE_COMPLETION ||
......@@ -1220,7 +1221,7 @@ size_t IncrementalMarking::Step(size_t bytes_to_process,
IncrementIdleMarkingDelayCounter();
}
} else {
heap_->local_embedder_heap_tracer()->NotifyV8MarkingDequeWasEmpty();
heap_->local_embedder_heap_tracer()->NotifyV8MarkingWorklistWasEmpty();
}
}
}
......
......@@ -138,7 +138,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
void FinalizeIncrementally();
void UpdateMarkingDequeAfterScavenge();
void UpdateMarkingWorklistAfterScavenge();
void Hurry();
......@@ -262,13 +262,14 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
void AbortBlackAllocation();
MarkingDeque* marking_deque() {
SLOW_DCHECK(marking_deque_ != nullptr);
return marking_deque_;
MarkCompactCollector::MarkingWorklist* marking_worklist() {
SLOW_DCHECK(marking_worklist_ != nullptr);
return marking_worklist_;
}
void set_marking_deque(MarkingDeque* marking_deque) {
marking_deque_ = marking_deque;
void set_marking_worklist(
MarkCompactCollector::MarkingWorklist* marking_worklist) {
marking_worklist_ = marking_worklist;
}
private:
......@@ -311,7 +312,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
static void SetNewSpacePageFlags(MemoryChunk* chunk, bool is_marking);
INLINE(intptr_t ProcessMarkingDeque(
INLINE(intptr_t ProcessMarkingWorklist(
intptr_t bytes_to_process,
ForceCompletionAction completion = DO_NOT_FORCE_COMPLETION));
......@@ -328,7 +329,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
size_t StepSizeToMakeProgress();
Heap* heap_;
MarkingDeque* marking_deque_;
MarkCompactCollector::MarkingWorklist* marking_worklist_;
double start_time_ms_;
size_t initial_old_generation_size_;
......
......@@ -15,7 +15,7 @@ namespace internal {
void MarkCompactCollector::PushBlack(HeapObject* obj) {
DCHECK((ObjectMarking::IsBlack<AccessMode::NON_ATOMIC>(
obj, MarkingState::Internal(obj))));
if (!marking_deque()->Push(obj)) {
if (!marking_worklist()->Push(obj)) {
ObjectMarking::BlackToGrey<AccessMode::NON_ATOMIC>(
obj, MarkingState::Internal(obj));
}
......@@ -23,7 +23,7 @@ void MarkCompactCollector::PushBlack(HeapObject* obj) {
void MarkCompactCollector::UnshiftBlack(HeapObject* obj) {
DCHECK(ObjectMarking::IsBlack(obj, MarkingState::Internal(obj)));
if (!marking_deque()->Unshift(obj)) {
if (!marking_worklist()->Unshift(obj)) {
ObjectMarking::BlackToGrey(obj, MarkingState::Internal(obj));
}
}
......
This diff is collapsed.
......@@ -33,12 +33,6 @@ class ThreadLocalTop;
class Worklist;
class YoungGenerationMarkingVisitor;
#ifdef V8_CONCURRENT_MARKING
using MarkingDeque = ConcurrentMarkingDeque;
#else
using MarkingDeque = SequentialMarkingDeque;
#endif
class ObjectMarking : public AllStatic {
public:
V8_INLINE static MarkBit MarkBitFrom(HeapObject* obj,
......@@ -290,8 +284,8 @@ class MarkCompactCollectorBase {
virtual void MarkLiveObjects() = 0;
// Mark objects reachable (transitively) from objects in the marking
// stack.
virtual void EmptyMarkingDeque() = 0;
virtual void ProcessMarkingDeque() = 0;
virtual void EmptyMarkingWorklist() = 0;
virtual void ProcessMarkingWorklist() = 0;
// Clear non-live references held in side data structures.
virtual void ClearNonLiveReferences() = 0;
virtual void EvacuatePrologue() = 0;
......@@ -346,6 +340,7 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
void CleanupSweepToIteratePages();
private:
using MarkingWorklist = WorklistView;
class RootMarkingVisitorSeedOnly;
class RootMarkingVisitor;
......@@ -360,8 +355,8 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
void MarkLiveObjects() override;
void MarkRootSetInParallel();
void ProcessMarkingDeque() override;
void EmptyMarkingDeque() override;
void ProcessMarkingWorklist() override;
void EmptyMarkingWorklist() override;
void ClearNonLiveReferences() override;
void EvacuatePrologue() override;
......@@ -386,6 +381,12 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
// Collector for young and old generation.
class MarkCompactCollector final : public MarkCompactCollectorBase {
public:
#ifdef V8_CONCURRENT_MARKING
using MarkingWorklist = ConcurrentMarkingDeque;
#else
using MarkingWorklist = SequentialMarkingDeque;
#endif
class RootMarkingVisitor;
class Sweeper {
......@@ -539,7 +540,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
bool evacuation() const { return evacuation_; }
MarkingDeque* marking_deque() { return &marking_deque_; }
MarkingWorklist* marking_worklist() { return &marking_worklist_; }
Sweeper& sweeper() { return sweeper_; }
......@@ -595,7 +596,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
// the string table are weak.
void MarkStringTable(RootMarkingVisitor* visitor);
void ProcessMarkingDeque() override;
void ProcessMarkingWorklist() override;
// Mark objects reachable (transitively) from objects in the marking stack
// or overflowed in the heap. This respects references only considered in
......@@ -615,15 +616,15 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
// This function empties the marking stack, but may leave overflowed objects
// in the heap, in which case the marking stack's overflow flag will be set.
void EmptyMarkingDeque() override;
void EmptyMarkingWorklist() override;
// Refill the marking stack with overflowed objects from the heap. This
// function either leaves the marking stack full or clears the overflow
// flag on the marking stack.
void RefillMarkingDeque();
void RefillMarkingWorklist();
// Helper methods for refilling the marking stack by discovering grey objects
// on various pages of the heap. Used by {RefillMarkingDeque} only.
// on various pages of the heap. Used by {RefillMarkingWorklist} only.
template <class T>
void DiscoverGreyObjectsWithIterator(T* it);
void DiscoverGreyObjectsOnPage(MemoryChunk* p);
......@@ -713,7 +714,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
bool have_code_to_deoptimize_;
MarkingDeque marking_deque_;
MarkingWorklist marking_worklist_;
// Candidates for pages that should be evacuated.
List<Page*> evacuation_candidates_;
......
......@@ -2370,7 +2370,7 @@ TEST(IdleNotificationFinishMarking) {
IncrementalMarking::DO_NOT_FORCE_COMPLETION, StepOrigin::kV8);
CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
} while (
!CcTest::heap()->mark_compact_collector()->marking_deque()->IsEmpty());
!CcTest::heap()->mark_compact_collector()->marking_worklist()->IsEmpty());
// The next invocations of incremental marking are not going to complete
// marking
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment