Commit 4085827c authored by Leon Bettscheider, committed by V8 LUCI CQ

[heap] Enable MinorMC incremental marking on soft limit

This CL adds a soft limit (via AllocationObserver) to run
incremental marking for MinorMC.

Once the soft limit is triggered, roots are marked.
This is a stepping stone for concurrent marking
(YoungGenerationConcurrentMarkingVisitor, go/YGCMV) integration.

Bug: v8:13012
Change-Id: I5bc9aeb80511159561845deb494023ade3fb7365
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3824339
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Leon Bettscheider <bettscheider@google.com>
Cr-Commit-Position: refs/heads/main@{#82695}
parent 5b78f174
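The mechanism is small: an AllocationObserver registered on new space is stepped as allocation proceeds, and once the young generation grows past a percentage of its capacity (the new --minor-mc-task-trigger flag, 80% by default), incremental marking for MinorMC is started. Below is a minimal standalone C++ sketch of that soft-limit pattern only; it does not use V8's real classes, and SketchHeap, SoftLimitObserver, and kTriggerPercent are made-up names for illustration. The actual implementation is the MinorMCTaskObserver and Heap::StartMinorMCIncrementalMarkingIfNeeded additions in the diff below.

#include <cstddef>
#include <cstdio>
#include <vector>

// Observer that is stepped after roughly every step_size bytes allocated.
class AllocationObserver {
 public:
  explicit AllocationObserver(size_t step_size) : step_size_(step_size) {}
  virtual ~AllocationObserver() = default;
  virtual void Step(size_t bytes_allocated) = 0;
  size_t step_size() const { return step_size_; }

 private:
  const size_t step_size_;
};

// Toy heap: tracks young-generation size and steps registered observers.
class SketchHeap {
 public:
  explicit SketchHeap(size_t young_capacity)
      : young_capacity_(young_capacity) {}

  void AddAllocationObserver(AllocationObserver* observer) {
    observers_.push_back({observer, 0});
  }

  void Allocate(size_t bytes) {
    young_size_ += bytes;
    for (Entry& entry : observers_) {
      entry.accumulated += bytes;
      if (entry.accumulated >= entry.observer->step_size()) {
        size_t stepped = entry.accumulated;
        entry.accumulated = 0;
        entry.observer->Step(stepped);
      }
    }
  }

  size_t young_size() const { return young_size_; }
  size_t young_capacity() const { return young_capacity_; }

  bool incremental_marking_running() const { return marking_; }
  void StartIncrementalMarking() {
    marking_ = true;
    std::printf("starting incremental marking at %zu of %zu bytes\n",
                young_size_, young_capacity_);
  }

 private:
  struct Entry {
    AllocationObserver* observer;
    size_t accumulated;
  };
  const size_t young_capacity_;
  size_t young_size_ = 0;
  bool marking_ = false;
  std::vector<Entry> observers_;
};

// Soft-limit observer: once the young generation reaches kTriggerPercent of
// its capacity, start incremental marking (cf. the 80% default of the new
// --minor-mc-task-trigger flag below).
class SoftLimitObserver final : public AllocationObserver {
 public:
  static constexpr size_t kTriggerPercent = 80;

  SoftLimitObserver(SketchHeap* heap, size_t step_size)
      : AllocationObserver(step_size), heap_(heap) {}

  void Step(size_t /*bytes_allocated*/) override {
    size_t trigger = heap_->young_capacity() * kTriggerPercent / 100;
    if (!heap_->incremental_marking_running() &&
        heap_->young_size() >= trigger) {
      heap_->StartIncrementalMarking();
    }
  }

 private:
  SketchHeap* const heap_;
};

int main() {
  SketchHeap heap(32 * 1024 * 1024);             // 32 MB young generation.
  SoftLimitObserver observer(&heap, 64 * 1024);  // Step every 64 KB.
  heap.AddAllocationObserver(&observer);
  // With a 32 MB capacity and an 80% trigger, marking starts once total
  // allocation crosses roughly 25.6 MB.
  for (int i = 0; i < 512; i++) heap.Allocate(64 * 1024);
  return 0;
}

Driving the trigger from an allocation observer keeps the check off the hot allocation path except at step boundaries, which is why the real change only adds a new observer next to the existing ScavengeTaskObserver rather than a new limit check in the allocator.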
......@@ -1179,7 +1179,7 @@ DEFINE_BOOL(huge_max_old_generation_size, true,
"the physical memory bigger than 16 GB")
DEFINE_SIZE_T(initial_old_space_size, 0, "initial old space size (in Mbytes)")
DEFINE_BOOL(separate_gc_phases, false,
"yound and full garbage collection phases are not overlapping")
"young and full garbage collection phases are not overlapping")
DEFINE_BOOL(global_gc_scheduling, true,
"enable GC scheduling based on global memory")
DEFINE_BOOL(gc_global, false, "always perform global GCs")
......@@ -1255,6 +1255,8 @@ DEFINE_INT(incremental_marking_hard_trigger, 0,
"threshold for starting incremental marking immediately in percent "
"of available space: limit - size")
DEFINE_BOOL(trace_unmapper, false, "Trace the unmapping")
DEFINE_INT(minor_mc_task_trigger, 80,
"minormc task trigger in percent of the current heap limit")
DEFINE_BOOL(parallel_scavenge, true, "parallel scavenge")
DEFINE_BOOL(scavenge_task, true, "schedule scavenge tasks")
DEFINE_INT(scavenge_task_trigger, 80,
......@@ -1326,6 +1328,8 @@ DEFINE_GENERIC_IMPLICATION(
TracingFlags::gc_stats.store(
v8::tracing::TracingCategoryObserver::ENABLED_BY_NATIVE))
DEFINE_NEG_IMPLICATION(trace_gc_object_stats, incremental_marking)
DEFINE_NEG_NEG_IMPLICATION(incremental_marking, concurrent_marking)
DEFINE_IMPLICATION(concurrent_marking, incremental_marking)
DEFINE_NEG_IMPLICATION(track_retaining_path, parallel_marking)
DEFINE_NEG_IMPLICATION(track_retaining_path, concurrent_marking)
DEFINE_BOOL(track_detached_contexts, true,
......@@ -1924,6 +1928,11 @@ DEFINE_BOOL(trace_minor_mc_parallel_marking, false,
"trace parallel marking for the young generation")
DEFINE_BOOL(minor_mc, false, "perform young generation mark compact GCs")
DEFINE_IMPLICATION(minor_mc, separate_gc_phases)
DEFINE_BOOL(concurrent_minor_mc, false,
"perform young generation mark compact GCs concurrently")
DEFINE_NEG_NEG_IMPLICATION(concurrent_marking, concurrent_minor_mc)
DEFINE_IMPLICATION(concurrent_minor_mc, minor_mc)
DEFINE_IMPLICATION(concurrent_minor_mc, concurrent_marking)
//
// Dev shell flags
......
......@@ -108,7 +108,8 @@ constexpr int GCTracer::Scope::IncrementalOffset(ScopeId id) {
constexpr bool GCTracer::Event::IsYoungGenerationEvent(Type type) {
DCHECK_NE(START, type);
return type == SCAVENGER || type == MINOR_MARK_COMPACTOR;
return type == SCAVENGER || type == MINOR_MARK_COMPACTOR ||
type == INCREMENTAL_MINOR_MARK_COMPACTOR;
}
CollectionEpoch GCTracer::CurrentEpoch(Scope::ScopeId id) const {
......@@ -124,7 +125,8 @@ bool GCTracer::IsConsistentWithCollector(GarbageCollector collector) const {
return (collector == GarbageCollector::SCAVENGER &&
current_.type == Event::SCAVENGER) ||
(collector == GarbageCollector::MINOR_MARK_COMPACTOR &&
current_.type == Event::MINOR_MARK_COMPACTOR) ||
(current_.type == Event::MINOR_MARK_COMPACTOR ||
current_.type == Event::INCREMENTAL_MINOR_MARK_COMPACTOR)) ||
(collector == GarbageCollector::MARK_COMPACTOR &&
(current_.type == Event::MARK_COMPACTOR ||
current_.type == Event::INCREMENTAL_MARK_COMPACTOR));
......
......@@ -78,6 +78,7 @@ const char* GCTracer::Event::TypeName(bool short_name) const {
case INCREMENTAL_MARK_COMPACTOR:
return (short_name) ? "ms" : "Mark-sweep";
case MINOR_MARK_COMPACTOR:
case INCREMENTAL_MINOR_MARK_COMPACTOR:
return (short_name) ? "mmc" : "Minor Mark-Compact";
case START:
return (short_name) ? "st" : "Start";
......@@ -275,7 +276,9 @@ void GCTracer::StartCycle(GarbageCollector collector,
type = Event::SCAVENGER;
break;
case GarbageCollector::MINOR_MARK_COMPACTOR:
type = Event::MINOR_MARK_COMPACTOR;
type = marking == MarkingType::kIncremental
? Event::INCREMENTAL_MINOR_MARK_COMPACTOR
: Event::MINOR_MARK_COMPACTOR;
break;
case GarbageCollector::MARK_COMPACTOR:
type = marking == MarkingType::kIncremental
......@@ -302,7 +305,9 @@ void GCTracer::StartCycle(GarbageCollector collector,
break;
case MarkingType::kIncremental:
// The current event will be updated later.
DCHECK(!Heap::IsYoungGenerationCollector(collector));
DCHECK_IMPLIES(Heap::IsYoungGenerationCollector(collector),
(FLAG_minor_mc &&
collector == GarbageCollector::MINOR_MARK_COMPACTOR));
DCHECK(!IsInObservablePause());
break;
}
......
......@@ -120,7 +120,8 @@ class V8_EXPORT_PRIVATE GCTracer {
MARK_COMPACTOR = 1,
INCREMENTAL_MARK_COMPACTOR = 2,
MINOR_MARK_COMPACTOR = 3,
START = 4
START = 4,
INCREMENTAL_MINOR_MARK_COMPACTOR = 5,
};
// Returns true if the event corresponds to a young generation GC.
......
......@@ -179,7 +179,7 @@ void Heap::SetSerializedGlobalProxySizes(FixedArray sizes) {
void Heap::SetBasicBlockProfilingData(Handle<ArrayList> list) {
set_basic_block_profiling_data(*list);
}
class ScavengeTaskObserver : public AllocationObserver {
class ScavengeTaskObserver final : public AllocationObserver {
public:
ScavengeTaskObserver(Heap* heap, intptr_t step_size)
: AllocationObserver(step_size), heap_(heap) {}
......@@ -192,6 +192,19 @@ class ScavengeTaskObserver : public AllocationObserver {
Heap* heap_;
};
class MinorMCTaskObserver final : public AllocationObserver {
public:
MinorMCTaskObserver(Heap* heap, intptr_t step_size)
: AllocationObserver(step_size), heap_(heap) {}
void Step(int bytes_allocated, Address, size_t) override {
heap_->StartMinorMCIncrementalMarkingIfNeeded();
}
private:
Heap* heap_;
};
Heap::Heap()
: isolate_(isolate()),
heap_allocator_(this),
......@@ -1552,6 +1565,21 @@ void Heap::ScheduleScavengeTaskIfNeeded() {
scavenge_job_->ScheduleTaskIfNeeded(this);
}
size_t Heap::MinorMCTaskTriggerSize() const {
return new_space()->Capacity() * FLAG_minor_mc_task_trigger / 100;
}
void Heap::StartMinorMCIncrementalMarkingIfNeeded() {
if (FLAG_concurrent_minor_mc && !IsTearingDown() &&
!incremental_marking()->IsMarking() &&
incremental_marking()->CanBeStarted() && V8_LIKELY(!FLAG_gc_global) &&
(new_space()->Size() >= MinorMCTaskTriggerSize())) {
StartIncrementalMarking(Heap::kNoGCFlags, GarbageCollectionReason::kTask,
kNoGCCallbackFlags,
GarbageCollector::MINOR_MARK_COMPACTOR);
}
}
void Heap::CollectAllGarbage(int flags, GarbageCollectionReason gc_reason,
const v8::GCCallbackFlags gc_callback_flags) {
// Since we are ignoring the return value, the exact choice of space does
......@@ -1769,13 +1797,25 @@ bool Heap::CollectGarbage(AllocationSpace space,
DCHECK(AllowGarbageCollection::IsAllowed());
GarbageCollector collector;
const char* collector_reason = nullptr;
if (gc_reason == GarbageCollectionReason::kFinalizeMinorMC) {
collector = GarbageCollector::MINOR_MARK_COMPACTOR;
collector_reason = "finalize MinorMC";
} else {
collector = SelectGarbageCollector(space, &collector_reason);
}
if (collector == GarbageCollector::MARK_COMPACTOR &&
incremental_marking()->IsMinorMarking()) {
CollectGarbage(NEW_SPACE, GarbageCollectionReason::kFinalizeMinorMC);
}
// Ensure that all pending phantom callbacks are invoked.
isolate()->global_handles()->InvokeSecondPassPhantomCallbacks();
const char* collector_reason = nullptr;
GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
GCType gc_type = GetGCTypeFromGarbageCollector(collector);
{
GCCallbacksScope scope(this);
// Temporary override any embedder stack state as callbacks may create
......@@ -1973,7 +2013,8 @@ int Heap::NotifyContextDisposed(bool dependant_context) {
void Heap::StartIncrementalMarking(int gc_flags,
GarbageCollectionReason gc_reason,
GCCallbackFlags gc_callback_flags) {
GCCallbackFlags gc_callback_flags,
GarbageCollector collector) {
DCHECK(incremental_marking()->IsStopped());
// Sweeping needs to be completed such that markbits are all cleared before
......@@ -1993,12 +2034,13 @@ void Heap::StartIncrementalMarking(int gc_flags,
#endif
// Now that sweeping is completed, we can start the next full GC cycle.
tracer()->StartCycle(GarbageCollector::MARK_COMPACTOR, gc_reason, nullptr,
tracer()->StartCycle(collector, gc_reason, nullptr,
GCTracer::MarkingType::kIncremental);
set_current_gc_flags(gc_flags);
current_gc_callback_flags_ = gc_callback_flags;
incremental_marking()->Start(gc_reason);
incremental_marking()->Start(collector, gc_reason);
}
void Heap::CompleteSweepingFull() {
......@@ -2222,8 +2264,13 @@ size_t Heap::PerformGarbageCollection(
CompleteSweepingFull();
}
#endif // VERIFY_HEAP
tracer()->StartCycle(collector, gc_reason, collector_reason,
GCTracer::MarkingType::kAtomic);
if (!FLAG_minor_mc || incremental_marking_->IsStopped()) {
// If FLAG_minor_mc is false, then the young GC is Scavenger, which may
// interrupt an incremental full GC. If MinorMC incremental marking was
// running before, there is already an active GCTracer cycle.
tracer()->StartCycle(collector, gc_reason, collector_reason,
GCTracer::MarkingType::kAtomic);
}
} else {
DCHECK_EQ(GarbageCollector::MARK_COMPACTOR, collector);
CompleteSweepingFull();
......@@ -4350,6 +4397,8 @@ const char* Heap::GarbageCollectionReasonToString(
return "unknown";
case GarbageCollectionReason::kBackgroundAllocationFailure:
return "background allocation failure";
case GarbageCollectionReason::kFinalizeMinorMC:
return "finalize MinorMC";
}
UNREACHABLE();
}
......@@ -5828,6 +5877,10 @@ void Heap::SetUpSpaces(LinearAllocationArea& new_allocation_info,
scavenge_task_observer_.reset(new ScavengeTaskObserver(
this, ScavengeJob::YoungGenerationTaskTriggerSize(this)));
new_space()->AddAllocationObserver(scavenge_task_observer_.get());
minor_mc_task_observer_.reset(
new MinorMCTaskObserver(this, MinorMCTaskTriggerSize()));
new_space()->AddAllocationObserver(minor_mc_task_observer_.get());
}
SetGetExternallyAllocatedMemoryInBytesCallback(
......@@ -6093,11 +6146,14 @@ void Heap::TearDown() {
if (new_space()) {
new_space()->RemoveAllocationObserver(scavenge_task_observer_.get());
new_space()->RemoveAllocationObserver(minor_mc_task_observer_.get());
}
scavenge_task_observer_.reset();
scavenge_job_.reset();
minor_mc_task_observer_.reset();
if (need_to_remove_stress_concurrent_allocation_observer_) {
RemoveAllocationObserversFromAllSpaces(
stress_concurrent_allocation_observer_.get(),
......
......@@ -177,6 +177,7 @@ enum class GarbageCollectionReason : int {
kGlobalAllocationLimit = 23,
kMeasureMemory = 24,
kBackgroundAllocationFailure = 25,
kFinalizeMinorMC = 26,
kLastReason = kBackgroundAllocationFailure,
};
......@@ -870,17 +871,17 @@ class Heap {
inline Address NewSpaceTop();
NewSpace* new_space() { return new_space_; }
OldSpace* old_space() { return old_space_; }
OldSpace* shared_old_space() { return shared_old_space_; }
CodeSpace* code_space() { return code_space_; }
MapSpace* map_space() { return map_space_; }
NewSpace* new_space() const { return new_space_; }
OldSpace* old_space() const { return old_space_; }
OldSpace* shared_old_space() const { return shared_old_space_; }
CodeSpace* code_space() const { return code_space_; }
MapSpace* map_space() const { return map_space_; }
inline PagedSpace* space_for_maps();
OldLargeObjectSpace* lo_space() { return lo_space_; }
OldLargeObjectSpace* shared_lo_space() { return shared_lo_space_; }
CodeLargeObjectSpace* code_lo_space() { return code_lo_space_; }
NewLargeObjectSpace* new_lo_space() { return new_lo_space_; }
ReadOnlySpace* read_only_space() { return read_only_space_; }
OldLargeObjectSpace* lo_space() const { return lo_space_; }
OldLargeObjectSpace* shared_lo_space() const { return shared_lo_space_; }
CodeLargeObjectSpace* code_lo_space() const { return code_lo_space_; }
NewLargeObjectSpace* new_lo_space() const { return new_lo_space_; }
ReadOnlySpace* read_only_space() const { return read_only_space_; }
inline PagedSpace* paged_space(int idx);
inline Space* space(int idx);
......@@ -1104,7 +1105,8 @@ class Heap {
// stopped.
V8_EXPORT_PRIVATE void StartIncrementalMarking(
int gc_flags, GarbageCollectionReason gc_reason,
GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags);
GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags,
GarbageCollector collector = GarbageCollector::MARK_COMPACTOR);
V8_EXPORT_PRIVATE void StartIncrementalMarkingIfAllocationLimitIsReached(
int gc_flags,
......@@ -2063,6 +2065,9 @@ class Heap {
// ===========================================================================
void ScheduleScavengeTaskIfNeeded();
void StartMinorMCIncrementalMarkingIfNeeded();
size_t MinorMCTaskTriggerSize() const;
bool MinorMCSizeTaskTriggerReached() const;
// ===========================================================================
// Allocation methods. =======================================================
......@@ -2334,6 +2339,7 @@ class Heap {
std::unique_ptr<ObjectStats> dead_object_stats_;
std::unique_ptr<ScavengeJob> scavenge_job_;
std::unique_ptr<AllocationObserver> scavenge_task_observer_;
std::unique_ptr<AllocationObserver> minor_mc_task_observer_;
std::unique_ptr<AllocationObserver> stress_concurrent_allocation_observer_;
std::unique_ptr<LocalEmbedderHeapTracer> local_embedder_heap_tracer_;
std::unique_ptr<AllocationTrackerForDebugging>
......@@ -2473,6 +2479,7 @@ class Heap {
friend class MarkCompactCollector;
friend class MarkCompactCollectorBase;
friend class MinorMarkCompactCollector;
friend class MinorMCTaskObserver;
friend class NewLargeObjectSpace;
friend class NewSpace;
friend class ObjectStatsCollector;
......
......@@ -7,6 +7,7 @@
#include "src/base/logging.h"
#include "src/base/platform/mutex.h"
#include "src/common/globals.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking-job.h"
#include "src/heap/mark-compact.h"
......@@ -35,6 +36,8 @@ enum class StepOrigin {
kTask
};
enum class CurrentCollector { kNone, kMinorMC, kMajorMC };
class V8_EXPORT_PRIVATE IncrementalMarking final {
public:
class V8_NODISCARD PauseBlackAllocationScope {
......@@ -103,7 +106,8 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
bool CanBeStarted() const;
void Start(GarbageCollectionReason gc_reason);
void Start(GarbageCollector garbage_collector,
GarbageCollectionReason gc_reason);
// Returns true if incremental marking was running and false otherwise.
bool Stop();
......@@ -140,7 +144,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
bool black_allocation() { return black_allocation_; }
MarkingWorklists::Local* local_marking_worklists() const {
return collector_->local_marking_worklists();
return current_local_marking_worklists;
}
bool IsBelowActivationThresholds() const;
......@@ -155,7 +159,12 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
// Performs incremental marking step for unit tests.
void AdvanceForTesting(double max_step_size_in_ms);
bool is_minor() const { return false; }
bool IsMinorMarking() const {
return IsMarking() && current_collector_ == CurrentCollector::kMinorMC;
}
bool IsMajorMarking() const {
return IsMarking() && current_collector_ == CurrentCollector::kMajorMC;
}
private:
class IncrementalMarkingRootMarkingVisitor;
......@@ -172,7 +181,8 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
IncrementalMarking* incremental_marking_;
};
void StartMarking();
void StartMarkingMajor();
void StartMarkingMinor();
void EmbedderStep(double expected_duration_ms, double* duration_ms);
......@@ -224,9 +234,16 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
double CurrentTimeToMarkingTask() const;
Heap* const heap_;
MarkCompactCollector* const collector_;
CurrentCollector current_collector_{CurrentCollector::kNone};
MarkCompactCollector* const major_collector_;
MinorMarkCompactCollector* const minor_collector_;
WeakObjects* weak_objects_;
MarkingWorklists::Local* current_local_marking_worklists;
double start_time_ms_ = 0.0;
size_t initial_old_generation_size_ = 0;
size_t old_generation_allocation_counter_ = 0;
......
......@@ -63,10 +63,11 @@ LocalHeap::LocalHeap(Heap* heap, ThreadKind kind,
if (!is_main_thread()) {
WriteBarrier::SetForThread(marking_barrier_.get());
if (heap_->incremental_marking()->IsMarking()) {
marking_barrier_->Activate(heap_->incremental_marking()->IsCompacting(),
heap_->incremental_marking()->is_minor()
? MarkingBarrierType::kMinor
: MarkingBarrierType::kMajor);
marking_barrier_->Activate(
heap_->incremental_marking()->IsCompacting(),
heap_->incremental_marking()->IsMinorMarking()
? MarkingBarrierType::kMinor
: MarkingBarrierType::kMajor);
}
}
});
......
......@@ -5772,6 +5772,12 @@ void MinorMarkCompactCollector::StartMarking() {
std::make_unique<MarkingWorklists::Local>(&marking_worklists_);
main_marking_visitor_ = std::make_unique<YoungGenerationMainMarkingVisitor>(
heap()->isolate(), marking_state(), local_marking_worklists());
#ifdef VERIFY_HEAP
for (Page* page : *heap()->new_space()) {
CHECK(page->marking_bitmap<AccessMode::NON_ATOMIC>()->IsClean());
}
#endif // VERIFY_HEAP
}
void MinorMarkCompactCollector::Finish() {
......@@ -5782,11 +5788,6 @@ void MinorMarkCompactCollector::Finish() {
void MinorMarkCompactCollector::CollectGarbage() {
DCHECK(!heap()->mark_compact_collector()->in_use());
#ifdef VERIFY_HEAP
for (Page* page : *heap()->new_space()) {
CHECK(page->marking_bitmap<AccessMode::NON_ATOMIC>()->IsClean());
}
#endif // VERIFY_HEAP
// Minor MC does not support processing the ephemeron remembered set.
DCHECK(heap()->ephemeron_remembered_set_.empty());
......@@ -6235,6 +6236,15 @@ void MinorMarkCompactCollector::MarkLiveObjects() {
PostponeInterruptsScope postpone(isolate());
bool was_marked_incrementally = false;
{
// TODO(v8:13012): TRACE_GC with MINOR_MC_MARK_FINISH_INCREMENTAL.
if (heap_->incremental_marking()->Stop()) {
MarkingBarrier::PublishAll(heap());
was_marked_incrementally = true;
}
}
RootMarkingVisitor root_visitor(this);
MarkRootSetInParallel(&root_visitor);
......@@ -6255,6 +6265,11 @@ void MinorMarkCompactCollector::MarkLiveObjects() {
if (FLAG_minor_mc_trace_fragmentation) {
TraceFragmentation();
}
if (was_marked_incrementally) {
MarkingBarrier::DeactivateAll(heap());
GlobalHandles::DisableMarkingBarrier(heap()->isolate());
}
}
void MinorMarkCompactCollector::DrainMarkingWorklist() {
......
......@@ -179,6 +179,13 @@ void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) {
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
}
if (marking->IsMinorMarking()) {
// If minor incremental marking is running, we need to finalize it first
// because of the AdvanceForTesting call in this function which is currently
// only possible for MajorMC.
heap->CollectGarbage(NEW_SPACE, GarbageCollectionReason::kFinalizeMinorMC);
}
if (marking->IsStopped()) {
heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
i::GarbageCollectionReason::kTesting);
......
......@@ -6508,7 +6508,8 @@ HEAP_TEST(Regress670675) {
heap->tracer()->StartCycle(
GarbageCollector::MARK_COMPACTOR, GarbageCollectionReason::kTesting,
"collector cctest", GCTracer::MarkingType::kIncremental);
marking->Start(i::GarbageCollectionReason::kTesting);
marking->Start(GarbageCollector::MARK_COMPACTOR,
i::GarbageCollectionReason::kTesting);
}
size_t array_length = 128 * KB;
size_t n = heap->OldGenerationSpaceAvailable() / array_length;
......
......@@ -122,7 +122,8 @@ TEST_WITH_PLATFORM(IncrementalMarkingUsingTasks, MockPlatform) {
heap->tracer()->StartCycle(
GarbageCollector::MARK_COMPACTOR, GarbageCollectionReason::kTesting,
"collector cctest", GCTracer::MarkingType::kIncremental);
marking->Start(i::GarbageCollectionReason::kTesting);
marking->Start(GarbageCollector::MARK_COMPACTOR,
i::GarbageCollectionReason::kTesting);
}
CHECK(platform.PendingTask());
while (platform.PendingTask()) {
......
......@@ -474,7 +474,8 @@ TEST_F(EmbedderTracingTest, FinalizeTracingWhenMarking) {
heap->tracer()->StartCycle(
GarbageCollector::MARK_COMPACTOR, GarbageCollectionReason::kTesting,
"collector cctest", GCTracer::MarkingType::kIncremental);
marking->Start(GarbageCollectionReason::kTesting);
marking->Start(GarbageCollector::MARK_COMPACTOR,
GarbageCollectionReason::kTesting);
}
// Sweeping is not running so we should immediately start marking.
......
......@@ -388,7 +388,8 @@ TEST_F(HeapTest, Regress978156) {
heap->tracer()->StartCycle(
GarbageCollector::MARK_COMPACTOR, GarbageCollectionReason::kTesting,
"collector cctest", GCTracer::MarkingType::kIncremental);
marking->Start(i::GarbageCollectionReason::kTesting);
marking->Start(GarbageCollector::MARK_COMPACTOR,
i::GarbageCollectionReason::kTesting);
}
MarkingState* marking_state = marking->marking_state();
// 6. Mark the filler black to access its two markbits. This triggers
......