Commit 19ae2fc1 authored by Ulan Degenbaev, committed by Commit Bot

[heap] Refactor object marking state (part 2).

This follows up 4af9cfcc by separating the incremental marking state
from the full MC marking state. Runtime code and tests now use only
the incremental marking state; the full MC marking state is used by
the mark-compactor only during the atomic pause.

This separation decouples the atomicity of markbit accesses during
incremental marking from the atomicity used during the full MC, so
each can be chosen independently.
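
A minimal standalone sketch of the idea (simplified names and types,
not the actual V8 classes): the same CRTP base is instantiated with
different access modes, so each client can pick markbit/live-byte
atomicity independently.

  #include <atomic>
  #include <cstdint>

  enum class AccessMode { ATOMIC, NON_ATOMIC };

  // Markbit helpers (IsWhite/IsBlack/WhiteToBlack, ...) would live here,
  // parameterized by kMode and the concrete state's bitmap().
  template <typename ConcreteState, AccessMode kMode>
  class MarkingStateBase {};

  // Instantiation with atomic live-byte updates, for phases where other
  // threads may touch the same counters.
  class AtomicStateSketch final
      : public MarkingStateBase<AtomicStateSketch, AccessMode::ATOMIC> {
   public:
    void IncrementLiveBytes(std::atomic<intptr_t>* live_bytes, intptr_t by) {
      live_bytes->fetch_add(by, std::memory_order_relaxed);
    }
  };

  // Instantiation with plain updates, for phases with a single accessor.
  class NonAtomicStateSketch final
      : public MarkingStateBase<NonAtomicStateSketch, AccessMode::NON_ATOMIC> {
   public:
    void IncrementLiveBytes(intptr_t* live_bytes, intptr_t by) {
      *live_bytes += by;
    }
  };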

Bug: chromium:694255
TBR: mlippautz@chromium.org
Change-Id: Ia409ab06515cd0d1403a272a016633295c0d6692
Reviewed-on: https://chromium-review.googlesource.com/612350
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#47336}
parent d758e130
......@@ -3361,9 +3361,9 @@ void Heap::AdjustLiveBytes(HeapObject* object, int by) {
lo_space()->AdjustLiveBytes(by);
} else if (!in_heap_iterator() &&
!mark_compact_collector()->sweeping_in_progress() &&
mark_compact_collector()->marking_state()->IsBlack(object)) {
incremental_marking()->marking_state()->IsBlack(object)) {
DCHECK(MemoryChunk::FromAddress(object->address())->SweepingDone());
mark_compact_collector()->marking_state()->IncrementLiveBytes(
incremental_marking()->marking_state()->IncrementLiveBytes(
MemoryChunk::FromAddress(object->address()), by);
}
}
......@@ -3479,9 +3479,9 @@ void Heap::RightTrimFixedArray(FixedArrayBase* object, int elements_to_trim) {
// Clear the mark bits of the black area that belongs now to the filler.
// This is an optimization. The sweeper will release black fillers anyway.
if (incremental_marking()->black_allocation() &&
mark_compact_collector()->marking_state()->IsBlackOrGrey(filler)) {
incremental_marking()->marking_state()->IsBlackOrGrey(filler)) {
Page* page = Page::FromAddress(new_end);
mark_compact_collector()->marking_state()->bitmap(page)->ClearRange(
incremental_marking()->marking_state()->bitmap(page)->ClearRange(
page->AddressToMarkbitIndex(new_end),
page->AddressToMarkbitIndex(new_end + bytes_to_trim));
}
......@@ -4569,8 +4569,8 @@ void Heap::RegisterDeserializedObjectsForBlackAllocation(
// Iterate black objects in old space, code space, map space, and large
// object space for side effects.
MarkCompactCollector::MarkingState* marking_state =
mark_compact_collector()->marking_state();
IncrementalMarking::MarkingState* marking_state =
incremental_marking()->marking_state();
for (int i = OLD_SPACE; i < Serializer::kNumberOfSpaces; i++) {
const Heap::Reservation& res = reservations[i];
for (auto& chunk : res) {
......
......@@ -47,10 +47,7 @@ IncrementalMarking::IncrementalMarking(Heap* heap)
trace_wrappers_toggle_(false),
request_type_(NONE),
new_generation_observer_(*this, kAllocatedThreshold),
old_generation_observer_(*this, kAllocatedThreshold),
marking_state_(nullptr),
atomic_marking_state_(nullptr),
non_atomic_marking_state_(nullptr) {
old_generation_observer_(*this, kAllocatedThreshold) {
SetState(STOPPED);
}
......@@ -471,11 +468,6 @@ void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
DCHECK(state_ == STOPPED);
DCHECK(heap_->gc_state() == Heap::NOT_IN_GC);
DCHECK(!heap_->isolate()->serializer_enabled());
marking_state_ = heap_->mark_compact_collector()->marking_state();
atomic_marking_state_ =
heap_->mark_compact_collector()->atomic_marking_state();
non_atomic_marking_state_ =
heap_->mark_compact_collector()->non_atomic_marking_state();
Counters* counters = heap_->isolate()->counters();
......
......@@ -22,6 +22,52 @@ class PagedSpace;
enum class StepOrigin { kV8, kTask };
class IncrementalAtomicMarkingState final
: public MarkingStateBase<IncrementalAtomicMarkingState,
AccessMode::ATOMIC> {
public:
Bitmap* bitmap(const MemoryChunk* chunk) const {
return Bitmap::FromAddress(chunk->address() + MemoryChunk::kHeaderSize);
}
void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
reinterpret_cast<base::AtomicNumber<intptr_t>*>(&chunk->live_byte_count_)
->Increment(by);
}
intptr_t live_bytes(MemoryChunk* chunk) const {
return reinterpret_cast<base::AtomicNumber<intptr_t>*>(
&chunk->live_byte_count_)
->Value();
}
void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
reinterpret_cast<base::AtomicNumber<intptr_t>*>(&chunk->live_byte_count_)
->SetValue(value);
}
};
class IncrementalNonAtomicMarkingState final
: public MarkingStateBase<IncrementalNonAtomicMarkingState,
AccessMode::NON_ATOMIC> {
public:
Bitmap* bitmap(const MemoryChunk* chunk) const {
return Bitmap::FromAddress(chunk->address() + MemoryChunk::kHeaderSize);
}
void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
chunk->live_byte_count_ += by;
}
intptr_t live_bytes(MemoryChunk* chunk) const {
return chunk->live_byte_count_;
}
void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
chunk->live_byte_count_ = value;
}
};
class V8_EXPORT_PRIVATE IncrementalMarking {
public:
enum State { STOPPED, SWEEPING, MARKING, COMPLETE };
......@@ -32,6 +78,14 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
enum GCRequestType { NONE, COMPLETE_MARKING, FINALIZATION };
#ifdef V8_CONCURRENT_MARKING
using MarkingState = IncrementalAtomicMarkingState;
#else
using MarkingState = IncrementalNonAtomicMarkingState;
#endif
using AtomicMarkingState = IncrementalAtomicMarkingState;
using NonAtomicMarkingState = IncrementalNonAtomicMarkingState;
class PauseBlackAllocationScope {
public:
explicit PauseBlackAllocationScope(IncrementalMarking* marking)
......@@ -55,20 +109,12 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
explicit IncrementalMarking(Heap* heap);
MarkCompactCollector::MarkingState* marking_state() const {
DCHECK_NOT_NULL(marking_state_);
return marking_state_;
}
MarkingState* marking_state() { return &marking_state_; }
MarkCompactCollector::AtomicMarkingState* atomic_marking_state() const {
DCHECK_NOT_NULL(atomic_marking_state_);
return atomic_marking_state_;
}
AtomicMarkingState* atomic_marking_state() { return &atomic_marking_state_; }
MarkCompactCollector::NonAtomicMarkingState* non_atomic_marking_state()
const {
DCHECK_NOT_NULL(non_atomic_marking_state_);
return non_atomic_marking_state_;
NonAtomicMarkingState* non_atomic_marking_state() {
return &non_atomic_marking_state_;
}
void NotifyLeftTrimming(HeapObject* from, HeapObject* to);
......@@ -361,9 +407,9 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
Observer new_generation_observer_;
Observer old_generation_observer_;
MarkCompactCollector::MarkingState* marking_state_;
MarkCompactCollector::AtomicMarkingState* atomic_marking_state_;
MarkCompactCollector::NonAtomicMarkingState* non_atomic_marking_state_;
MarkingState marking_state_;
AtomicMarkingState atomic_marking_state_;
NonAtomicMarkingState non_atomic_marking_state_;
DISALLOW_IMPLICIT_CONSTRUCTORS(IncrementalMarking);
};
......
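As a side note on the IncrementalMarking changes above: a hedged sketch
(hypothetical names, details elided) of the resulting ownership model.
The marking states become value members of IncrementalMarking, so the
accessors are valid from construction and the lazy wiring previously
done in Start() is no longer needed.

  // Placeholder state types standing in for the classes added above.
  struct IncrementalAtomicStateSketch { /* atomic bitmap/live-byte ops */ };
  struct IncrementalNonAtomicStateSketch { /* plain bitmap/live-byte ops */ };

  class IncrementalMarkingSketch {
   public:
  #ifdef V8_CONCURRENT_MARKING
    using MarkingState = IncrementalAtomicStateSketch;
  #else
    using MarkingState = IncrementalNonAtomicStateSketch;
  #endif
    using AtomicMarkingState = IncrementalAtomicStateSketch;
    using NonAtomicMarkingState = IncrementalNonAtomicStateSketch;

    // Valid immediately; no DCHECK_NOT_NULL and no setup in Start().
    MarkingState* marking_state() { return &marking_state_; }
    AtomicMarkingState* atomic_marking_state() { return &atomic_marking_state_; }
    NonAtomicMarkingState* non_atomic_marking_state() {
      return &non_atomic_marking_state_;
    }

   private:
    MarkingState marking_state_;
    AtomicMarkingState atomic_marking_state_;
    NonAtomicMarkingState non_atomic_marking_state_;
  };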
......@@ -1567,7 +1567,8 @@ class YoungGenerationMigrationObserver final : public MigrationObserver {
// Migrate color to old generation marking in case the object survived young
// generation garbage collection.
if (heap_->incremental_marking()->IsMarking()) {
DCHECK(mark_compact_collector_->atomic_marking_state()->IsWhite(dst));
DCHECK(
heap_->incremental_marking()->atomic_marking_state()->IsWhite(dst));
heap_->incremental_marking()->TransferColor(src, dst);
}
}
......
......@@ -432,30 +432,6 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
friend class YoungGenerationMarkingVisitor;
};
class MajorMarkingState final
: public MarkingStateBase<MajorMarkingState, AccessMode::ATOMIC> {
public:
Bitmap* bitmap(const MemoryChunk* chunk) const {
return Bitmap::FromAddress(chunk->address() + MemoryChunk::kHeaderSize);
}
void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
reinterpret_cast<base::AtomicNumber<intptr_t>*>(&chunk->live_byte_count_)
->Increment(by);
}
intptr_t live_bytes(MemoryChunk* chunk) const {
return reinterpret_cast<base::AtomicNumber<intptr_t>*>(
&chunk->live_byte_count_)
->Value();
}
void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
reinterpret_cast<base::AtomicNumber<intptr_t>*>(&chunk->live_byte_count_)
->SetValue(value);
}
};
class MajorNonAtomicMarkingState final
: public MarkingStateBase<MajorNonAtomicMarkingState,
AccessMode::NON_ATOMIC> {
......@@ -486,13 +462,7 @@ struct WeakObjects {
// Collector for young and old generation.
class MarkCompactCollector final : public MarkCompactCollectorBase {
public:
#ifdef V8_CONCURRENT_MARKING
using MarkingState = MajorMarkingState;
#else
using MarkingState = MajorNonAtomicMarkingState;
#endif
using NonAtomicMarkingState = MajorNonAtomicMarkingState;
using AtomicMarkingState = MajorMarkingState;
static const int kMainThread = 0;
// Wrapper for the shared and bailout worklists.
......@@ -677,10 +647,6 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
kClearMarkbits,
};
MarkingState* marking_state() { return &marking_state_; }
AtomicMarkingState* atomic_marking_state() { return &atomic_marking_state_; }
NonAtomicMarkingState* non_atomic_marking_state() {
return &non_atomic_marking_state_;
}
......@@ -956,8 +922,6 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
Sweeper sweeper_;
MarkingState marking_state_;
AtomicMarkingState atomic_marking_state_;
NonAtomicMarkingState non_atomic_marking_state_;
friend class FullEvacuator;
......
......@@ -222,7 +222,7 @@ int MarkingVisitor<ConcreteVisitor>::VisitWeakCell(Map* map,
// contain smi zero.
if (!weak_cell->cleared()) {
HeapObject* value = HeapObject::cast(weak_cell->value());
if (collector_->marking_state()->IsBlackOrGrey(value)) {
if (heap_->incremental_marking()->marking_state()->IsBlackOrGrey(value)) {
// Weak cells with live values are directly processed here to reduce
// the processing time of weak cells during the main GC pause.
Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
......
......@@ -84,9 +84,8 @@ bool Scavenger::SemiSpaceCopyObject(Map* map, HeapObject** slot,
HeapObject* target = nullptr;
if (allocation.To(&target)) {
DCHECK(
heap()->mark_compact_collector()->non_atomic_marking_state()->IsWhite(
target));
DCHECK(heap()->incremental_marking()->non_atomic_marking_state()->IsWhite(
target));
const bool self_success = MigrateObject(map, object, target, object_size);
if (!self_success) {
allocator_.FreeLast(NEW_SPACE, target, object_size);
......@@ -111,9 +110,8 @@ bool Scavenger::PromoteObject(Map* map, HeapObject** slot, HeapObject* object,
HeapObject* target = nullptr;
if (allocation.To(&target)) {
DCHECK(
heap()->mark_compact_collector()->non_atomic_marking_state()->IsWhite(
target));
DCHECK(heap()->incremental_marking()->non_atomic_marking_state()->IsWhite(
target));
const bool self_success = MigrateObject(map, object, target, object_size);
if (!self_success) {
allocator_.FreeLast(OLD_SPACE, target, object_size);
......
......@@ -80,7 +80,7 @@ void Scavenger::IterateAndScavengePromotedObject(HeapObject* target, int size) {
// it would be a violation of the invariant to record it's slots.
const bool record_slots =
is_compacting_ &&
heap()->mark_compact_collector()->atomic_marking_state()->IsBlack(target);
heap()->incremental_marking()->atomic_marking_state()->IsBlack(target);
IterateAndScavengePromotedObjectsVisitor visitor(heap(), this, record_slots);
if (target->IsJSFunction()) {
// JSFunctions reachable through kNextFunctionLinkOffset are weak. Slots for
......
......@@ -568,8 +568,7 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
chunk->set_prev_chunk(nullptr);
chunk->local_tracker_ = nullptr;
heap->mark_compact_collector()->non_atomic_marking_state()->ClearLiveness(
chunk);
heap->incremental_marking()->non_atomic_marking_state()->ClearLiveness(chunk);
DCHECK(OFFSET_OF(MemoryChunk, flags_) == kFlagsOffset);
......@@ -941,8 +940,8 @@ void Page::CreateBlackArea(Address start, Address end) {
DCHECK_EQ(Page::FromAddress(start), this);
DCHECK_NE(start, end);
DCHECK_EQ(Page::FromAddress(end - 1), this);
MarkCompactCollector::MarkingState* marking_state =
heap()->mark_compact_collector()->marking_state();
IncrementalMarking::MarkingState* marking_state =
heap()->incremental_marking()->marking_state();
marking_state->bitmap(this)->SetRange(AddressToMarkbitIndex(start),
AddressToMarkbitIndex(end));
marking_state->IncrementLiveBytes(this, static_cast<int>(end - start));
......@@ -953,8 +952,8 @@ void Page::DestroyBlackArea(Address start, Address end) {
DCHECK_EQ(Page::FromAddress(start), this);
DCHECK_NE(start, end);
DCHECK_EQ(Page::FromAddress(end - 1), this);
MarkCompactCollector::MarkingState* marking_state =
heap()->mark_compact_collector()->marking_state();
IncrementalMarking::MarkingState* marking_state =
heap()->incremental_marking()->marking_state();
marking_state->bitmap(this)->ClearRange(AddressToMarkbitIndex(start),
AddressToMarkbitIndex(end));
marking_state->IncrementLiveBytes(this, -static_cast<int>(end - start));
......@@ -1626,8 +1625,8 @@ void PagedSpace::EmptyAllocationInfo() {
// Clear the bits in the unused black area.
if (current_top != current_limit) {
MarkCompactCollector::MarkingState* marking_state =
heap()->mark_compact_collector()->marking_state();
IncrementalMarking::MarkingState* marking_state =
heap()->incremental_marking()->marking_state();
marking_state->bitmap(page)->ClearRange(
page->AddressToMarkbitIndex(current_top),
page->AddressToMarkbitIndex(current_limit));
......@@ -1647,9 +1646,8 @@ void PagedSpace::IncreaseCapacity(size_t bytes) {
void PagedSpace::ReleasePage(Page* page) {
DCHECK_EQ(
0,
heap()->mark_compact_collector()->non_atomic_marking_state()->live_bytes(
page));
0, heap()->incremental_marking()->non_atomic_marking_state()->live_bytes(
page));
DCHECK_EQ(page->owner(), this);
free_list_.EvictFreeListItems(page);
......@@ -1682,8 +1680,8 @@ void PagedSpace::Print() {}
void PagedSpace::Verify(ObjectVisitor* visitor) {
bool allocation_pointer_found_in_space =
(allocation_info_.top() == allocation_info_.limit());
MarkCompactCollector::MarkingState* marking_state =
heap()->mark_compact_collector()->marking_state();
IncrementalMarking::MarkingState* marking_state =
heap()->incremental_marking()->marking_state();
for (Page* page : *this) {
CHECK(page->owner() == this);
if (page == Page::FromAllocationAreaAddress(allocation_info_.top())) {
......@@ -1847,8 +1845,8 @@ bool SemiSpace::EnsureCurrentCapacity() {
to_remove);
}
}
MarkCompactCollector::NonAtomicMarkingState* marking_state =
heap()->mark_compact_collector()->non_atomic_marking_state();
IncrementalMarking::NonAtomicMarkingState* marking_state =
heap()->incremental_marking()->non_atomic_marking_state();
while (actual_pages < expected_pages) {
actual_pages++;
current_page =
......@@ -1930,8 +1928,8 @@ void NewSpace::ResetAllocationInfo() {
to_space_.Reset();
UpdateAllocationInfo();
// Clear all mark-bits in the to-space.
MarkCompactCollector::NonAtomicMarkingState* marking_state =
heap()->mark_compact_collector()->non_atomic_marking_state();
IncrementalMarking::NonAtomicMarkingState* marking_state =
heap()->incremental_marking()->non_atomic_marking_state();
for (Page* p : to_space_) {
marking_state->ClearLiveness(p);
}
......@@ -2221,8 +2219,8 @@ bool SemiSpace::GrowTo(size_t new_capacity) {
const int delta_pages = static_cast<int>(delta / Page::kPageSize);
Page* last_page = anchor()->prev_page();
DCHECK_NE(last_page, anchor());
MarkCompactCollector::NonAtomicMarkingState* marking_state =
heap()->mark_compact_collector()->non_atomic_marking_state();
IncrementalMarking::NonAtomicMarkingState* marking_state =
heap()->incremental_marking()->non_atomic_marking_state();
for (int pages_added = 0; pages_added < delta_pages; pages_added++) {
Page* new_page =
heap()->memory_allocator()->AllocatePage<MemoryAllocator::kPooled>(
......@@ -2293,10 +2291,8 @@ void SemiSpace::FixPagesFlags(intptr_t flags, intptr_t mask) {
page->ClearFlag(MemoryChunk::IN_FROM_SPACE);
page->SetFlag(MemoryChunk::IN_TO_SPACE);
page->ClearFlag(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK);
heap()
->mark_compact_collector()
->non_atomic_marking_state()
->SetLiveBytes(page, 0);
heap()->incremental_marking()->non_atomic_marking_state()->SetLiveBytes(
page, 0);
} else {
page->SetFlag(MemoryChunk::IN_FROM_SPACE);
page->ClearFlag(MemoryChunk::IN_TO_SPACE);
......@@ -3293,7 +3289,7 @@ AllocationResult LargeObjectSpace::AllocateRaw(int object_size,
ClearRecordedSlots::kNo);
if (heap()->incremental_marking()->black_allocation()) {
heap()->mark_compact_collector()->marking_state()->WhiteToBlack(object);
heap()->incremental_marking()->marking_state()->WhiteToBlack(object);
}
return object;
}
......@@ -3338,8 +3334,8 @@ LargePage* LargeObjectSpace::FindPage(Address a) {
void LargeObjectSpace::ClearMarkingStateOfLiveObjects() {
MarkCompactCollector::NonAtomicMarkingState* marking_state =
heap()->mark_compact_collector()->non_atomic_marking_state();
IncrementalMarking::NonAtomicMarkingState* marking_state =
heap()->incremental_marking()->non_atomic_marking_state();
LargeObjectIterator it(this);
for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
if (marking_state->IsBlackOrGrey(obj)) {
......@@ -3388,8 +3384,8 @@ void LargeObjectSpace::RemoveChunkMapEntries(LargePage* page,
void LargeObjectSpace::FreeUnmarkedObjects() {
LargePage* previous = nullptr;
LargePage* current = first_page_;
MarkCompactCollector::NonAtomicMarkingState* marking_state =
heap()->mark_compact_collector()->non_atomic_marking_state();
IncrementalMarking::NonAtomicMarkingState* marking_state =
heap()->incremental_marking()->non_atomic_marking_state();
while (current != nullptr) {
HeapObject* object = current->GetObject();
DCHECK(!marking_state->IsGrey(object));
......@@ -3550,8 +3546,7 @@ void Page::Print() {
for (HeapObject* object = objects.Next(); object != NULL;
object = objects.Next()) {
bool is_marked =
heap()->mark_compact_collector()->marking_state()->IsBlackOrGrey(
object);
heap()->incremental_marking()->marking_state()->IsBlackOrGrey(object);
PrintF(" %c ", (is_marked ? '!' : ' ')); // Indent a little.
if (is_marked) {
mark_size += object->Size();
......@@ -3561,7 +3556,7 @@ void Page::Print() {
}
printf(" --------------------------------------\n");
printf(" Marked: %x, LiveCount: %" V8PRIdPTR "\n", mark_size,
heap()->mark_compact_collector()->marking_state()->live_bytes(this));
heap()->incremental_marking()->marking_state()->live_bytes(this));
}
#endif // DEBUG
......
......@@ -697,12 +697,13 @@ class MemoryChunk {
void InitializeReservedMemory() { reservation_.Reset(); }
friend class ConcurrentMarkingState;
friend class MinorMarkingState;
friend class MinorNonAtomicMarkingState;
friend class MajorMarkingState;
friend class IncrementalAtomicMarkingState;
friend class IncrementalNonAtomicMarkingState;
friend class MajorNonAtomicMarkingState;
friend class MemoryAllocator;
friend class MemoryChunkValidator;
friend class MinorMarkingState;
friend class MinorNonAtomicMarkingState;
};
static_assert(kMaxRegularHeapObjectSize <= MemoryChunk::kAllocatableMemory,
......
......@@ -1442,7 +1442,7 @@ void WeakCell::initialize(HeapObject* val) {
// all weak cells.
Heap* heap = val->GetHeap();
WriteBarrierMode mode =
heap->mark_compact_collector()->marking_state()->IsBlack(this)
heap->incremental_marking()->marking_state()->IsBlack(this)
? UPDATE_WRITE_BARRIER
: UPDATE_WEAK_WRITE_BARRIER;
CONDITIONAL_WRITE_BARRIER(heap, this, kValueOffset, val, mode);
......
......@@ -29,7 +29,7 @@ void CheckInvariantsOfAbortedPage(Page* page) {
// 3) The page is not marked as aborted compaction anymore.
CHECK(page->heap()
->mark_compact_collector()
->marking_state()
->non_atomic_marking_state()
->bitmap(page)
->IsClean());
CHECK(!page->IsEvacuationCandidate());
......
......@@ -2201,8 +2201,7 @@ TEST(InstanceOfStubWriteBarrier) {
CHECK(f->IsOptimized());
MarkCompactCollector::MarkingState* marking_state =
CcTest::heap()->mark_compact_collector()->marking_state();
IncrementalMarking::MarkingState* marking_state = marking->marking_state();
while (!marking_state->IsBlack(f->code()) && !marking->IsStopped()) {
// Discard any pending GC requests otherwise we will get GC when we enter
......@@ -4882,8 +4881,7 @@ TEST(Regress3631) {
v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
Handle<JSWeakCollection> weak_map(reinterpret_cast<JSWeakCollection*>(*obj));
HeapObject* weak_map_table = HeapObject::cast(weak_map->table());
MarkCompactCollector::MarkingState* marking_state =
CcTest::heap()->mark_compact_collector()->marking_state();
IncrementalMarking::MarkingState* marking_state = marking->marking_state();
while (!marking_state->IsBlack(weak_map_table) && !marking->IsStopped()) {
marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
IncrementalMarking::FORCE_COMPLETION, StepOrigin::kV8);
......@@ -5610,8 +5608,8 @@ TEST(Regress598319) {
}
CHECK(heap->lo_space()->Contains(arr.get()));
MarkCompactCollector::MarkingState* marking_state =
CcTest::heap()->mark_compact_collector()->marking_state();
IncrementalMarking* marking = heap->incremental_marking();
IncrementalMarking::MarkingState* marking_state = marking->marking_state();
CHECK(marking_state->IsWhite(arr.get()));
for (int i = 0; i < arr.get()->length(); i++) {
HeapObject* arr_value = HeapObject::cast(arr.get()->get(i));
......@@ -5619,7 +5617,6 @@ TEST(Regress598319) {
}
// Start incremental marking.
IncrementalMarking* marking = heap->incremental_marking();
CHECK(marking->IsMarking() || marking->IsStopped());
if (marking->IsStopped()) {
heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
......@@ -5813,8 +5810,7 @@ TEST(LeftTrimFixedArrayInBlackArea) {
isolate->factory()->NewFixedArray(4, TENURED);
Handle<FixedArray> array = isolate->factory()->NewFixedArray(50, TENURED);
CHECK(heap->old_space()->Contains(*array));
MarkCompactCollector::MarkingState* marking_state =
CcTest::heap()->mark_compact_collector()->marking_state();
IncrementalMarking::MarkingState* marking_state = marking->marking_state();
CHECK(marking_state->IsBlack(*array));
// Now left trim the allocated black area. A filler has to be installed
......@@ -5858,8 +5854,7 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
Address start_address = array->address();
Address end_address = start_address + array->Size();
Page* page = Page::FromAddress(start_address);
MarkCompactCollector::MarkingState* marking_state =
CcTest::heap()->mark_compact_collector()->marking_state();
IncrementalMarking::MarkingState* marking_state = marking->marking_state();
CHECK(marking_state->IsBlack(*array));
CHECK(marking_state->bitmap(page)->AllBitsSetInRange(
page->AddressToMarkbitIndex(start_address),
......@@ -5926,8 +5921,7 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
Address start_address = array->address();
Address end_address = start_address + array->Size();
Page* page = Page::FromAddress(start_address);
MarkCompactCollector::MarkingState* marking_state =
CcTest::heap()->mark_compact_collector()->marking_state();
IncrementalMarking::MarkingState* marking_state = marking->marking_state();
CHECK(marking_state->IsBlack(*array));
CHECK(marking_state->bitmap(page)->AllBitsSetInRange(
......
......@@ -358,8 +358,7 @@ TEST(Regress5829) {
ClearRecordedSlots::kNo);
heap->old_space()->EmptyAllocationInfo();
Page* page = Page::FromAddress(array->address());
MarkCompactCollector::MarkingState* marking_state =
heap->mark_compact_collector()->marking_state();
IncrementalMarking::MarkingState* marking_state = marking->marking_state();
for (auto object_and_size :
LiveObjectRange<kGreyObjects>(page, marking_state->bitmap(page))) {
CHECK(!object_and_size.first->IsFiller());
......
......@@ -76,7 +76,7 @@ UNINITIALIZED_TEST(PagePromotion_NewToOld) {
// Sanity check that the page meets the requirements for promotion.
const int threshold_bytes =
FLAG_page_promotion_threshold * Page::kAllocatableMemory / 100;
CHECK_GE(heap->mark_compact_collector()->marking_state()->live_bytes(
CHECK_GE(heap->incremental_marking()->marking_state()->live_bytes(
to_be_promoted_page),
threshold_bytes);
......
......@@ -1178,8 +1178,8 @@ TEST(DoScavengeWithIncrementalWriteBarrier) {
// in compacting mode and |obj_value|'s page is an evacuation candidate).
IncrementalMarking* marking = heap->incremental_marking();
CHECK(marking->IsCompacting());
MarkCompactCollector::MarkingState* marking_state =
heap->mark_compact_collector()->marking_state();
IncrementalMarking::MarkingState* marking_state =
heap->incremental_marking()->marking_state();
CHECK(marking_state->IsBlack(*obj));
CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
......@@ -1501,8 +1501,7 @@ static void TestIncrementalWriteBarrier(Handle<Map> map, Handle<Map> new_map,
// still active and |obj_value|'s page is indeed an evacuation candidate).
IncrementalMarking* marking = heap->incremental_marking();
CHECK(marking->IsMarking());
MarkCompactCollector::MarkingState* marking_state =
heap->mark_compact_collector()->marking_state();
IncrementalMarking::MarkingState* marking_state = marking->marking_state();
CHECK(marking_state->IsBlack(*obj));
CHECK(marking_state->IsBlack(*obj_value));
CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
......