Commit 76262021 authored by Omer Katz, committed by V8 LUCI CQ

[heap] Merge marking states

MinorMC maintained a separate marking state to support interleaved GCs.
Since MinorMC now assumes that interleaving is not possible, MinorMC can
use the same marking state as the full GC.

Bug: v8:12612
Change-Id: Ibeb7df2eb24e448f811b497c9d16b3b132f87ec2
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3735163
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#81468}
parent b9af74c8
...@@ -367,12 +367,10 @@ class BasicMemoryChunk { ...@@ -367,12 +367,10 @@ class BasicMemoryChunk {
friend class BasicMemoryChunkValidator; friend class BasicMemoryChunkValidator;
friend class ConcurrentMarkingState; friend class ConcurrentMarkingState;
friend class MajorMarkingState; friend class MarkingState;
friend class MajorAtomicMarkingState; friend class AtomicMarkingState;
friend class MajorNonAtomicMarkingState; friend class NonAtomicMarkingState;
friend class MemoryAllocator; friend class MemoryAllocator;
friend class MinorMarkingState;
friend class MinorNonAtomicMarkingState;
friend class PagedSpace; friend class PagedSpace;
}; };
......
...@@ -652,7 +652,7 @@ void ConcurrentMarking::FlushNativeContexts(NativeContextStats* main_stats) { ...@@ -652,7 +652,7 @@ void ConcurrentMarking::FlushNativeContexts(NativeContextStats* main_stats) {
} }
void ConcurrentMarking::FlushMemoryChunkData( void ConcurrentMarking::FlushMemoryChunkData(
MajorNonAtomicMarkingState* marking_state) { NonAtomicMarkingState* marking_state) {
DCHECK(!job_handle_ || !job_handle_->IsValid()); DCHECK(!job_handle_ || !job_handle_->IsValid());
for (int i = 1; i <= kMaxTasks; i++) { for (int i = 1; i <= kMaxTasks; i++) {
MemoryChunkDataMap& memory_chunk_data = task_state_[i].memory_chunk_data; MemoryChunkDataMap& memory_chunk_data = task_state_[i].memory_chunk_data;
......
...@@ -26,7 +26,7 @@ namespace internal { ...@@ -26,7 +26,7 @@ namespace internal {
class Heap; class Heap;
class Isolate; class Isolate;
class MajorNonAtomicMarkingState; class NonAtomicMarkingState;
class MemoryChunk; class MemoryChunk;
class WeakObjects; class WeakObjects;
...@@ -80,7 +80,7 @@ class V8_EXPORT_PRIVATE ConcurrentMarking { ...@@ -80,7 +80,7 @@ class V8_EXPORT_PRIVATE ConcurrentMarking {
// Flushes native context sizes to the given table of the main thread. // Flushes native context sizes to the given table of the main thread.
void FlushNativeContexts(NativeContextStats* main_stats); void FlushNativeContexts(NativeContextStats* main_stats);
// Flushes memory chunk data using the given marking state. // Flushes memory chunk data using the given marking state.
void FlushMemoryChunkData(MajorNonAtomicMarkingState* marking_state); void FlushMemoryChunkData(NonAtomicMarkingState* marking_state);
// This function is called for a new space page that was cleared after // This function is called for a new space page that was cleared after
// scavenge and is going to be re-used. // scavenge and is going to be re-used.
void ClearMemoryChunkData(MemoryChunk* chunk); void ClearMemoryChunkData(MemoryChunk* chunk);
......
...@@ -28,7 +28,7 @@ class UnifiedHeapMarkingState final { ...@@ -28,7 +28,7 @@ class UnifiedHeapMarkingState final {
private: private:
Heap* const heap_; Heap* const heap_;
MarkCompactCollector::MarkingState* const marking_state_; MarkingState* const marking_state_;
MarkingWorklists::Local* local_marking_worklist_ = nullptr; MarkingWorklists::Local* local_marking_worklist_ = nullptr;
const bool track_retaining_path_; const bool track_retaining_path_;
}; };
......
...@@ -375,7 +375,7 @@ void IncrementalMarking::UpdateMarkingWorklistAfterYoungGenGC() { ...@@ -375,7 +375,7 @@ void IncrementalMarking::UpdateMarkingWorklistAfterYoungGenGC() {
Map filler_map = ReadOnlyRoots(heap_).one_pointer_filler_map(); Map filler_map = ReadOnlyRoots(heap_).one_pointer_filler_map();
MinorMarkCompactCollector::MarkingState* minor_marking_state = MarkingState* minor_marking_state =
heap()->minor_mark_compact_collector()->marking_state(); heap()->minor_mark_compact_collector()->marking_state();
collector_->local_marking_worklists()->Publish(); collector_->local_marking_worklists()->Publish();
......
...@@ -39,10 +39,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking final { ...@@ -39,10 +39,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
// is triggered via stack guard. // is triggered via stack guard.
enum class CompletionAction { kGcViaStackGuard, kGCViaTask }; enum class CompletionAction { kGcViaStackGuard, kGCViaTask };
using MarkingState = MarkCompactCollector::MarkingState;
using AtomicMarkingState = MarkCompactCollector::AtomicMarkingState;
using NonAtomicMarkingState = MarkCompactCollector::NonAtomicMarkingState;
class V8_NODISCARD PauseBlackAllocationScope { class V8_NODISCARD PauseBlackAllocationScope {
public: public:
explicit PauseBlackAllocationScope(IncrementalMarking* marking) explicit PauseBlackAllocationScope(IncrementalMarking* marking)
......
...@@ -23,7 +23,7 @@ namespace internal { ...@@ -23,7 +23,7 @@ namespace internal {
// change. // change.
using InvalidatedSlots = std::map<HeapObject, int, Object::Comparer>; using InvalidatedSlots = std::map<HeapObject, int, Object::Comparer>;
class MajorNonAtomicMarkingState; class NonAtomicMarkingState;
// This class provides IsValid predicate that takes into account the set // This class provides IsValid predicate that takes into account the set
// of invalidated objects in the given memory chunk. // of invalidated objects in the given memory chunk.
...@@ -64,7 +64,7 @@ class V8_EXPORT_PRIVATE InvalidatedSlotsFilter { ...@@ -64,7 +64,7 @@ class V8_EXPORT_PRIVATE InvalidatedSlotsFilter {
Address sentinel_; Address sentinel_;
InvalidatedObjectInfo current_{kNullAddress, 0, false}; InvalidatedObjectInfo current_{kNullAddress, 0, false};
InvalidatedObjectInfo next_{kNullAddress, 0, false}; InvalidatedObjectInfo next_{kNullAddress, 0, false};
MajorNonAtomicMarkingState* marking_state_; NonAtomicMarkingState* marking_state_;
InvalidatedSlots empty_; InvalidatedSlots empty_;
#ifdef DEBUG #ifdef DEBUG
Address last_slot_; Address last_slot_;
......
...@@ -500,7 +500,6 @@ AllocationResult NewLargeObjectSpace::AllocateRaw(int object_size) { ...@@ -500,7 +500,6 @@ AllocationResult NewLargeObjectSpace::AllocateRaw(int object_size) {
page->SetFlag(MemoryChunk::TO_PAGE); page->SetFlag(MemoryChunk::TO_PAGE);
UpdatePendingObject(result); UpdatePendingObject(result);
if (FLAG_minor_mc) { if (FLAG_minor_mc) {
page->AllocateYoungGenerationBitmap();
heap() heap()
->minor_mark_compact_collector() ->minor_mark_compact_collector()
->non_atomic_marking_state() ->non_atomic_marking_state()
......
...@@ -296,7 +296,7 @@ class FullMarkingVerifier : public MarkingVerifier { ...@@ -296,7 +296,7 @@ class FullMarkingVerifier : public MarkingVerifier {
} }
} }
MarkCompactCollector::NonAtomicMarkingState* marking_state_; NonAtomicMarkingState* marking_state_;
}; };
class EvacuationVerifier : public ObjectVisitorWithCageBases, class EvacuationVerifier : public ObjectVisitorWithCageBases,
...@@ -1446,7 +1446,7 @@ class ExternalStringTableCleaner : public RootVisitor { ...@@ -1446,7 +1446,7 @@ class ExternalStringTableCleaner : public RootVisitor {
void VisitRootPointers(Root root, const char* description, void VisitRootPointers(Root root, const char* description,
FullObjectSlot start, FullObjectSlot end) override { FullObjectSlot start, FullObjectSlot end) override {
// Visit all HeapObject pointers in [start, end). // Visit all HeapObject pointers in [start, end).
MarkCompactCollector::NonAtomicMarkingState* marking_state = NonAtomicMarkingState* marking_state =
heap_->mark_compact_collector()->non_atomic_marking_state(); heap_->mark_compact_collector()->non_atomic_marking_state();
Object the_hole = ReadOnlyRoots(heap_).the_hole_value(); Object the_hole = ReadOnlyRoots(heap_).the_hole_value();
for (FullObjectSlot p = start; p < end; ++p) { for (FullObjectSlot p = start; p < end; ++p) {
...@@ -1475,8 +1475,7 @@ class ExternalStringTableCleaner : public RootVisitor { ...@@ -1475,8 +1475,7 @@ class ExternalStringTableCleaner : public RootVisitor {
// are retained. // are retained.
class MarkCompactWeakObjectRetainer : public WeakObjectRetainer { class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
public: public:
explicit MarkCompactWeakObjectRetainer( explicit MarkCompactWeakObjectRetainer(MarkingState* marking_state)
MarkCompactCollector::MarkingState* marking_state)
: marking_state_(marking_state) {} : marking_state_(marking_state) {}
Object RetainAs(Object object) override { Object RetainAs(Object object) override {
...@@ -1506,7 +1505,7 @@ class MarkCompactWeakObjectRetainer : public WeakObjectRetainer { ...@@ -1506,7 +1505,7 @@ class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
} }
private: private:
MarkCompactCollector::MarkingState* const marking_state_; MarkingState* const marking_state_;
}; };
class RecordMigratedSlotVisitor : public ObjectVisitorWithCageBases { class RecordMigratedSlotVisitor : public ObjectVisitorWithCageBases {
...@@ -2534,8 +2533,7 @@ void MarkCompactCollector::RecordObjectStats() { ...@@ -2534,8 +2533,7 @@ void MarkCompactCollector::RecordObjectStats() {
namespace { namespace {
bool ShouldRetainMap(MarkCompactCollector::MarkingState* marking_state, Map map, bool ShouldRetainMap(MarkingState* marking_state, Map map, int age) {
int age) {
if (age == 0) { if (age == 0) {
// The map has aged. Do not retain this map. // The map has aged. Do not retain this map.
return false; return false;
...@@ -4025,8 +4023,7 @@ class FullEvacuator : public Evacuator { ...@@ -4025,8 +4023,7 @@ class FullEvacuator : public Evacuator {
void FullEvacuator::RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) { void FullEvacuator::RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) {
const EvacuationMode evacuation_mode = ComputeEvacuationMode(chunk); const EvacuationMode evacuation_mode = ComputeEvacuationMode(chunk);
MarkCompactCollector::NonAtomicMarkingState* marking_state = NonAtomicMarkingState* marking_state = collector_->non_atomic_marking_state();
collector_->non_atomic_marking_state();
*live_bytes = marking_state->live_bytes(chunk); *live_bytes = marking_state->live_bytes(chunk);
TRACE_EVENT2(TRACE_DISABLED_BY_DEFAULT("v8.gc"), TRACE_EVENT2(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
"FullEvacuator::RawEvacuatePage", "evacuation_mode", "FullEvacuator::RawEvacuatePage", "evacuation_mode",
...@@ -5005,10 +5002,9 @@ void MarkCompactCollector::ReportAbortedEvacuationCandidateDueToFlags( ...@@ -5005,10 +5002,9 @@ void MarkCompactCollector::ReportAbortedEvacuationCandidateDueToFlags(
namespace { namespace {
void ReRecordPage( void ReRecordPage(Heap* heap,
Heap* heap, v8::internal::NonAtomicMarkingState* marking_state,
v8::internal::MarkCompactCollector::NonAtomicMarkingState* marking_state, Address failed_start, Page* page) {
Address failed_start, Page* page) {
page->SetFlag(Page::COMPACTION_WAS_ABORTED); page->SetFlag(Page::COMPACTION_WAS_ABORTED);
// Aborted compaction page. We have to record slots here, since we // Aborted compaction page. We have to record slots here, since we
// might not have recorded them in first place. // might not have recorded them in first place.
...@@ -5266,7 +5262,7 @@ class YoungGenerationMarkingVerifier : public MarkingVerifier { ...@@ -5266,7 +5262,7 @@ class YoungGenerationMarkingVerifier : public MarkingVerifier {
} }
} }
MinorMarkCompactCollector::NonAtomicMarkingState* marking_state_; NonAtomicMarkingState* marking_state_;
}; };
class YoungGenerationEvacuationVerifier : public EvacuationVerifier { class YoungGenerationEvacuationVerifier : public EvacuationVerifier {
...@@ -5342,7 +5338,7 @@ class YoungGenerationMarkingVisitor final ...@@ -5342,7 +5338,7 @@ class YoungGenerationMarkingVisitor final
: public NewSpaceVisitor<YoungGenerationMarkingVisitor> { : public NewSpaceVisitor<YoungGenerationMarkingVisitor> {
public: public:
YoungGenerationMarkingVisitor( YoungGenerationMarkingVisitor(
Isolate* isolate, MinorMarkCompactCollector::MarkingState* marking_state, Isolate* isolate, MarkingState* marking_state,
MinorMarkCompactCollector::MarkingWorklist::Local* worklist_local) MinorMarkCompactCollector::MarkingWorklist::Local* worklist_local)
: NewSpaceVisitor(isolate), : NewSpaceVisitor(isolate),
worklist_local_(worklist_local), worklist_local_(worklist_local),
...@@ -5418,7 +5414,7 @@ class YoungGenerationMarkingVisitor final ...@@ -5418,7 +5414,7 @@ class YoungGenerationMarkingVisitor final
} }
MinorMarkCompactCollector::MarkingWorklist::Local* worklist_local_; MinorMarkCompactCollector::MarkingWorklist::Local* worklist_local_;
MinorMarkCompactCollector::MarkingState* marking_state_; MarkingState* marking_state_;
}; };
void MinorMarkCompactCollector::SetUp() {} void MinorMarkCompactCollector::SetUp() {}
...@@ -5624,6 +5620,11 @@ class MinorMarkCompactCollector::RootMarkingVisitor : public RootVisitor { ...@@ -5624,6 +5620,11 @@ class MinorMarkCompactCollector::RootMarkingVisitor : public RootVisitor {
void MinorMarkCompactCollector::CollectGarbage() { void MinorMarkCompactCollector::CollectGarbage() {
DCHECK(!heap()->mark_compact_collector()->in_use()); DCHECK(!heap()->mark_compact_collector()->in_use());
#ifdef VERIFY_HEAP
for (Page* page : *heap()->new_space()) {
CHECK(page->marking_bitmap<AccessMode::NON_ATOMIC>()->IsClean());
}
#endif // VERIFY_HEAP
// Minor MC does not support processing the ephemeron remembered set. // Minor MC does not support processing the ephemeron remembered set.
DCHECK(heap()->ephemeron_remembered_set_.empty()); DCHECK(heap()->ephemeron_remembered_set_.empty());
...@@ -5758,7 +5759,7 @@ class YoungGenerationExternalStringTableCleaner : public RootVisitor { ...@@ -5758,7 +5759,7 @@ class YoungGenerationExternalStringTableCleaner : public RootVisitor {
private: private:
Heap* heap_; Heap* heap_;
MinorMarkCompactCollector::NonAtomicMarkingState* marking_state_; NonAtomicMarkingState* marking_state_;
}; };
} // namespace } // namespace
...@@ -5856,7 +5857,7 @@ class YoungGenerationMarkingTask { ...@@ -5856,7 +5857,7 @@ class YoungGenerationMarkingTask {
private: private:
MinorMarkCompactCollector::MarkingWorklist::Local marking_worklist_local_; MinorMarkCompactCollector::MarkingWorklist::Local marking_worklist_local_;
MinorMarkCompactCollector::MarkingState* marking_state_; MarkingState* marking_state_;
YoungGenerationMarkingVisitor visitor_; YoungGenerationMarkingVisitor visitor_;
}; };
...@@ -6212,8 +6213,7 @@ void YoungGenerationEvacuator::RawEvacuatePage(MemoryChunk* chunk, ...@@ -6212,8 +6213,7 @@ void YoungGenerationEvacuator::RawEvacuatePage(MemoryChunk* chunk,
intptr_t* live_bytes) { intptr_t* live_bytes) {
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"), TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
"YoungGenerationEvacuator::RawEvacuatePage"); "YoungGenerationEvacuator::RawEvacuatePage");
MinorMarkCompactCollector::NonAtomicMarkingState* marking_state = NonAtomicMarkingState* marking_state = collector_->non_atomic_marking_state();
collector_->non_atomic_marking_state();
*live_bytes = marking_state->live_bytes(chunk); *live_bytes = marking_state->live_bytes(chunk);
switch (ComputeEvacuationMode(chunk)) { switch (ComputeEvacuationMode(chunk)) {
case kObjectsNewToOld: case kObjectsNewToOld:
......
...@@ -184,65 +184,11 @@ enum class AlwaysPromoteYoung { kYes, kNo }; ...@@ -184,65 +184,11 @@ enum class AlwaysPromoteYoung { kYes, kNo };
enum PageEvacuationMode { NEW_TO_NEW, NEW_TO_OLD }; enum PageEvacuationMode { NEW_TO_NEW, NEW_TO_OLD };
enum class RememberedSetUpdatingMode { ALL, OLD_TO_NEW_ONLY }; enum class RememberedSetUpdatingMode { ALL, OLD_TO_NEW_ONLY };
class MinorMarkingState final
: public MarkingStateBase<MinorMarkingState, AccessMode::ATOMIC> {
public:
explicit MinorMarkingState(PtrComprCageBase cage_base)
: MarkingStateBase(cage_base) {}
ConcurrentBitmap<AccessMode::ATOMIC>* bitmap(
const BasicMemoryChunk* chunk) const {
return MemoryChunk::cast(chunk)
->young_generation_bitmap<AccessMode::ATOMIC>();
}
void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
chunk->young_generation_live_byte_count_ += by;
}
intptr_t live_bytes(const MemoryChunk* chunk) const {
return chunk->young_generation_live_byte_count_;
}
void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
chunk->young_generation_live_byte_count_ = value;
}
};
class MinorNonAtomicMarkingState final
: public MarkingStateBase<MinorNonAtomicMarkingState,
AccessMode::NON_ATOMIC> {
public:
explicit MinorNonAtomicMarkingState(PtrComprCageBase cage_base)
: MarkingStateBase(cage_base) {}
ConcurrentBitmap<AccessMode::NON_ATOMIC>* bitmap(
const BasicMemoryChunk* chunk) const {
return MemoryChunk::cast(chunk)
->young_generation_bitmap<AccessMode::NON_ATOMIC>();
}
void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
chunk->young_generation_live_byte_count_.fetch_add(
by, std::memory_order_relaxed);
}
intptr_t live_bytes(const MemoryChunk* chunk) const {
return chunk->young_generation_live_byte_count_.load(
std::memory_order_relaxed);
}
void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
chunk->young_generation_live_byte_count_.store(value,
std::memory_order_relaxed);
}
};
// This is used by marking visitors. // This is used by marking visitors.
class MajorMarkingState final class MarkingState final
: public MarkingStateBase<MajorMarkingState, AccessMode::ATOMIC> { : public MarkingStateBase<MarkingState, AccessMode::ATOMIC> {
public: public:
explicit MajorMarkingState(PtrComprCageBase cage_base) explicit MarkingState(PtrComprCageBase cage_base)
: MarkingStateBase(cage_base) {} : MarkingStateBase(cage_base) {}
ConcurrentBitmap<AccessMode::ATOMIC>* bitmap( ConcurrentBitmap<AccessMode::ATOMIC>* bitmap(
...@@ -267,10 +213,10 @@ class MajorMarkingState final ...@@ -267,10 +213,10 @@ class MajorMarkingState final
// This is used by Scavenger and Evacuator in TransferColor. // This is used by Scavenger and Evacuator in TransferColor.
// Live byte increments have to be atomic. // Live byte increments have to be atomic.
class MajorAtomicMarkingState final class AtomicMarkingState final
: public MarkingStateBase<MajorAtomicMarkingState, AccessMode::ATOMIC> { : public MarkingStateBase<AtomicMarkingState, AccessMode::ATOMIC> {
public: public:
explicit MajorAtomicMarkingState(PtrComprCageBase cage_base) explicit AtomicMarkingState(PtrComprCageBase cage_base)
: MarkingStateBase(cage_base) {} : MarkingStateBase(cage_base) {}
ConcurrentBitmap<AccessMode::ATOMIC>* bitmap( ConcurrentBitmap<AccessMode::ATOMIC>* bitmap(
...@@ -283,11 +229,10 @@ class MajorAtomicMarkingState final ...@@ -283,11 +229,10 @@ class MajorAtomicMarkingState final
} }
}; };
class MajorNonAtomicMarkingState final class NonAtomicMarkingState final
: public MarkingStateBase<MajorNonAtomicMarkingState, : public MarkingStateBase<NonAtomicMarkingState, AccessMode::NON_ATOMIC> {
AccessMode::NON_ATOMIC> {
public: public:
explicit MajorNonAtomicMarkingState(PtrComprCageBase cage_base) explicit NonAtomicMarkingState(PtrComprCageBase cage_base)
: MarkingStateBase(cage_base) {} : MarkingStateBase(cage_base) {}
ConcurrentBitmap<AccessMode::NON_ATOMIC>* bitmap( ConcurrentBitmap<AccessMode::NON_ATOMIC>* bitmap(
...@@ -387,10 +332,6 @@ class MainMarkingVisitor final ...@@ -387,10 +332,6 @@ class MainMarkingVisitor final
// Collector for young and old generation. // Collector for young and old generation.
class MarkCompactCollector final { class MarkCompactCollector final {
public: public:
using MarkingState = MajorMarkingState;
using AtomicMarkingState = MajorAtomicMarkingState;
using NonAtomicMarkingState = MajorNonAtomicMarkingState;
using MarkingVisitor = MainMarkingVisitor<MarkingState>; using MarkingVisitor = MainMarkingVisitor<MarkingState>;
class RootMarkingVisitor; class RootMarkingVisitor;
...@@ -816,9 +757,6 @@ class V8_NODISCARD EvacuationScope { ...@@ -816,9 +757,6 @@ class V8_NODISCARD EvacuationScope {
// Collector for young-generation only. // Collector for young-generation only.
class MinorMarkCompactCollector final { class MinorMarkCompactCollector final {
public: public:
using MarkingState = MinorMarkingState;
using NonAtomicMarkingState = MinorNonAtomicMarkingState;
static constexpr size_t kMaxParallelTasks = 8; static constexpr size_t kMaxParallelTasks = 8;
explicit MinorMarkCompactCollector(Heap* heap); explicit MinorMarkCompactCollector(Heap* heap);
......
...@@ -44,7 +44,7 @@ class MarkingBarrier { ...@@ -44,7 +44,7 @@ class MarkingBarrier {
inline bool MarkValue(HeapObject host, HeapObject value); inline bool MarkValue(HeapObject host, HeapObject value);
private: private:
using MarkingState = MarkCompactCollector::MarkingState; using MarkingState = MarkingState;
inline bool WhiteToGreyAndPush(HeapObject value); inline bool WhiteToGreyAndPush(HeapObject value);
......
...@@ -66,8 +66,6 @@ class V8_EXPORT_PRIVATE MemoryChunkLayout { ...@@ -66,8 +66,6 @@ class V8_EXPORT_PRIVATE MemoryChunkLayout {
FIELD(std::atomic<size_t>[kNumTypes], ExternalBackingStoreBytes), FIELD(std::atomic<size_t>[kNumTypes], ExternalBackingStoreBytes),
FIELD(heap::ListNode<MemoryChunk>, ListNode), FIELD(heap::ListNode<MemoryChunk>, ListNode),
FIELD(FreeListCategory**, Categories), FIELD(FreeListCategory**, Categories),
FIELD(std::atomic<intptr_t>, YoungGenerationLiveByteCount),
FIELD(Bitmap*, YoungGenerationBitmap),
FIELD(CodeObjectRegistry*, CodeObjectRegistry), FIELD(CodeObjectRegistry*, CodeObjectRegistry),
FIELD(PossiblyEmptyBuckets, PossiblyEmptyBuckets), FIELD(PossiblyEmptyBuckets, PossiblyEmptyBuckets),
FIELD(ActiveSystemPages, ActiveSystemPages), FIELD(ActiveSystemPages, ActiveSystemPages),
......
...@@ -157,7 +157,6 @@ MemoryChunk::MemoryChunk(Heap* heap, BaseSpace* space, size_t chunk_size, ...@@ -157,7 +157,6 @@ MemoryChunk::MemoryChunk(Heap* heap, BaseSpace* space, size_t chunk_size,
page_protection_change_mutex_ = new base::Mutex(); page_protection_change_mutex_ = new base::Mutex();
write_unprotect_counter_ = 0; write_unprotect_counter_ = 0;
mutex_ = new base::Mutex(); mutex_ = new base::Mutex();
young_generation_bitmap_ = nullptr;
external_backing_store_bytes_[ExternalBackingStoreType::kArrayBuffer] = 0; external_backing_store_bytes_[ExternalBackingStoreType::kArrayBuffer] = 0;
external_backing_store_bytes_[ExternalBackingStoreType::kExternalString] = 0; external_backing_store_bytes_[ExternalBackingStoreType::kExternalString] = 0;
...@@ -260,8 +259,6 @@ void MemoryChunk::ReleaseAllocatedMemoryNeededForWritableChunk() { ...@@ -260,8 +259,6 @@ void MemoryChunk::ReleaseAllocatedMemoryNeededForWritableChunk() {
ReleaseInvalidatedSlots<OLD_TO_OLD>(); ReleaseInvalidatedSlots<OLD_TO_OLD>();
ReleaseInvalidatedSlots<OLD_TO_SHARED>(); ReleaseInvalidatedSlots<OLD_TO_SHARED>();
if (young_generation_bitmap_ != nullptr) ReleaseYoungGenerationBitmap();
if (!IsLargePage()) { if (!IsLargePage()) {
Page* page = static_cast<Page*>(this); Page* page = static_cast<Page*>(this);
page->ReleaseFreeListCategories(); page->ReleaseFreeListCategories();
...@@ -456,18 +453,6 @@ bool MemoryChunk::RegisteredObjectWithInvalidatedSlots(HeapObject object) { ...@@ -456,18 +453,6 @@ bool MemoryChunk::RegisteredObjectWithInvalidatedSlots(HeapObject object) {
invalidated_slots<type>()->end(); invalidated_slots<type>()->end();
} }
void MemoryChunk::AllocateYoungGenerationBitmap() {
DCHECK_NULL(young_generation_bitmap_);
young_generation_bitmap_ =
static_cast<Bitmap*>(base::Calloc(1, Bitmap::kSize));
}
void MemoryChunk::ReleaseYoungGenerationBitmap() {
DCHECK_NOT_NULL(young_generation_bitmap_);
base::Free(young_generation_bitmap_);
young_generation_bitmap_ = nullptr;
}
#ifdef DEBUG #ifdef DEBUG
void MemoryChunk::ValidateOffsets(MemoryChunk* chunk) { void MemoryChunk::ValidateOffsets(MemoryChunk* chunk) {
// Note that we cannot use offsetof because MemoryChunk is not a POD. // Note that we cannot use offsetof because MemoryChunk is not a POD.
...@@ -502,13 +487,6 @@ void MemoryChunk::ValidateOffsets(MemoryChunk* chunk) { ...@@ -502,13 +487,6 @@ void MemoryChunk::ValidateOffsets(MemoryChunk* chunk) {
MemoryChunkLayout::kListNodeOffset); MemoryChunkLayout::kListNodeOffset);
DCHECK_EQ(reinterpret_cast<Address>(&chunk->categories_) - chunk->address(), DCHECK_EQ(reinterpret_cast<Address>(&chunk->categories_) - chunk->address(),
MemoryChunkLayout::kCategoriesOffset); MemoryChunkLayout::kCategoriesOffset);
DCHECK_EQ(
reinterpret_cast<Address>(&chunk->young_generation_live_byte_count_) -
chunk->address(),
MemoryChunkLayout::kYoungGenerationLiveByteCountOffset);
DCHECK_EQ(reinterpret_cast<Address>(&chunk->young_generation_bitmap_) -
chunk->address(),
MemoryChunkLayout::kYoungGenerationBitmapOffset);
DCHECK_EQ(reinterpret_cast<Address>(&chunk->code_object_registry_) - DCHECK_EQ(reinterpret_cast<Address>(&chunk->code_object_registry_) -
chunk->address(), chunk->address(),
MemoryChunkLayout::kCodeObjectRegistryOffset); MemoryChunkLayout::kCodeObjectRegistryOffset);
......
...@@ -152,9 +152,6 @@ class MemoryChunk : public BasicMemoryChunk { ...@@ -152,9 +152,6 @@ class MemoryChunk : public BasicMemoryChunk {
return invalidated_slots_[type]; return invalidated_slots_[type];
} }
void AllocateYoungGenerationBitmap();
void ReleaseYoungGenerationBitmap();
int FreeListsLength(); int FreeListsLength();
// Approximate amount of physical memory committed for this chunk. // Approximate amount of physical memory committed for this chunk.
...@@ -233,10 +230,6 @@ class MemoryChunk : public BasicMemoryChunk { ...@@ -233,10 +230,6 @@ class MemoryChunk : public BasicMemoryChunk {
void DecrementWriteUnprotectCounterAndMaybeSetPermissions( void DecrementWriteUnprotectCounterAndMaybeSetPermissions(
PageAllocator::Permission permission); PageAllocator::Permission permission);
template <AccessMode mode>
ConcurrentBitmap<mode>* young_generation_bitmap() const {
return reinterpret_cast<ConcurrentBitmap<mode>*>(young_generation_bitmap_);
}
#ifdef DEBUG #ifdef DEBUG
static void ValidateOffsets(MemoryChunk* chunk); static void ValidateOffsets(MemoryChunk* chunk);
#endif #endif
...@@ -284,9 +277,6 @@ class MemoryChunk : public BasicMemoryChunk { ...@@ -284,9 +277,6 @@ class MemoryChunk : public BasicMemoryChunk {
FreeListCategory** categories_; FreeListCategory** categories_;
std::atomic<intptr_t> young_generation_live_byte_count_;
Bitmap* young_generation_bitmap_;
CodeObjectRegistry* code_object_registry_; CodeObjectRegistry* code_object_registry_;
PossiblyEmptyBuckets possibly_empty_buckets_; PossiblyEmptyBuckets possibly_empty_buckets_;
...@@ -299,13 +289,11 @@ class MemoryChunk : public BasicMemoryChunk { ...@@ -299,13 +289,11 @@ class MemoryChunk : public BasicMemoryChunk {
private: private:
friend class ConcurrentMarkingState; friend class ConcurrentMarkingState;
friend class MajorMarkingState; friend class MarkingState;
friend class MajorAtomicMarkingState; friend class AtomicMarkingState;
friend class MajorNonAtomicMarkingState; friend class NonAtomicMarkingState;
friend class MemoryAllocator; friend class MemoryAllocator;
friend class MemoryChunkValidator; friend class MemoryChunkValidator;
friend class MinorMarkingState;
friend class MinorNonAtomicMarkingState;
friend class PagedSpace; friend class PagedSpace;
}; };
......
...@@ -25,7 +25,6 @@ Page* SemiSpace::InitializePage(MemoryChunk* chunk) { ...@@ -25,7 +25,6 @@ Page* SemiSpace::InitializePage(MemoryChunk* chunk) {
page->SetYoungGenerationPageFlags(heap()->incremental_marking()->IsMarking()); page->SetYoungGenerationPageFlags(heap()->incremental_marking()->IsMarking());
page->list_node().Initialize(); page->list_node().Initialize();
if (FLAG_minor_mc) { if (FLAG_minor_mc) {
page->AllocateYoungGenerationBitmap();
heap() heap()
->minor_mark_compact_collector() ->minor_mark_compact_collector()
->non_atomic_marking_state() ->non_atomic_marking_state()
...@@ -76,7 +75,7 @@ bool SemiSpace::EnsureCurrentCapacity() { ...@@ -76,7 +75,7 @@ bool SemiSpace::EnsureCurrentCapacity() {
} }
// Add more pages if we have less than expected_pages. // Add more pages if we have less than expected_pages.
IncrementalMarking::NonAtomicMarkingState* marking_state = NonAtomicMarkingState* marking_state =
heap()->incremental_marking()->non_atomic_marking_state(); heap()->incremental_marking()->non_atomic_marking_state();
while (actual_pages < expected_pages) { while (actual_pages < expected_pages) {
actual_pages++; actual_pages++;
...@@ -181,7 +180,7 @@ bool SemiSpace::GrowTo(size_t new_capacity) { ...@@ -181,7 +180,7 @@ bool SemiSpace::GrowTo(size_t new_capacity) {
DCHECK(IsAligned(delta, AllocatePageSize())); DCHECK(IsAligned(delta, AllocatePageSize()));
const int delta_pages = static_cast<int>(delta / Page::kPageSize); const int delta_pages = static_cast<int>(delta / Page::kPageSize);
DCHECK(last_page()); DCHECK(last_page());
IncrementalMarking::NonAtomicMarkingState* marking_state = NonAtomicMarkingState* marking_state =
heap()->incremental_marking()->non_atomic_marking_state(); heap()->incremental_marking()->non_atomic_marking_state();
for (int pages_added = 0; pages_added < delta_pages; pages_added++) { for (int pages_added = 0; pages_added < delta_pages; pages_added++) {
Page* new_page = heap()->memory_allocator()->AllocatePage( Page* new_page = heap()->memory_allocator()->AllocatePage(
...@@ -237,8 +236,6 @@ void SemiSpace::FixPagesFlags(Page::MainThreadFlags flags, ...@@ -237,8 +236,6 @@ void SemiSpace::FixPagesFlags(Page::MainThreadFlags flags,
page->ClearFlag(MemoryChunk::FROM_PAGE); page->ClearFlag(MemoryChunk::FROM_PAGE);
page->SetFlag(MemoryChunk::TO_PAGE); page->SetFlag(MemoryChunk::TO_PAGE);
page->ClearFlag(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK); page->ClearFlag(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK);
heap()->incremental_marking()->non_atomic_marking_state()->SetLiveBytes(
page, 0);
} else { } else {
page->SetFlag(MemoryChunk::FROM_PAGE); page->SetFlag(MemoryChunk::FROM_PAGE);
page->ClearFlag(MemoryChunk::TO_PAGE); page->ClearFlag(MemoryChunk::TO_PAGE);
...@@ -682,7 +679,7 @@ void SemiSpaceNewSpace::ResetLinearAllocationArea() { ...@@ -682,7 +679,7 @@ void SemiSpaceNewSpace::ResetLinearAllocationArea() {
to_space_.Reset(); to_space_.Reset();
UpdateLinearAllocationArea(); UpdateLinearAllocationArea();
// Clear all mark-bits in the to-space. // Clear all mark-bits in the to-space.
IncrementalMarking::NonAtomicMarkingState* marking_state = NonAtomicMarkingState* marking_state =
heap()->incremental_marking()->non_atomic_marking_state(); heap()->incremental_marking()->non_atomic_marking_state();
for (Page* p : to_space_) { for (Page* p : to_space_) {
marking_state->ClearLiveness(p); marking_state->ClearLiveness(p);
......
...@@ -450,7 +450,7 @@ class ObjectStatsCollectorImpl { ...@@ -450,7 +450,7 @@ class ObjectStatsCollectorImpl {
Heap* heap_; Heap* heap_;
ObjectStats* stats_; ObjectStats* stats_;
MarkCompactCollector::NonAtomicMarkingState* marking_state_; NonAtomicMarkingState* marking_state_;
std::unordered_set<HeapObject, Object::Hasher> virtual_objects_; std::unordered_set<HeapObject, Object::Hasher> virtual_objects_;
std::unordered_set<Address> external_resources_; std::unordered_set<Address> external_resources_;
FieldStatsCollector field_stats_collector_; FieldStatsCollector field_stats_collector_;
...@@ -1103,7 +1103,7 @@ class ObjectStatsVisitor { ...@@ -1103,7 +1103,7 @@ class ObjectStatsVisitor {
private: private:
ObjectStatsCollectorImpl* live_collector_; ObjectStatsCollectorImpl* live_collector_;
ObjectStatsCollectorImpl* dead_collector_; ObjectStatsCollectorImpl* dead_collector_;
MarkCompactCollector::NonAtomicMarkingState* marking_state_; NonAtomicMarkingState* marking_state_;
ObjectStatsCollectorImpl::Phase phase_; ObjectStatsCollectorImpl::Phase phase_;
}; };
......
...@@ -841,8 +841,7 @@ void PagedSpaceBase::Verify(Isolate* isolate, ObjectVisitor* visitor) const { ...@@ -841,8 +841,7 @@ void PagedSpaceBase::Verify(Isolate* isolate, ObjectVisitor* visitor) const {
} }
void PagedSpaceBase::VerifyLiveBytes() const { void PagedSpaceBase::VerifyLiveBytes() const {
IncrementalMarking::MarkingState* marking_state = MarkingState* marking_state = heap()->incremental_marking()->marking_state();
heap()->incremental_marking()->marking_state();
PtrComprCageBase cage_base(heap()->isolate()); PtrComprCageBase cage_base(heap()->isolate());
for (const Page* page : *this) { for (const Page* page : *this) {
CHECK(page->SweepingDone()); CHECK(page->SweepingDone());
......
...@@ -524,7 +524,7 @@ void ScavengerCollector::SweepArrayBufferExtensions() { ...@@ -524,7 +524,7 @@ void ScavengerCollector::SweepArrayBufferExtensions() {
void ScavengerCollector::HandleSurvivingNewLargeObjects() { void ScavengerCollector::HandleSurvivingNewLargeObjects() {
const bool is_compacting = heap_->incremental_marking()->IsCompacting(); const bool is_compacting = heap_->incremental_marking()->IsCompacting();
MajorAtomicMarkingState* marking_state = AtomicMarkingState* marking_state =
heap_->incremental_marking()->atomic_marking_state(); heap_->incremental_marking()->atomic_marking_state();
for (SurvivingNewLargeObjectMapEntry update_info : for (SurvivingNewLargeObjectMapEntry update_info :
......
...@@ -173,8 +173,7 @@ void Page::CreateBlackArea(Address start, Address end) { ...@@ -173,8 +173,7 @@ void Page::CreateBlackArea(Address start, Address end) {
DCHECK_EQ(Page::FromAddress(start), this); DCHECK_EQ(Page::FromAddress(start), this);
DCHECK_LT(start, end); DCHECK_LT(start, end);
DCHECK_EQ(Page::FromAddress(end - 1), this); DCHECK_EQ(Page::FromAddress(end - 1), this);
IncrementalMarking::MarkingState* marking_state = MarkingState* marking_state = heap()->incremental_marking()->marking_state();
heap()->incremental_marking()->marking_state();
marking_state->bitmap(this)->SetRange(AddressToMarkbitIndex(start), marking_state->bitmap(this)->SetRange(AddressToMarkbitIndex(start),
AddressToMarkbitIndex(end)); AddressToMarkbitIndex(end));
marking_state->IncrementLiveBytes(this, static_cast<intptr_t>(end - start)); marking_state->IncrementLiveBytes(this, static_cast<intptr_t>(end - start));
...@@ -185,7 +184,7 @@ void Page::CreateBlackAreaBackground(Address start, Address end) { ...@@ -185,7 +184,7 @@ void Page::CreateBlackAreaBackground(Address start, Address end) {
DCHECK_EQ(Page::FromAddress(start), this); DCHECK_EQ(Page::FromAddress(start), this);
DCHECK_LT(start, end); DCHECK_LT(start, end);
DCHECK_EQ(Page::FromAddress(end - 1), this); DCHECK_EQ(Page::FromAddress(end - 1), this);
IncrementalMarking::AtomicMarkingState* marking_state = AtomicMarkingState* marking_state =
heap()->incremental_marking()->atomic_marking_state(); heap()->incremental_marking()->atomic_marking_state();
marking_state->bitmap(this)->SetRange(AddressToMarkbitIndex(start), marking_state->bitmap(this)->SetRange(AddressToMarkbitIndex(start),
AddressToMarkbitIndex(end)); AddressToMarkbitIndex(end));
...@@ -198,8 +197,7 @@ void Page::DestroyBlackArea(Address start, Address end) { ...@@ -198,8 +197,7 @@ void Page::DestroyBlackArea(Address start, Address end) {
DCHECK_EQ(Page::FromAddress(start), this); DCHECK_EQ(Page::FromAddress(start), this);
DCHECK_LT(start, end); DCHECK_LT(start, end);
DCHECK_EQ(Page::FromAddress(end - 1), this); DCHECK_EQ(Page::FromAddress(end - 1), this);
IncrementalMarking::MarkingState* marking_state = MarkingState* marking_state = heap()->incremental_marking()->marking_state();
heap()->incremental_marking()->marking_state();
marking_state->bitmap(this)->ClearRange(AddressToMarkbitIndex(start), marking_state->bitmap(this)->ClearRange(AddressToMarkbitIndex(start),
AddressToMarkbitIndex(end)); AddressToMarkbitIndex(end));
marking_state->IncrementLiveBytes(this, -static_cast<intptr_t>(end - start)); marking_state->IncrementLiveBytes(this, -static_cast<intptr_t>(end - start));
...@@ -210,7 +208,7 @@ void Page::DestroyBlackAreaBackground(Address start, Address end) { ...@@ -210,7 +208,7 @@ void Page::DestroyBlackAreaBackground(Address start, Address end) {
DCHECK_EQ(Page::FromAddress(start), this); DCHECK_EQ(Page::FromAddress(start), this);
DCHECK_LT(start, end); DCHECK_LT(start, end);
DCHECK_EQ(Page::FromAddress(end - 1), this); DCHECK_EQ(Page::FromAddress(end - 1), this);
IncrementalMarking::AtomicMarkingState* marking_state = AtomicMarkingState* marking_state =
heap()->incremental_marking()->atomic_marking_state(); heap()->incremental_marking()->atomic_marking_state();
marking_state->bitmap(this)->ClearRange(AddressToMarkbitIndex(start), marking_state->bitmap(this)->ClearRange(AddressToMarkbitIndex(start),
AddressToMarkbitIndex(end)); AddressToMarkbitIndex(end));
......
...@@ -19,7 +19,7 @@ ...@@ -19,7 +19,7 @@
namespace v8 { namespace v8 {
namespace internal { namespace internal {
Sweeper::Sweeper(Heap* heap, MajorNonAtomicMarkingState* marking_state) Sweeper::Sweeper(Heap* heap, NonAtomicMarkingState* marking_state)
: heap_(heap), : heap_(heap),
marking_state_(marking_state), marking_state_(marking_state),
sweeping_in_progress_(false), sweeping_in_progress_(false),
...@@ -115,7 +115,7 @@ void Sweeper::TearDown() { ...@@ -115,7 +115,7 @@ void Sweeper::TearDown() {
void Sweeper::StartSweeping() { void Sweeper::StartSweeping() {
sweeping_in_progress_ = true; sweeping_in_progress_ = true;
should_reduce_memory_ = heap_->ShouldReduceMemory(); should_reduce_memory_ = heap_->ShouldReduceMemory();
MajorNonAtomicMarkingState* marking_state = NonAtomicMarkingState* marking_state =
heap_->mark_compact_collector()->non_atomic_marking_state(); heap_->mark_compact_collector()->non_atomic_marking_state();
ForAllSweepingSpaces([this, marking_state](AllocationSpace space) { ForAllSweepingSpaces([this, marking_state](AllocationSpace space) {
// Sorting is done in order to make compaction more efficient: by sweeping // Sorting is done in order to make compaction more efficient: by sweeping
......
...@@ -18,7 +18,7 @@ namespace v8 { ...@@ -18,7 +18,7 @@ namespace v8 {
namespace internal { namespace internal {
class InvalidatedSlotsCleanup; class InvalidatedSlotsCleanup;
class MajorNonAtomicMarkingState; class NonAtomicMarkingState;
class Page; class Page;
class PagedSpaceBase; class PagedSpaceBase;
class Space; class Space;
...@@ -74,7 +74,7 @@ class Sweeper { ...@@ -74,7 +74,7 @@ class Sweeper {
enum AddPageMode { REGULAR, READD_TEMPORARY_REMOVED_PAGE }; enum AddPageMode { REGULAR, READD_TEMPORARY_REMOVED_PAGE };
enum class SweepingMode { kEagerDuringGC, kLazyOrConcurrent }; enum class SweepingMode { kEagerDuringGC, kLazyOrConcurrent };
Sweeper(Heap* heap, MajorNonAtomicMarkingState* marking_state); Sweeper(Heap* heap, NonAtomicMarkingState* marking_state);
bool sweeping_in_progress() const { return sweeping_in_progress_; } bool sweeping_in_progress() const { return sweeping_in_progress_; }
...@@ -177,7 +177,7 @@ class Sweeper { ...@@ -177,7 +177,7 @@ class Sweeper {
} }
Heap* const heap_; Heap* const heap_;
MajorNonAtomicMarkingState* marking_state_; NonAtomicMarkingState* marking_state_;
std::unique_ptr<JobHandle> job_handle_; std::unique_ptr<JobHandle> job_handle_;
base::Mutex mutex_; base::Mutex mutex_;
base::ConditionVariable cv_page_swept_; base::ConditionVariable cv_page_swept_;
......
...@@ -2369,7 +2369,7 @@ TEST(InstanceOfStubWriteBarrier) { ...@@ -2369,7 +2369,7 @@ TEST(InstanceOfStubWriteBarrier) {
CHECK(f->HasAttachedOptimizedCode()); CHECK(f->HasAttachedOptimizedCode());
IncrementalMarking::MarkingState* marking_state = marking->marking_state(); MarkingState* marking_state = marking->marking_state();
const double kStepSizeInMs = 100; const double kStepSizeInMs = 100;
while (!marking_state->IsBlack(f->code()) && !marking->IsStopped()) { while (!marking_state->IsBlack(f->code()) && !marking->IsStopped()) {
...@@ -5689,7 +5689,7 @@ TEST(Regress598319) { ...@@ -5689,7 +5689,7 @@ TEST(Regress598319) {
CHECK(heap->lo_space()->Contains(arr.get())); CHECK(heap->lo_space()->Contains(arr.get()));
IncrementalMarking* marking = heap->incremental_marking(); IncrementalMarking* marking = heap->incremental_marking();
IncrementalMarking::MarkingState* marking_state = marking->marking_state(); MarkingState* marking_state = marking->marking_state();
CHECK(marking_state->IsWhite(arr.get())); CHECK(marking_state->IsWhite(arr.get()));
for (int i = 0; i < arr.get().length(); i++) { for (int i = 0; i < arr.get().length(); i++) {
HeapObject arr_value = HeapObject::cast(arr.get().get(i)); HeapObject arr_value = HeapObject::cast(arr.get().get(i));
...@@ -5933,7 +5933,7 @@ TEST(LeftTrimFixedArrayInBlackArea) { ...@@ -5933,7 +5933,7 @@ TEST(LeftTrimFixedArrayInBlackArea) {
Handle<FixedArray> array = Handle<FixedArray> array =
isolate->factory()->NewFixedArray(50, AllocationType::kOld); isolate->factory()->NewFixedArray(50, AllocationType::kOld);
CHECK(heap->old_space()->Contains(*array)); CHECK(heap->old_space()->Contains(*array));
IncrementalMarking::MarkingState* marking_state = marking->marking_state(); MarkingState* marking_state = marking->marking_state();
CHECK(marking_state->IsBlack(*array)); CHECK(marking_state->IsBlack(*array));
// Now left trim the allocated black area. A filler has to be installed // Now left trim the allocated black area. A filler has to be installed
...@@ -5979,8 +5979,7 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) { ...@@ -5979,8 +5979,7 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
Address start_address = array->address(); Address start_address = array->address();
Address end_address = start_address + array->Size(); Address end_address = start_address + array->Size();
Page* page = Page::FromAddress(start_address); Page* page = Page::FromAddress(start_address);
IncrementalMarking::NonAtomicMarkingState* marking_state = NonAtomicMarkingState* marking_state = marking->non_atomic_marking_state();
marking->non_atomic_marking_state();
CHECK(marking_state->IsBlack(*array)); CHECK(marking_state->IsBlack(*array));
CHECK(marking_state->bitmap(page)->AllBitsSetInRange( CHECK(marking_state->bitmap(page)->AllBitsSetInRange(
page->AddressToMarkbitIndex(start_address), page->AddressToMarkbitIndex(start_address),
...@@ -6049,8 +6048,7 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) { ...@@ -6049,8 +6048,7 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
Address start_address = array->address(); Address start_address = array->address();
Address end_address = start_address + array->Size(); Address end_address = start_address + array->Size();
Page* page = Page::FromAddress(start_address); Page* page = Page::FromAddress(start_address);
IncrementalMarking::NonAtomicMarkingState* marking_state = NonAtomicMarkingState* marking_state = marking->non_atomic_marking_state();
marking->non_atomic_marking_state();
CHECK(marking_state->IsBlack(*array)); CHECK(marking_state->IsBlack(*array));
CHECK(marking_state->bitmap(page)->AllBitsSetInRange( CHECK(marking_state->bitmap(page)->AllBitsSetInRange(
...@@ -7106,7 +7104,7 @@ TEST(Regress978156) { ...@@ -7106,7 +7104,7 @@ TEST(Regress978156) {
"collector cctest", GCTracer::MarkingType::kIncremental); "collector cctest", GCTracer::MarkingType::kIncremental);
marking->Start(i::GarbageCollectionReason::kTesting); marking->Start(i::GarbageCollectionReason::kTesting);
} }
IncrementalMarking::MarkingState* marking_state = marking->marking_state(); MarkingState* marking_state = marking->marking_state();
// 6. Mark the filler black to access its two markbits. This triggers // 6. Mark the filler black to access its two markbits. This triggers
// an out-of-bounds access of the marking bitmap in a bad case. // an out-of-bounds access of the marking bitmap in a bad case.
marking_state->WhiteToGrey(filler); marking_state->WhiteToGrey(filler);
......
...@@ -471,7 +471,7 @@ TEST(Regress5829) { ...@@ -471,7 +471,7 @@ TEST(Regress5829) {
heap->CreateFillerObjectAt(old_end - kTaggedSize, kTaggedSize); heap->CreateFillerObjectAt(old_end - kTaggedSize, kTaggedSize);
heap->old_space()->FreeLinearAllocationArea(); heap->old_space()->FreeLinearAllocationArea();
Page* page = Page::FromAddress(array->address()); Page* page = Page::FromAddress(array->address());
IncrementalMarking::MarkingState* marking_state = marking->marking_state(); MarkingState* marking_state = marking->marking_state();
for (auto object_and_size : for (auto object_and_size :
LiveObjectRange<kGreyObjects>(page, marking_state->bitmap(page))) { LiveObjectRange<kGreyObjects>(page, marking_state->bitmap(page))) {
CHECK(!object_and_size.first.IsFreeSpaceOrFiller()); CHECK(!object_and_size.first.IsFreeSpaceOrFiller());
......
...@@ -214,7 +214,7 @@ TEST_F(TracedReferenceTest, NoWriteBarrierOnConstruction) { ...@@ -214,7 +214,7 @@ TEST_F(TracedReferenceTest, NoWriteBarrierOnConstruction) {
v8::Local<v8::Object> local = v8::Local<v8::Object> local =
v8::Local<v8::Object>::New(v8_isolate(), v8::Object::New(v8_isolate())); v8::Local<v8::Object>::New(v8_isolate(), v8::Object::New(v8_isolate()));
SimulateIncrementalMarking(); SimulateIncrementalMarking();
MarkCompactCollector::MarkingState state(i_isolate()); MarkingState state(i_isolate());
ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local)))); ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
auto ref = auto ref =
std::make_unique<v8::TracedReference<v8::Object>>(v8_isolate(), local); std::make_unique<v8::TracedReference<v8::Object>>(v8_isolate(), local);
...@@ -234,7 +234,7 @@ TEST_F(TracedReferenceTest, WriteBarrierOnHeapReset) { ...@@ -234,7 +234,7 @@ TEST_F(TracedReferenceTest, WriteBarrierOnHeapReset) {
v8::Local<v8::Object>::New(v8_isolate(), v8::Object::New(v8_isolate())); v8::Local<v8::Object>::New(v8_isolate(), v8::Object::New(v8_isolate()));
auto ref = std::make_unique<v8::TracedReference<v8::Object>>(); auto ref = std::make_unique<v8::TracedReference<v8::Object>>();
SimulateIncrementalMarking(); SimulateIncrementalMarking();
MarkCompactCollector::MarkingState state(i_isolate()); MarkingState state(i_isolate());
ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local)))); ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
ref->Reset(v8_isolate(), local); ref->Reset(v8_isolate(), local);
EXPECT_TRUE(state.IsGrey(HeapObject::cast(*Utils::OpenHandle(*local)))); EXPECT_TRUE(state.IsGrey(HeapObject::cast(*Utils::OpenHandle(*local))));
...@@ -254,7 +254,7 @@ TEST_F(TracedReferenceTest, NoWriteBarrierOnStackReset) { ...@@ -254,7 +254,7 @@ TEST_F(TracedReferenceTest, NoWriteBarrierOnStackReset) {
v8::Local<v8::Object>::New(v8_isolate(), v8::Object::New(v8_isolate())); v8::Local<v8::Object>::New(v8_isolate(), v8::Object::New(v8_isolate()));
v8::TracedReference<v8::Object> ref; v8::TracedReference<v8::Object> ref;
SimulateIncrementalMarking(); SimulateIncrementalMarking();
MarkCompactCollector::MarkingState state(i_isolate()); MarkingState state(i_isolate());
ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local)))); ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
ref.Reset(v8_isolate(), local); ref.Reset(v8_isolate(), local);
EXPECT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local)))); EXPECT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
...@@ -274,7 +274,7 @@ TEST_F(TracedReferenceTest, WriteBarrierOnHeapCopy) { ...@@ -274,7 +274,7 @@ TEST_F(TracedReferenceTest, WriteBarrierOnHeapCopy) {
std::make_unique<v8::TracedReference<v8::Object>>(v8_isolate(), local); std::make_unique<v8::TracedReference<v8::Object>>(v8_isolate(), local);
auto ref_to = std::make_unique<v8::TracedReference<v8::Object>>(); auto ref_to = std::make_unique<v8::TracedReference<v8::Object>>();
SimulateIncrementalMarking(); SimulateIncrementalMarking();
MarkCompactCollector::MarkingState state(i_isolate()); MarkingState state(i_isolate());
ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local)))); ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
*ref_to = *ref_from; *ref_to = *ref_from;
EXPECT_TRUE(!ref_from->IsEmpty()); EXPECT_TRUE(!ref_from->IsEmpty());
...@@ -297,7 +297,7 @@ TEST_F(TracedReferenceTest, NoWriteBarrierOnStackCopy) { ...@@ -297,7 +297,7 @@ TEST_F(TracedReferenceTest, NoWriteBarrierOnStackCopy) {
std::make_unique<v8::TracedReference<v8::Object>>(v8_isolate(), local); std::make_unique<v8::TracedReference<v8::Object>>(v8_isolate(), local);
v8::TracedReference<v8::Object> ref_to; v8::TracedReference<v8::Object> ref_to;
SimulateIncrementalMarking(); SimulateIncrementalMarking();
MarkCompactCollector::MarkingState state(i_isolate()); MarkingState state(i_isolate());
ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local)))); ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
ref_to = *ref_from; ref_to = *ref_from;
EXPECT_TRUE(!ref_from->IsEmpty()); EXPECT_TRUE(!ref_from->IsEmpty());
...@@ -318,7 +318,7 @@ TEST_F(TracedReferenceTest, WriteBarrierOnMove) { ...@@ -318,7 +318,7 @@ TEST_F(TracedReferenceTest, WriteBarrierOnMove) {
std::make_unique<v8::TracedReference<v8::Object>>(v8_isolate(), local); std::make_unique<v8::TracedReference<v8::Object>>(v8_isolate(), local);
auto ref_to = std::make_unique<v8::TracedReference<v8::Object>>(); auto ref_to = std::make_unique<v8::TracedReference<v8::Object>>();
SimulateIncrementalMarking(); SimulateIncrementalMarking();
MarkCompactCollector::MarkingState state(i_isolate()); MarkingState state(i_isolate());
ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local)))); ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
*ref_to = std::move(*ref_from); *ref_to = std::move(*ref_from);
ASSERT_TRUE(ref_from->IsEmpty()); ASSERT_TRUE(ref_from->IsEmpty());
...@@ -341,7 +341,7 @@ TEST_F(TracedReferenceTest, NoWriteBarrierOnStackMove) { ...@@ -341,7 +341,7 @@ TEST_F(TracedReferenceTest, NoWriteBarrierOnStackMove) {
std::make_unique<v8::TracedReference<v8::Object>>(v8_isolate(), local); std::make_unique<v8::TracedReference<v8::Object>>(v8_isolate(), local);
v8::TracedReference<v8::Object> ref_to; v8::TracedReference<v8::Object> ref_to;
SimulateIncrementalMarking(); SimulateIncrementalMarking();
MarkCompactCollector::MarkingState state(i_isolate()); MarkingState state(i_isolate());
ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local)))); ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
ref_to = std::move(*ref_from); ref_to = std::move(*ref_from);
ASSERT_TRUE(ref_from->IsEmpty()); ASSERT_TRUE(ref_from->IsEmpty());
......
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment