Commit fee3bf09 authored by Omer Katz, committed by V8 LUCI CQ

heap: Remove build flag for MinorMC

The build flag is on by default and the actual functionality is guarded
by a runtime flag.

Bug: v8:12612
Change-Id: I6adbd5b766f502400af32eeeb035edca3a3606ef
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3448383
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#79036}
parent ca849f24
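
Illustrative sketch (not part of the commit, and not V8 code): the pattern this change applies is dropping a compile-time ENABLE_MINOR_MC guard and letting an always-compiled runtime flag select the behavior, mirroring the Heap::YoungGenerationCollector() hunk further down. The enum and flag below are stand-ins for V8's GarbageCollector and FLAG_minor_mc.

#include <iostream>

// Stand-in for V8's GarbageCollector enum (illustrative only).
enum class GarbageCollector { SCAVENGER, MINOR_MARK_COMPACTOR };

// Stand-in for the runtime flag FLAG_minor_mc, which defaults to false.
static bool flag_minor_mc = false;

// After the change there is no #ifdef ENABLE_MINOR_MC branch: the runtime
// flag alone decides which young-generation collector is used.
static GarbageCollector YoungGenerationCollector() {
  return flag_minor_mc ? GarbageCollector::MINOR_MARK_COMPACTOR
                       : GarbageCollector::SCAVENGER;
}

int main() {
  std::cout << (YoungGenerationCollector() ==
                        GarbageCollector::MINOR_MARK_COMPACTOR
                    ? "MinorMC\n"
                    : "Scavenger\n");
  return 0;
}

Since the build flag was already on by default, the #ifdef branches added no configurability; keeping only the runtime flag removes the dead compile-time paths shown in the diff below.
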
@@ -53,7 +53,6 @@ load(":bazel/v8-non-pointer-compression.bzl", "v8_binary_non_pointer_compression
# v8_can_use_fpu_instructions
# v8_use_mips_abi_hardfloat
# v8_enable_gdbjit
# v8_enable_minor_mc
# v8_check_header_includes
# v8_enable_shared_ro_heap
# v8_enable_lazy_source_positions
@@ -122,11 +121,6 @@ v8_flag(
default = True,
)
v8_flag(
name = "v8_enable_minor_mc",
default = True,
)
v8_flag(name = "v8_enable_object_print")
v8_flag(name = "v8_enable_slow_dchecks")
@@ -259,7 +253,6 @@ v8_config(
"v8_enable_hugepage": "ENABLE_HUGEPAGE",
"v8_enable_future": "V8_ENABLE_FUTURE",
"v8_enable_lazy_source_positions": "V8_ENABLE_LAZY_SOURCE_POSITIONS",
"v8_enable_minor_mc": "ENABLE_MINOR_MC",
"v8_enable_object_print": "OBJECT_PRINT",
"v8_enable_slow_dchecks": "ENABLE_SLOW_DCHECKS",
"v8_enable_snapshot_native_code_counters": "V8_SNAPSHOT_NATIVE_CODE_COUNTERS",
......
@@ -225,9 +225,6 @@ declare_args() {
(is_linux || is_chromeos || is_mac)) ||
(v8_current_cpu == "ppc64" && (is_linux || is_chromeos))
# Enable minor mark compact.
v8_enable_minor_mc = true
# Check that each header can be included in isolation (requires also
# setting the "check_v8_header_includes" gclient variable to run a
# specific hook).
@@ -861,9 +858,6 @@ config("features") {
if (v8_enable_hugepage) {
defines += [ "ENABLE_HUGEPAGE" ]
}
if (v8_enable_minor_mc) {
defines += [ "ENABLE_MINOR_MC" ]
}
if (v8_enable_object_print) {
defines += [ "OBJECT_PRINT" ]
}
......
@@ -1855,14 +1855,9 @@ DEFINE_NEG_NEG_IMPLICATION(text_is_readable, partial_constant_pool)
//
// Minor mark compact collector flags.
//
#ifdef ENABLE_MINOR_MC
DEFINE_BOOL(trace_minor_mc_parallel_marking, false,
"trace parallel marking for the young generation")
DEFINE_BOOL(minor_mc, false, "perform young generation mark compact GCs")
#else
DEFINE_BOOL_READONLY(minor_mc, false,
"perform young generation mark compact GCs")
#endif // ENABLE_MINOR_MC
//
// Dev shell flags
......
@@ -2577,7 +2577,6 @@ void Heap::MarkCompact() {
}
void Heap::MinorMarkCompact() {
#ifdef ENABLE_MINOR_MC
DCHECK(FLAG_minor_mc);
DCHECK(new_space());
@@ -2603,9 +2602,6 @@ void Heap::MinorMarkCompact() {
minor_mark_compact_collector()->CollectGarbage();
SetGCState(NOT_IN_GC);
#else
UNREACHABLE();
#endif // ENABLE_MINOR_MC
}
void Heap::MarkCompactEpilogue() {
@@ -4669,9 +4665,7 @@ class OldToNewSlotVerifyingVisitor : public SlotVerifyingVisitor {
void VisitEphemeron(HeapObject host, int index, ObjectSlot key,
ObjectSlot target) override {
VisitPointer(host, target);
#ifdef ENABLE_MINOR_MC
if (FLAG_minor_mc) return VisitPointer(host, target);
#endif
// Keys are handled separately and should never appear in this set.
CHECK(!InUntypedSet(key));
Object k = *key;
@@ -5829,11 +5823,7 @@ void Heap::SetUpSpaces(LinearAllocationArea* new_allocation_info,
}
tracer_.reset(new GCTracer(this));
#ifdef ENABLE_MINOR_MC
minor_mark_compact_collector_ = new MinorMarkCompactCollector(this);
#else
minor_mark_compact_collector_ = nullptr;
#endif // ENABLE_MINOR_MC
array_buffer_sweeper_.reset(new ArrayBufferSweeper(this));
gc_idle_time_handler_.reset(new GCIdleTimeHandler());
memory_measurement_.reset(new MemoryMeasurement(isolate()));
@@ -5854,11 +5844,9 @@ void Heap::SetUpSpaces(LinearAllocationArea* new_allocation_info,
LOG(isolate_, IntPtrTEvent("heap-available", Available()));
mark_compact_collector()->SetUp();
#ifdef ENABLE_MINOR_MC
if (minor_mark_compact_collector() != nullptr) {
minor_mark_compact_collector()->SetUp();
}
#endif // ENABLE_MINOR_MC
if (new_space()) {
scavenge_job_.reset(new ScavengeJob());
@@ -6137,13 +6125,11 @@ void Heap::TearDown() {
mark_compact_collector_.reset();
}
#ifdef ENABLE_MINOR_MC
if (minor_mark_compact_collector_ != nullptr) {
minor_mark_compact_collector_->TearDown();
delete minor_mark_compact_collector_;
minor_mark_compact_collector_ = nullptr;
}
#endif // ENABLE_MINOR_MC
scavenger_collector_.reset();
array_buffer_sweeper_.reset();
......
@@ -483,12 +483,8 @@ class Heap {
}
static inline GarbageCollector YoungGenerationCollector() {
#if ENABLE_MINOR_MC
return (FLAG_minor_mc) ? GarbageCollector::MINOR_MARK_COMPACTOR
: GarbageCollector::SCAVENGER;
#else
return GarbageCollector::SCAVENGER;
#endif // ENABLE_MINOR_MC
}
static inline const char* CollectorName(GarbageCollector collector) {
......
@@ -441,10 +441,8 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
Map filler_map = ReadOnlyRoots(heap_).one_pointer_filler_map();
#ifdef ENABLE_MINOR_MC
MinorMarkCompactCollector::MarkingState* minor_marking_state =
heap()->minor_mark_compact_collector()->marking_state();
#endif // ENABLE_MINOR_MC
collector_->local_marking_worklists()->Publish();
MarkingBarrier::PublishAll(heap());
@@ -455,10 +453,8 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
// this is referred inside DCHECK.
this,
#endif
#ifdef ENABLE_MINOR_MC
minor_marking_state,
#endif
cage_base, filler_map](HeapObject obj, HeapObject* out) -> bool {
minor_marking_state, cage_base,
filler_map](HeapObject obj, HeapObject* out) -> bool {
DCHECK(obj.IsHeapObject());
// Only pointers to from space have to be updated.
if (Heap::InFromPage(obj)) {
@@ -481,24 +477,20 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
// new space.
DCHECK(Heap::IsLargeObject(obj) ||
Page::FromHeapObject(obj)->IsFlagSet(Page::SWEEP_TO_ITERATE));
#ifdef ENABLE_MINOR_MC
if (minor_marking_state->IsWhite(obj)) {
return false;
}
#endif // ENABLE_MINOR_MC
// Either a large object or an object marked by the minor
// mark-compactor.
*out = obj;
return true;
} else {
// The object may be on a page that was moved from new to old space.
// Only applicable during minor MC garbage collections.
if (Page::FromHeapObject(obj)->IsFlagSet(Page::SWEEP_TO_ITERATE)) {
#ifdef ENABLE_MINOR_MC
if (minor_marking_state->IsWhite(obj)) {
return false;
}
#endif // ENABLE_MINOR_MC
*out = obj;
return true;
}
......
@@ -489,7 +489,6 @@ AllocationResult NewLargeObjectSpace::AllocateRaw(int object_size) {
page->SetYoungGenerationPageFlags(heap()->incremental_marking()->IsMarking());
page->SetFlag(MemoryChunk::TO_PAGE);
UpdatePendingObject(result);
#ifdef ENABLE_MINOR_MC
if (FLAG_minor_mc) {
page->AllocateYoungGenerationBitmap();
heap()
@@ -497,7 +496,6 @@ AllocationResult NewLargeObjectSpace::AllocateRaw(int object_size) {
->non_atomic_marking_state()
->ClearLiveness(page);
}
#endif // ENABLE_MINOR_MC
page->InitializationMemoryFence();
DCHECK(page->IsLargePage());
DCHECK_EQ(page->owner_identity(), NEW_LO_SPACE);
......
@@ -40,8 +40,6 @@ void MarkCompactCollector::MarkRootObject(Root root, HeapObject obj) {
}
}
#ifdef ENABLE_MINOR_MC
void MinorMarkCompactCollector::MarkRootObject(HeapObject obj) {
if (Heap::InYoungGeneration(obj) &&
non_atomic_marking_state_.WhiteToGrey(obj)) {
@@ -49,8 +47,6 @@ void MinorMarkCompactCollector::MarkRootObject(HeapObject obj) {
}
}
#endif
void MarkCompactCollector::MarkExternallyReferencedObject(HeapObject obj) {
if (marking_state()->WhiteToGrey(obj)) {
local_marking_worklists()->Push(obj);
......
@@ -613,9 +613,7 @@ void MarkCompactCollector::CollectGarbage() {
// update the state as they proceed.
DCHECK(state_ == PREPARE_GC);
#ifdef ENABLE_MINOR_MC
heap()->minor_mark_compact_collector()->CleanupSweepToIteratePages();
#endif // ENABLE_MINOR_MC
MarkLiveObjects();
ClearNonLiveReferences();
@@ -4806,8 +4804,6 @@ void MarkCompactCollector::StartSweepSpaces() {
}
}
#ifdef ENABLE_MINOR_MC
namespace {
#ifdef VERIFY_HEAP
@@ -5989,7 +5985,5 @@ void MinorMarkCompactCollector::EvacuatePagesInParallel() {
}
}
#endif // ENABLE_MINOR_MC
} // namespace internal
} // namespace v8
@@ -844,8 +844,6 @@ class V8_NODISCARD EvacuationScope {
MarkCompactCollector* collector_;
};
#ifdef ENABLE_MINOR_MC
// Collector for young-generation only.
class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
public:
@@ -925,8 +923,6 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
friend class YoungGenerationMarkingVisitor;
};
#endif // ENABLE_MINOR_MC
} // namespace internal
} // namespace v8
......
@@ -24,7 +24,6 @@ Page* SemiSpace::InitializePage(MemoryChunk* chunk) {
Page* page = static_cast<Page*>(chunk);
page->SetYoungGenerationPageFlags(heap()->incremental_marking()->IsMarking());
page->list_node().Initialize();
#ifdef ENABLE_MINOR_MC
if (FLAG_minor_mc) {
page->AllocateYoungGenerationBitmap();
heap()
@@ -32,7 +31,6 @@ Page* SemiSpace::InitializePage(MemoryChunk* chunk) {
->non_atomic_marking_state()
->ClearLiveness(page);
}
#endif // ENABLE_MINOR_MC
page->InitializationMemoryFence();
return page;
}
......
@@ -1368,11 +1368,9 @@ class OneByteVectorResource : public v8::String::ExternalOneByteStringResource {
};
TEST(InternalizeExternal) {
#ifdef ENABLE_MINOR_MC
// TODO(mlippautz): Remove once we add support for forwarding ThinStrings in
// minor MC
if (FLAG_minor_mc) return;
#endif // ENABLE_MINOR_MC
FLAG_stress_incremental_marking = false;
CcTest::InitializeVM();
i::Isolate* isolate = CcTest::i_isolate();
......