Commit df5b86de authored by mlippautz, committed by Commit bot

[heap] Add basic infrastructure for Minor Mark-Compact collector

BUG=chromium:651354

Review-Url: https://codereview.chromium.org/2493803002
Cr-Commit-Position: refs/heads/master@{#40935}
parent 70bedcb1
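
In short: this CL adds a --minor_mc flag (off by default) and a third GarbageCollector value, MINOR_MARK_COMPACTOR, and reroutes call sites that previously tested `collector == SCAVENGER` through a young-generation predicate. A condensed, illustrative C++ sketch of the dispatch being set up (simplified from the heap.h hunks below; not the literal source):

// Sketch only: condensed from this CL's heap.h hunks.
enum GarbageCollector { SCAVENGER, MARK_COMPACTOR, MINOR_MARK_COMPACTOR };

bool FLAG_minor_mc = false;  // --minor_mc, off by default

// Which collector handles the young generation.
GarbageCollector YoungGenerationCollector() {
  return FLAG_minor_mc ? MINOR_MARK_COMPACTOR : SCAVENGER;
}

// Replaces the old `collector == SCAVENGER` checks.
bool IsYoungGenerationCollector(GarbageCollector collector) {
  return collector == SCAVENGER || collector == MINOR_MARK_COMPACTOR;
}

With the flag off, behavior is unchanged: the Scavenger remains the young-generation collector, and the new Minor Mark-Compact path stays dormant.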
@@ -727,6 +727,8 @@ DEFINE_INT(min_progress_during_incremental_marking_finalization, 32,
            "least this many unmarked objects")
 DEFINE_INT(max_incremental_marking_finalization_rounds, 3,
            "at most try this many times to finalize incremental marking")
+DEFINE_BOOL(minor_mc, false, "perform young generation mark compact GCs")
+DEFINE_NEG_IMPLICATION(minor_mc, incremental_marking)
 DEFINE_BOOL(black_allocation, false, "use black allocation")
 DEFINE_BOOL(concurrent_sweeping, true, "use concurrent sweeping")
 DEFINE_BOOL(parallel_compaction, true, "use parallel compaction")
......
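
The DEFINE_NEG_IMPLICATION line makes --minor_mc imply --no-incremental_marking, presumably because the new collector does not yet cooperate with incremental marking. A minimal sketch of what that implication amounts to once flags are processed (the actual flag machinery lives elsewhere in V8; this is illustrative only):

// Illustrative: DEFINE_NEG_IMPLICATION(minor_mc, incremental_marking)
// behaves roughly like this during flag post-processing.
bool FLAG_minor_mc = false;
bool FLAG_incremental_marking = true;

void EnforceFlagImplications() {
  // If --minor_mc is set, force incremental marking off.
  if (FLAG_minor_mc) FLAG_incremental_marking = false;
}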
@@ -1062,7 +1062,7 @@ int GlobalHandles::PostGarbageCollectionProcessing(
     // PostScavengeProcessing.
     return freed_nodes;
   }
-  if (collector == SCAVENGER) {
+  if (Heap::IsYoungGenerationCollector(collector)) {
     freed_nodes += PostScavengeProcessing(initial_post_gc_processing_count);
   } else {
     freed_nodes += PostMarkSweepProcessing(initial_post_gc_processing_count);
......
@@ -577,7 +577,7 @@ enum MinimumCapacity {
   USE_CUSTOM_MINIMUM_CAPACITY
 };
 
-enum GarbageCollector { SCAVENGER, MARK_COMPACTOR };
+enum GarbageCollector { SCAVENGER, MARK_COMPACTOR, MINOR_MARK_COMPACTOR };
 
 enum Executability { NOT_EXECUTABLE, EXECUTABLE };
......
@@ -84,28 +84,17 @@ GCTracer::Event::Event(Type type, GarbageCollectionReason gc_reason,
   }
 }
 
 const char* GCTracer::Event::TypeName(bool short_name) const {
   switch (type) {
     case SCAVENGER:
-      if (short_name) {
-        return "s";
-      } else {
-        return "Scavenge";
-      }
+      return (short_name) ? "s" : "Scavenge";
     case MARK_COMPACTOR:
     case INCREMENTAL_MARK_COMPACTOR:
-      if (short_name) {
-        return "ms";
-      } else {
-        return "Mark-sweep";
-      }
+      return (short_name) ? "ms" : "Mark-sweep";
+    case MINOR_MARK_COMPACTOR:
+      return (short_name) ? "mmc" : "Minor Mark-Compact";
     case START:
-      if (short_name) {
-        return "st";
-      } else {
-        return "Start";
-      }
+      return (short_name) ? "st" : "Start";
   }
   return "Unknown Event Type";
 }
@@ -116,6 +105,7 @@ GCTracer::GCTracer(Heap* heap)
       previous_(current_),
       incremental_marking_bytes_(0),
       incremental_marking_duration_(0.0),
+      incremental_marking_start_time_(0.0),
       recorded_incremental_marking_speed_(0.0),
       allocation_time_ms_(0.0),
       new_space_allocation_counter_bytes_(0),
@@ -140,8 +130,8 @@ void GCTracer::ResetForTesting() {
   new_space_allocation_in_bytes_since_gc_ = 0.0;
   old_generation_allocation_in_bytes_since_gc_ = 0.0;
   combined_mark_compact_speed_cache_ = 0.0;
-  recorded_scavenges_total_.Reset();
-  recorded_scavenges_survived_.Reset();
+  recorded_minor_gcs_total_.Reset();
+  recorded_minor_gcs_survived_.Reset();
   recorded_compactions_.Reset();
   recorded_mark_compacts_.Reset();
   recorded_incremental_mark_compacts_.Reset();
@@ -163,15 +153,22 @@ void GCTracer::Start(GarbageCollector collector,
   SampleAllocation(start_time, heap_->NewSpaceAllocationCounter(),
                    heap_->OldGenerationAllocationCounter());
 
-  if (collector == SCAVENGER) {
-    current_ = Event(Event::SCAVENGER, gc_reason, collector_reason);
-  } else if (collector == MARK_COMPACTOR) {
-    if (heap_->incremental_marking()->WasActivated()) {
+  switch (collector) {
+    case SCAVENGER:
+      current_ = Event(Event::SCAVENGER, gc_reason, collector_reason);
+      break;
+    case MINOR_MARK_COMPACTOR:
       current_ =
-          Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason, collector_reason);
-    } else {
-      current_ = Event(Event::MARK_COMPACTOR, gc_reason, collector_reason);
-    }
+          Event(Event::MINOR_MARK_COMPACTOR, gc_reason, collector_reason);
+      break;
+    case MARK_COMPACTOR:
+      if (heap_->incremental_marking()->WasActivated()) {
+        current_ = Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason,
+                         collector_reason);
+      } else {
+        current_ = Event(Event::MARK_COMPACTOR, gc_reason, collector_reason);
+      }
+      break;
   }
 
   current_.reduce_memory = heap_->ShouldReduceMemory();
@@ -194,7 +191,7 @@ void GCTracer::Start(GarbageCollector collector,
 
   Counters* counters = heap_->isolate()->counters();
 
-  if (collector == SCAVENGER) {
+  if (Heap::IsYoungGenerationCollector(collector)) {
     counters->scavenge_reason()->AddSample(static_cast<int>(gc_reason));
   } else {
     counters->mark_compact_reason()->AddSample(static_cast<int>(gc_reason));
@@ -220,15 +217,16 @@ void GCTracer::ResetIncrementalMarkingCounters() {
 
 void GCTracer::Stop(GarbageCollector collector) {
   start_counter_--;
   if (start_counter_ != 0) {
-    heap_->isolate()->PrintWithTimestamp(
-        "[Finished reentrant %s during %s.]\n",
-        collector == SCAVENGER ? "Scavenge" : "Mark-sweep",
-        current_.TypeName(false));
+    heap_->isolate()->PrintWithTimestamp("[Finished reentrant %s during %s.]\n",
+                                         Heap::CollectorName(collector),
+                                         current_.TypeName(false));
     return;
   }
 
   DCHECK(start_counter_ >= 0);
   DCHECK((collector == SCAVENGER && current_.type == Event::SCAVENGER) ||
+         (collector == MINOR_MARK_COMPACTOR &&
+          current_.type == Event::MINOR_MARK_COMPACTOR) ||
          (collector == MARK_COMPACTOR &&
          (current_.type == Event::MARK_COMPACTOR ||
           current_.type == Event::INCREMENTAL_MARK_COMPACTOR)));
@@ -250,36 +248,45 @@ void GCTracer::Stop(GarbageCollector collector) {
 
   double duration = current_.end_time - current_.start_time;
 
-  if (current_.type == Event::SCAVENGER) {
-    recorded_scavenges_total_.Push(
-        MakeBytesAndDuration(current_.new_space_object_size, duration));
-    recorded_scavenges_survived_.Push(MakeBytesAndDuration(
-        current_.survived_new_space_object_size, duration));
-  } else if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
-    current_.incremental_marking_bytes = incremental_marking_bytes_;
-    current_.incremental_marking_duration = incremental_marking_duration_;
-    for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
-      current_.incremental_marking_scopes[i] = incremental_marking_scopes_[i];
-      current_.scopes[i] = incremental_marking_scopes_[i].duration;
-    }
-    RecordIncrementalMarkingSpeed(current_.incremental_marking_bytes,
-                                  current_.incremental_marking_duration);
-    recorded_incremental_mark_compacts_.Push(
-        MakeBytesAndDuration(current_.start_object_size, duration));
-    ResetIncrementalMarkingCounters();
-    combined_mark_compact_speed_cache_ = 0.0;
-  } else {
-    DCHECK_EQ(0u, current_.incremental_marking_bytes);
-    DCHECK_EQ(0, current_.incremental_marking_duration);
-    recorded_mark_compacts_.Push(
-        MakeBytesAndDuration(current_.start_object_size, duration));
-    ResetIncrementalMarkingCounters();
-    combined_mark_compact_speed_cache_ = 0.0;
+  switch (current_.type) {
+    case Event::SCAVENGER:
+    case Event::MINOR_MARK_COMPACTOR:
+      recorded_minor_gcs_total_.Push(
+          MakeBytesAndDuration(current_.new_space_object_size, duration));
+      recorded_minor_gcs_survived_.Push(MakeBytesAndDuration(
+          current_.survived_new_space_object_size, duration));
+      break;
+    case Event::INCREMENTAL_MARK_COMPACTOR:
+      current_.incremental_marking_bytes = incremental_marking_bytes_;
+      current_.incremental_marking_duration = incremental_marking_duration_;
+      for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
+        current_.incremental_marking_scopes[i] = incremental_marking_scopes_[i];
+        current_.scopes[i] = incremental_marking_scopes_[i].duration;
+      }
+      RecordIncrementalMarkingSpeed(current_.incremental_marking_bytes,
+                                    current_.incremental_marking_duration);
+      recorded_incremental_mark_compacts_.Push(
+          MakeBytesAndDuration(current_.start_object_size, duration));
+      ResetIncrementalMarkingCounters();
+      combined_mark_compact_speed_cache_ = 0.0;
+      break;
+    case Event::MARK_COMPACTOR:
+      DCHECK_EQ(0u, current_.incremental_marking_bytes);
+      DCHECK_EQ(0, current_.incremental_marking_duration);
+      recorded_mark_compacts_.Push(
+          MakeBytesAndDuration(current_.start_object_size, duration));
+      ResetIncrementalMarkingCounters();
+      combined_mark_compact_speed_cache_ = 0.0;
+      break;
+    case Event::START:
+      UNREACHABLE();
   }
 
   heap_->UpdateTotalGCTime(duration);
 
-  if (current_.type == Event::SCAVENGER && FLAG_trace_gc_ignore_scavenger)
+  if ((current_.type == Event::SCAVENGER ||
+       current_.type == Event::MINOR_MARK_COMPACTOR) &&
+      FLAG_trace_gc_ignore_scavenger)
     return;
 
   if (FLAG_trace_gc_nvp) {
@@ -500,6 +507,15 @@ void GCTracer::PrintNVP() const {
           NewSpaceAllocationThroughputInBytesPerMillisecond(),
           ContextDisposalRateInMilliseconds());
       break;
+    case Event::MINOR_MARK_COMPACTOR:
+      heap_->isolate()->PrintWithTimestamp(
+          "pause=%.1f "
+          "mutator=%.1f "
+          "gc=%s "
+          "reduce_memory=%d\n",
+          duration, spent_in_mutator, current_.TypeName(true),
+          current_.reduce_memory);
+      break;
     case Event::MARK_COMPACTOR:
     case Event::INCREMENTAL_MARK_COMPACTOR:
       heap_->isolate()->PrintWithTimestamp(
@@ -721,9 +737,9 @@ double GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const {
 
 double GCTracer::ScavengeSpeedInBytesPerMillisecond(
     ScavengeSpeedMode mode) const {
   if (mode == kForAllObjects) {
-    return AverageSpeed(recorded_scavenges_total_);
+    return AverageSpeed(recorded_minor_gcs_total_);
   } else {
-    return AverageSpeed(recorded_scavenges_survived_);
+    return AverageSpeed(recorded_minor_gcs_survived_);
   }
 }
......
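
Note that ScavengeSpeedInBytesPerMillisecond() keeps its name but now averages over all recorded young-generation GCs, Scavenger and Minor Mark-Compact alike. A self-contained sketch of that computation, assuming BytesAndDuration is a (bytes, milliseconds) pair and AverageSpeed divides total bytes by total duration over a fixed-size ring buffer (the real base::RingBuffer and GCTracer helper differ in detail, e.g. capping and initial values):

// Sketch under stated assumptions; illustrative only.
#include <cstddef>
#include <utility>

using BytesAndDuration = std::pair<size_t, double>;  // (bytes, ms)

template <typename T, size_t kSize = 10>
class RingBuffer {
 public:
  void Push(const T& value) {
    // Overwrite the oldest slot once the buffer is full.
    elements_[pos_] = value;
    pos_ = (pos_ + 1) % kSize;
    if (count_ < kSize) count_++;
  }
  size_t count() const { return count_; }
  const T& at(size_t i) const { return elements_[i]; }

 private:
  T elements_[kSize] = {};
  size_t pos_ = 0;
  size_t count_ = 0;
};

// Average speed in bytes/ms across the recorded minor GCs.
double AverageSpeed(const RingBuffer<BytesAndDuration>& buffer) {
  size_t bytes = 0;
  double ms = 0.0;
  for (size_t i = 0; i < buffer.count(); i++) {
    bytes += buffer.at(i).first;
    ms += buffer.at(i).second;
  }
  return ms > 0.0 ? bytes / ms : 0.0;
}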
@@ -152,7 +152,8 @@ class V8_EXPORT_PRIVATE GCTracer {
       SCAVENGER = 0,
       MARK_COMPACTOR = 1,
       INCREMENTAL_MARK_COMPACTOR = 2,
-      START = 3
+      MINOR_MARK_COMPACTOR = 3,
+      START = 4
     };
 
     Event(Type type, GarbageCollectionReason gc_reason,
@@ -413,8 +414,8 @@ class V8_EXPORT_PRIVATE GCTracer {
   // Separate timer used for --runtime_call_stats
   RuntimeCallTimer timer_;
 
-  base::RingBuffer<BytesAndDuration> recorded_scavenges_total_;
-  base::RingBuffer<BytesAndDuration> recorded_scavenges_survived_;
+  base::RingBuffer<BytesAndDuration> recorded_minor_gcs_total_;
+  base::RingBuffer<BytesAndDuration> recorded_minor_gcs_survived_;
   base::RingBuffer<BytesAndDuration> recorded_compactions_;
   base::RingBuffer<BytesAndDuration> recorded_incremental_mark_compacts_;
   base::RingBuffer<BytesAndDuration> recorded_mark_compacts_;
......
@@ -286,7 +286,7 @@ GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
 
   // Default
   *reason = NULL;
-  return SCAVENGER;
+  return YoungGenerationCollector();
 }
@@ -822,7 +822,7 @@ void Heap::FinalizeIncrementalMarking(GarbageCollectionReason gc_reason) {
 
 HistogramTimer* Heap::GCTypeTimer(GarbageCollector collector) {
-  if (collector == SCAVENGER) {
+  if (IsYoungGenerationCollector(collector)) {
     return isolate_->counters()->gc_scavenger();
   } else {
     if (!incremental_marking()->IsStopped()) {
@@ -952,7 +952,8 @@ bool Heap::CollectGarbage(GarbageCollector collector,
 
   EnsureFillerObjectAtTop();
 
-  if (collector == SCAVENGER && !incremental_marking()->IsStopped()) {
+  if (IsYoungGenerationCollector(collector) &&
+      !incremental_marking()->IsStopped()) {
     if (FLAG_trace_incremental_marking) {
       isolate()->PrintWithTimestamp(
           "[IncrementalMarking] Scavenge during marking.\n");
@@ -970,7 +971,7 @@ bool Heap::CollectGarbage(GarbageCollector collector,
       isolate()->PrintWithTimestamp(
           "[IncrementalMarking] Delaying MarkSweep.\n");
     }
-    collector = SCAVENGER;
+    collector = YoungGenerationCollector();
     collector_reason = "incremental marking delaying mark-sweep";
   }
 }
@@ -1035,7 +1036,8 @@ bool Heap::CollectGarbage(GarbageCollector collector,
   // generator needs incremental marking to stay off after it aborted.
   // We do this only for scavenger to avoid a loop where mark-compact
   // causes another mark-compact.
-  if (collector == SCAVENGER && !ShouldAbortIncrementalMarking()) {
+  if (IsYoungGenerationCollector(collector) &&
+      !ShouldAbortIncrementalMarking()) {
     StartIncrementalMarkingIfAllocationLimitIsReached(kNoGCFlags,
                                                       kNoGCCallbackFlags);
   }
@@ -1275,7 +1277,7 @@ bool Heap::PerformGarbageCollection(
     GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags) {
   int freed_global_handles = 0;
 
-  if (collector != SCAVENGER) {
+  if (!IsYoungGenerationCollector(collector)) {
     PROFILE(isolate_, CodeMovingGCEvent());
   }
@@ -1306,18 +1308,25 @@ bool Heap::PerformGarbageCollection(
 
   {
     Heap::PretenuringScope pretenuring_scope(this);
 
-    if (collector == MARK_COMPACTOR) {
-      UpdateOldGenerationAllocationCounter();
-      // Perform mark-sweep with optional compaction.
-      MarkCompact();
-      old_generation_size_configured_ = true;
-      // This should be updated before PostGarbageCollectionProcessing, which
-      // can cause another GC. Take into account the objects promoted during GC.
-      old_generation_allocation_counter_at_last_gc_ +=
-          static_cast<size_t>(promoted_objects_size_);
-      old_generation_size_at_last_gc_ = PromotedSpaceSizeOfObjects();
-    } else {
-      Scavenge();
+    switch (collector) {
+      case MARK_COMPACTOR:
+        UpdateOldGenerationAllocationCounter();
+        // Perform mark-sweep with optional compaction.
+        MarkCompact();
+        old_generation_size_configured_ = true;
+        // This should be updated before PostGarbageCollectionProcessing, which
+        // can cause another GC. Take into account the objects promoted during
+        // GC.
+        old_generation_allocation_counter_at_last_gc_ +=
+            static_cast<size_t>(promoted_objects_size_);
+        old_generation_size_at_last_gc_ = PromotedSpaceSizeOfObjects();
+        break;
+      case MINOR_MARK_COMPACTOR:
+        MinorMarkCompact();
+        break;
+      case SCAVENGER:
+        Scavenge();
+        break;
     }
 
     ProcessPretenuringFeedback();
@@ -1440,6 +1449,7 @@ void Heap::MarkCompact() {
   }
 }
 
+void Heap::MinorMarkCompact() { UNREACHABLE(); }
+
 void Heap::MarkCompactEpilogue() {
   TRACE_GC(tracer(), GCTracer::Scope::MC_EPILOGUE);
......
@@ -684,6 +684,26 @@ class Heap {
 #endif
   }
 
+  static inline bool IsYoungGenerationCollector(GarbageCollector collector) {
+    return collector == SCAVENGER || collector == MINOR_MARK_COMPACTOR;
+  }
+
+  static inline GarbageCollector YoungGenerationCollector() {
+    return (FLAG_minor_mc) ? MINOR_MARK_COMPACTOR : SCAVENGER;
+  }
+
+  static inline const char* CollectorName(GarbageCollector collector) {
+    switch (collector) {
+      case SCAVENGER:
+        return "Scavenger";
+      case MARK_COMPACTOR:
+        return "Mark-Compact";
+      case MINOR_MARK_COMPACTOR:
+        return "Minor Mark-Compact";
+    }
+    return "Unknown collector";
+  }
+
   V8_EXPORT_PRIVATE static double HeapGrowingFactor(double gc_speed,
                                                     double mutator_speed);
@@ -1773,6 +1793,8 @@ class Heap {
   // Performs a major collection in the whole heap.
   void MarkCompact();
 
+  // Performs a minor collection of just the young generation.
+  void MinorMarkCompact();
+
   // Code to be run before and after mark-compact.
   void MarkCompactPrologue();
......
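
The three heap.h helpers centralize collector-kind decisions. A hypothetical call site (not part of this CL) showing how they compose; DCHECK and PrintF are existing V8 utilities, while CollectYoungGeneration is invented here purely for illustration:

// Hypothetical caller: pick the configured young-generation collector,
// assert it is one, and log its human-readable name.
void CollectYoungGeneration() {
  GarbageCollector collector = Heap::YoungGenerationCollector();
  DCHECK(Heap::IsYoungGenerationCollector(collector));
  PrintF("Starting %s\n", Heap::CollectorName(collector));
  // ... set up tracer scopes and run the collector ...
}

Keeping the flag check behind YoungGenerationCollector() means later CLs can flesh out MinorMarkCompact() without touching any of the call sites rewritten here.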