// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/gc-tracer.h"

#include <cstdarg>

#include "src/base/atomic-utils.h"
#include "src/common/globals.h"
#include "src/execution/isolate.h"
#include "src/execution/thread-id.h"
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/spaces.h"
#include "src/logging/counters-inl.h"

namespace v8 {
namespace internal {

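// Sums the wasted and still-available bytes of all paged spaces; the trace
// output reports this as the "holes" size.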
static size_t CountTotalHolesSize(Heap* heap) {
  size_t holes_size = 0;
  PagedSpaceIterator spaces(heap);
  for (PagedSpace* space = spaces.Next(); space != nullptr;
       space = spaces.Next()) {
    DCHECK_GE(holes_size + space->Waste() + space->Available(), holes_size);
    holes_size += space->Waste() + space->Available();
  }
  return holes_size;
}
WorkerThreadRuntimeCallStats* GCTracer::worker_thread_runtime_call_stats() {
  return heap_->isolate()->counters()->worker_thread_runtime_call_stats();
}

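// Maps a tracer scope to its RuntimeCallStats counter. This relies on the
// kGC_* counters being declared in the same order as the scopes, starting
// with kGC_MC_INCREMENTAL (see the STATIC_ASSERT below).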
RuntimeCallCounterId GCTracer::RCSCounterFromScope(Scope::ScopeId id) {
  STATIC_ASSERT(Scope::FIRST_SCOPE == Scope::MC_INCREMENTAL);
  return static_cast<RuntimeCallCounterId>(
      static_cast<int>(RuntimeCallCounterId::kGC_MC_INCREMENTAL) +
      static_cast<int>(id));
}

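// Time source for the tracer: with --predictable we defer to the heap's
// clock so that reported times stay reproducible; otherwise the real
// monotonic clock is used.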
double GCTracer::MonotonicallyIncreasingTimeInMs() {
  if (V8_UNLIKELY(FLAG_predictable)) {
    return heap_->MonotonicallyIncreasingTimeInMs();
  } else {
    return base::TimeTicks::Now().ToInternalValue() /
           static_cast<double>(base::Time::kMicrosecondsPerMillisecond);
  }
}

CollectionEpoch GCTracer::CurrentEpoch(Scope::ScopeId scope_id) {
  if (Scope::NeedsYoungEpoch(scope_id)) {
    return heap_->epoch_young();
  } else {
    return heap_->epoch_full();
  }
}

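// A tracer scope measures the wall-clock duration of a GC phase and, when
// runtime call stats are enabled, also attributes that time to the matching
// RuntimeCallStats counter (main-thread or worker-thread stats, depending on
// |thread_kind|).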
GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope, ThreadKind thread_kind)
    : tracer_(tracer), scope_(scope), thread_kind_(thread_kind) {
  start_time_ = tracer_->MonotonicallyIncreasingTimeInMs();
  if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled())) return;

  if (thread_kind_ == ThreadKind::kMain) {
    DCHECK_EQ(tracer_->heap_->isolate()->thread_id(), ThreadId::Current());
    runtime_stats_ =
        tracer_->heap_->isolate()->counters()->runtime_call_stats();
    runtime_stats_->Enter(&timer_, GCTracer::RCSCounterFromScope(scope));
  } else {
    runtime_call_stats_scope_.emplace(
        tracer->worker_thread_runtime_call_stats());
    runtime_stats_ = runtime_call_stats_scope_->Get();
    runtime_stats_->Enter(&timer_, GCTracer::RCSCounterFromScope(scope));
  }
}

GCTracer::Scope::~Scope() {
  double duration_ms = tracer_->MonotonicallyIncreasingTimeInMs() - start_time_;

  if (thread_kind_ == ThreadKind::kMain) {
    DCHECK_EQ(tracer_->heap_->isolate()->thread_id(), ThreadId::Current());
    tracer_->AddScopeSample(scope_, duration_ms);
  } else {
    tracer_->AddScopeSampleBackground(scope_, duration_ms);
  }

  if (V8_LIKELY(runtime_stats_ == nullptr)) return;
  runtime_stats_->Leave(&timer_);
}

const char* GCTracer::Scope::Name(ScopeId id) {
#define CASE(scope)  \
  case Scope::scope: \
    return "V8.GC_" #scope;
  switch (id) {
    TRACER_SCOPES(CASE)
    TRACER_BACKGROUND_SCOPES(CASE)
    case Scope::NUMBER_OF_SCOPES:
      break;
  }
#undef CASE
  UNREACHABLE();
  return nullptr;
}

bool GCTracer::Scope::NeedsYoungEpoch(ScopeId id) {
#define CASE(scope)  \
  case Scope::scope: \
    return true;
  switch (id) {
    TRACER_YOUNG_EPOCH_SCOPES(CASE)
    default:
      return false;
  }
#undef CASE
  UNREACHABLE();
}

GCTracer::Event::Event(Type type, GarbageCollectionReason gc_reason,
                       const char* collector_reason)
    : type(type),
      gc_reason(gc_reason),
      collector_reason(collector_reason),
      start_time(0.0),
      end_time(0.0),
      reduce_memory(false),
      start_object_size(0),
      end_object_size(0),
      start_memory_size(0),
      end_memory_size(0),
      start_holes_size(0),
      end_holes_size(0),
      young_object_size(0),
      survived_young_object_size(0),
      incremental_marking_bytes(0),
      incremental_marking_duration(0.0) {
  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    scopes[i] = 0;
  }
}

const char* GCTracer::Event::TypeName(bool short_name) const {
  switch (type) {
    case SCAVENGER:
      return (short_name) ? "s" : "Scavenge";
    case MARK_COMPACTOR:
    case INCREMENTAL_MARK_COMPACTOR:
      return (short_name) ? "ms" : "Mark-sweep";
    case MINOR_MARK_COMPACTOR:
      return (short_name) ? "mmc" : "Minor Mark-Compact";
    case START:
      return (short_name) ? "st" : "Start";
  }
  return "Unknown Event Type";
}

GCTracer::GCTracer(Heap* heap)
    : heap_(heap),
      current_(Event::START, GarbageCollectionReason::kUnknown, nullptr),
      previous_(current_),
      incremental_marking_bytes_(0),
      incremental_marking_duration_(0.0),
      incremental_marking_start_time_(0.0),
      recorded_incremental_marking_speed_(0.0),
      allocation_time_ms_(0.0),
      new_space_allocation_counter_bytes_(0),
      old_generation_allocation_counter_bytes_(0),
      embedder_allocation_counter_bytes_(0),
      allocation_duration_since_gc_(0.0),
      new_space_allocation_in_bytes_since_gc_(0),
      old_generation_allocation_in_bytes_since_gc_(0),
      embedder_allocation_in_bytes_since_gc_(0),
      combined_mark_compact_speed_cache_(0.0),
      start_counter_(0),
      average_mutator_duration_(0),
      average_mark_compact_duration_(0),
      current_mark_compact_mutator_utilization_(1.0),
      previous_mark_compact_end_time_(0) {
  // All accesses to incremental_marking_scope assume that incremental marking
  // scopes come first.
  STATIC_ASSERT(0 == Scope::FIRST_INCREMENTAL_SCOPE);
  // We assume that MC_INCREMENTAL is the first scope so that we can properly
  // map it to RuntimeCallStats.
  STATIC_ASSERT(0 == Scope::MC_INCREMENTAL);
  current_.end_time = MonotonicallyIncreasingTimeInMs();
  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    background_counter_[i].total_duration_ms = 0;
  }
}

void GCTracer::ResetForTesting() {
  current_ = Event(Event::START, GarbageCollectionReason::kTesting, nullptr);
  current_.end_time = MonotonicallyIncreasingTimeInMs();
  previous_ = current_;
  ResetIncrementalMarkingCounters();
  allocation_time_ms_ = 0.0;
  new_space_allocation_counter_bytes_ = 0.0;
  old_generation_allocation_counter_bytes_ = 0.0;
  allocation_duration_since_gc_ = 0.0;
  new_space_allocation_in_bytes_since_gc_ = 0.0;
  old_generation_allocation_in_bytes_since_gc_ = 0.0;
  combined_mark_compact_speed_cache_ = 0.0;
  recorded_minor_gcs_total_.Reset();
  recorded_minor_gcs_survived_.Reset();
  recorded_compactions_.Reset();
  recorded_mark_compacts_.Reset();
  recorded_incremental_mark_compacts_.Reset();
  recorded_new_generation_allocations_.Reset();
  recorded_old_generation_allocations_.Reset();
  recorded_embedder_generation_allocations_.Reset();
  recorded_context_disposal_times_.Reset();
  recorded_survival_ratios_.Reset();
  start_counter_ = 0;
  average_mutator_duration_ = 0;
  average_mark_compact_duration_ = 0;
  current_mark_compact_mutator_utilization_ = 1.0;
  previous_mark_compact_end_time_ = 0;
  base::MutexGuard guard(&background_counter_mutex_);
  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    background_counter_[i].total_duration_ms = 0;
  }
}

void GCTracer::NotifyYoungGenerationHandling(
    YoungGenerationHandling young_generation_handling) {
  DCHECK(current_.type == Event::SCAVENGER || start_counter_ > 1);
  heap_->isolate()->counters()->young_generation_handling()->AddSample(
      static_cast<int>(young_generation_handling));
}

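// Opens a new top-level trace event. Reentrant calls (start_counter_ > 1)
// are ignored; only the outermost Start/Stop pair delimits |current_|.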
void GCTracer::Start(GarbageCollector collector,
                     GarbageCollectionReason gc_reason,
                     const char* collector_reason) {
  start_counter_++;
  if (start_counter_ != 1) return;

  previous_ = current_;

  switch (collector) {
    case SCAVENGER:
      current_ = Event(Event::SCAVENGER, gc_reason, collector_reason);
      break;
    case MINOR_MARK_COMPACTOR:
      current_ =
          Event(Event::MINOR_MARK_COMPACTOR, gc_reason, collector_reason);
      break;
    case MARK_COMPACTOR:
      if (heap_->incremental_marking()->WasActivated()) {
        current_ = Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason,
                         collector_reason);
      } else {
        current_ = Event(Event::MARK_COMPACTOR, gc_reason, collector_reason);
      }
      break;
  }

  current_.reduce_memory = heap_->ShouldReduceMemory();
  current_.start_time = MonotonicallyIncreasingTimeInMs();
  current_.start_object_size = 0;
  current_.start_memory_size = 0;
  current_.start_holes_size = 0;
  current_.young_object_size = 0;

  current_.incremental_marking_bytes = 0;
  current_.incremental_marking_duration = 0;

  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    current_.scopes[i] = 0;
  }

  Counters* counters = heap_->isolate()->counters();

  if (Heap::IsYoungGenerationCollector(collector)) {
    counters->scavenge_reason()->AddSample(static_cast<int>(gc_reason));
  } else {
    counters->mark_compact_reason()->AddSample(static_cast<int>(gc_reason));

    if (FLAG_trace_gc_freelists) {
      PrintIsolate(heap_->isolate(),
                   "FreeLists statistics before collection:\n");
      heap_->PrintFreeListsStats();
    }
  }
}

void GCTracer::StartInSafepoint() {
  SampleAllocation(current_.start_time, heap_->NewSpaceAllocationCounter(),
                   heap_->OldGenerationAllocationCounter(),
                   heap_->EmbedderAllocationCounter());

  current_.start_object_size = heap_->SizeOfObjects();
  current_.start_memory_size = heap_->memory_allocator()->Size();
  current_.start_holes_size = CountTotalHolesSize(heap_);
  current_.young_object_size =
      heap_->new_space()->Size() + heap_->new_lo_space()->SizeOfObjects();
}

void GCTracer::ResetIncrementalMarkingCounters() {
  incremental_marking_bytes_ = 0;
  incremental_marking_duration_ = 0;
  for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
    incremental_marking_scopes_[i].ResetCurrentCycle();
  }
}

void GCTracer::StopInSafepoint() {
  current_.end_object_size = heap_->SizeOfObjects();
  current_.end_memory_size = heap_->memory_allocator()->Size();
  current_.end_holes_size = CountTotalHolesSize(heap_);
  current_.survived_young_object_size = heap_->SurvivedYoungObjectSize();
}

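// Closes the current trace event: records allocation and speed samples and
// emits the --trace-gc / --trace-gc-nvp output.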
void GCTracer::Stop(GarbageCollector collector) {
  start_counter_--;
  if (start_counter_ != 0) {
    if (FLAG_trace_gc_verbose) {
      heap_->isolate()->PrintWithTimestamp(
          "[Finished reentrant %s during %s.]\n",
          Heap::CollectorName(collector), current_.TypeName(false));
    }
    return;
  }

  DCHECK_LE(0, start_counter_);
  DCHECK((collector == SCAVENGER && current_.type == Event::SCAVENGER) ||
         (collector == MINOR_MARK_COMPACTOR &&
          current_.type == Event::MINOR_MARK_COMPACTOR) ||
         (collector == MARK_COMPACTOR &&
          (current_.type == Event::MARK_COMPACTOR ||
           current_.type == Event::INCREMENTAL_MARK_COMPACTOR)));

  current_.end_time = MonotonicallyIncreasingTimeInMs();

  AddAllocation(current_.end_time);

  double duration = current_.end_time - current_.start_time;

  switch (current_.type) {
    case Event::SCAVENGER:
    case Event::MINOR_MARK_COMPACTOR:
      recorded_minor_gcs_total_.Push(
          MakeBytesAndDuration(current_.young_object_size, duration));
      recorded_minor_gcs_survived_.Push(
          MakeBytesAndDuration(current_.survived_young_object_size, duration));
      FetchBackgroundMinorGCCounters();
      break;
    case Event::INCREMENTAL_MARK_COMPACTOR:
      current_.incremental_marking_bytes = incremental_marking_bytes_;
      current_.incremental_marking_duration = incremental_marking_duration_;
      for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
        current_.incremental_marking_scopes[i] = incremental_marking_scopes_[i];
        current_.scopes[i] = incremental_marking_scopes_[i].duration;
      }

      RecordMutatorUtilization(
          current_.end_time, duration + current_.incremental_marking_duration);
      RecordIncrementalMarkingSpeed(current_.incremental_marking_bytes,
                                    current_.incremental_marking_duration);
      recorded_incremental_mark_compacts_.Push(
          MakeBytesAndDuration(current_.end_object_size, duration));
      RecordGCSumCounters(duration);
      ResetIncrementalMarkingCounters();
      combined_mark_compact_speed_cache_ = 0.0;
      FetchBackgroundMarkCompactCounters();
      break;
    case Event::MARK_COMPACTOR:
      DCHECK_EQ(0u, current_.incremental_marking_bytes);
      DCHECK_EQ(0, current_.incremental_marking_duration);
      RecordMutatorUtilization(
          current_.end_time, duration + current_.incremental_marking_duration);
      recorded_mark_compacts_.Push(
          MakeBytesAndDuration(current_.end_object_size, duration));
      RecordGCSumCounters(duration);
      ResetIncrementalMarkingCounters();
      combined_mark_compact_speed_cache_ = 0.0;
      FetchBackgroundMarkCompactCounters();
      break;
    case Event::START:
      UNREACHABLE();
  }
  FetchBackgroundGeneralCounters();

  heap_->UpdateTotalGCTime(duration);

  if ((current_.type == Event::SCAVENGER ||
       current_.type == Event::MINOR_MARK_COMPACTOR) &&
      FLAG_trace_gc_ignore_scavenger)
    return;

  if (FLAG_trace_gc_nvp) {
    PrintNVP();
  } else {
    Print();
  }

  if (FLAG_trace_gc) {
    heap_->PrintShortHeapStatistics();
  }

  if (V8_UNLIKELY(TracingFlags::gc.load(std::memory_order_relaxed) &
                  v8::tracing::TracingCategoryObserver::ENABLED_BY_TRACING)) {
    std::stringstream heap_stats;
    heap_->DumpJSONHeapStatistics(heap_stats);

    TRACE_EVENT_INSTANT1(TRACE_DISABLED_BY_DEFAULT("v8.gc"), "V8.GC_Heap_Stats",
                         TRACE_EVENT_SCOPE_THREAD, "stats",
                         TRACE_STR_COPY(heap_stats.str().c_str()));
  }
}

void GCTracer::NotifySweepingCompleted() {
  if (FLAG_trace_gc_freelists) {
    PrintIsolate(heap_->isolate(),
                 "FreeLists statistics after sweeping completed:\n");
    heap_->PrintFreeListsStats();
  }
  if (FLAG_trace_allocations_origins) {
    heap_->new_space()->PrintAllocationsOrigins();
    heap_->old_space()->PrintAllocationsOrigins();
    heap_->code_space()->PrintAllocationsOrigins();
    heap_->map_space()->PrintAllocationsOrigins();
  }
}

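// Samples the monotonically increasing allocation counters; the deltas since
// the previous sample accumulate until AddAllocation() consumes them at the
// end of a GC.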
void GCTracer::SampleAllocation(double current_ms,
                                size_t new_space_counter_bytes,
                                size_t old_generation_counter_bytes,
                                size_t embedder_counter_bytes) {
  if (allocation_time_ms_ == 0) {
    // It is the first sample.
    allocation_time_ms_ = current_ms;
    new_space_allocation_counter_bytes_ = new_space_counter_bytes;
    old_generation_allocation_counter_bytes_ = old_generation_counter_bytes;
    embedder_allocation_counter_bytes_ = embedder_counter_bytes;
    return;
  }
  // This assumes that counters are unsigned integers so that the subtraction
  // below works even if the new counter is less than the old counter.
  size_t new_space_allocated_bytes =
      new_space_counter_bytes - new_space_allocation_counter_bytes_;
  size_t old_generation_allocated_bytes =
      old_generation_counter_bytes - old_generation_allocation_counter_bytes_;
  size_t embedder_allocated_bytes =
      embedder_counter_bytes - embedder_allocation_counter_bytes_;
  double duration = current_ms - allocation_time_ms_;
  allocation_time_ms_ = current_ms;
  new_space_allocation_counter_bytes_ = new_space_counter_bytes;
  old_generation_allocation_counter_bytes_ = old_generation_counter_bytes;
  embedder_allocation_counter_bytes_ = embedder_counter_bytes;
  allocation_duration_since_gc_ += duration;
  new_space_allocation_in_bytes_since_gc_ += new_space_allocated_bytes;
  old_generation_allocation_in_bytes_since_gc_ +=
      old_generation_allocated_bytes;
  embedder_allocation_in_bytes_since_gc_ += embedder_allocated_bytes;
}

void GCTracer::AddAllocation(double current_ms) {
  allocation_time_ms_ = current_ms;
  if (allocation_duration_since_gc_ > 0) {
    recorded_new_generation_allocations_.Push(
        MakeBytesAndDuration(new_space_allocation_in_bytes_since_gc_,
                             allocation_duration_since_gc_));
    recorded_old_generation_allocations_.Push(
        MakeBytesAndDuration(old_generation_allocation_in_bytes_since_gc_,
                             allocation_duration_since_gc_));
    recorded_embedder_generation_allocations_.Push(MakeBytesAndDuration(
        embedder_allocation_in_bytes_since_gc_, allocation_duration_since_gc_));
  }
  allocation_duration_since_gc_ = 0;
  new_space_allocation_in_bytes_since_gc_ = 0;
  old_generation_allocation_in_bytes_since_gc_ = 0;
  embedder_allocation_in_bytes_since_gc_ = 0;
}


void GCTracer::AddContextDisposalTime(double time) {
  recorded_context_disposal_times_.Push(time);
}

void GCTracer::AddCompactionEvent(double duration,
                                  size_t live_bytes_compacted) {
  recorded_compactions_.Push(
      MakeBytesAndDuration(live_bytes_compacted, duration));
}


void GCTracer::AddSurvivalRatio(double promotion_ratio) {
  recorded_survival_ratios_.Push(promotion_ratio);
}

void GCTracer::AddIncrementalMarkingStep(double duration, size_t bytes) {
  if (bytes > 0) {
    incremental_marking_bytes_ += bytes;
    incremental_marking_duration_ += duration;
  }
}

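// printf-style helper that writes to stdout when --trace-gc is enabled and
// always appends the message to the heap's ring buffer, which is printed on
// OOM failures.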
void GCTracer::Output(const char* format, ...) const {
  if (FLAG_trace_gc) {
    va_list arguments;
    va_start(arguments, format);
    base::OS::VPrint(format, arguments);
    va_end(arguments);
  }

  const int kBufferSize = 256;
  char raw_buffer[kBufferSize];
  Vector<char> buffer(raw_buffer, kBufferSize);
  va_list arguments2;
  va_start(arguments2, format);
  VSNPrintF(buffer, format, arguments2);
  va_end(arguments2);

  heap_->AddToRingBuffer(buffer.begin());
}

void GCTracer::Print() const {
  double duration = current_.end_time - current_.start_time;
  const size_t kIncrementalStatsSize = 128;
  char incremental_buffer[kIncrementalStatsSize] = {0};

  if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
    base::OS::SNPrintF(
        incremental_buffer, kIncrementalStatsSize,
        " (+ %.1f ms in %d steps since start of marking, "
        "biggest step %.1f ms, walltime since start of marking %.f ms)",
        current_.scopes[Scope::MC_INCREMENTAL],
        current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].steps,
        current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].longest_step,
        current_.end_time - incremental_marking_start_time_);
  }

  // Avoid PrintF as Output also appends the string to the tracing ring buffer
  // that gets printed on OOM failures.
  Output(
      "[%d:%p] "
      "%8.0f ms: "
      "%s%s %.1f (%.1f) -> %.1f (%.1f) MB, "
      "%.1f / %.1f ms %s (average mu = %.3f, current mu = %.3f) %s %s\n",
      base::OS::GetCurrentProcessId(),
      reinterpret_cast<void*>(heap_->isolate()),
      heap_->isolate()->time_millis_since_init(), current_.TypeName(false),
      current_.reduce_memory ? " (reduce)" : "",
      static_cast<double>(current_.start_object_size) / MB,
      static_cast<double>(current_.start_memory_size) / MB,
      static_cast<double>(current_.end_object_size) / MB,
      static_cast<double>(current_.end_memory_size) / MB, duration,
      TotalExternalTime(), incremental_buffer,
      AverageMarkCompactMutatorUtilization(),
      CurrentMarkCompactMutatorUtilization(),
      Heap::GarbageCollectionReasonToString(current_.gc_reason),
      current_.collector_reason != nullptr ? current_.collector_reason : "");
}


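// Prints the current event as a single line of name=value pairs
// (--trace-gc-nvp).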
void GCTracer::PrintNVP() const {
  double duration = current_.end_time - current_.start_time;
  double spent_in_mutator = current_.start_time - previous_.end_time;
  size_t allocated_since_last_gc =
      current_.start_object_size - previous_.end_object_size;

  double incremental_walltime_duration = 0;

  if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
    incremental_walltime_duration =
        current_.end_time - incremental_marking_start_time_;
  }

  switch (current_.type) {
    case Event::SCAVENGER:
      heap_->isolate()->PrintWithTimestamp(
          "pause=%.1f "
          "mutator=%.1f "
          "gc=%s "
          "reduce_memory=%d "
          "time_to_safepoint=%.2f "
          "heap.prologue=%.2f "
          "heap.epilogue=%.2f "
          "heap.epilogue.reduce_new_space=%.2f "
          "heap.external.prologue=%.2f "
          "heap.external.epilogue=%.2f "
          "heap.external_weak_global_handles=%.2f "
          "fast_promote=%.2f "
          "complete.sweep_array_buffers=%.2f "
          "scavenge=%.2f "
          "scavenge.free_remembered_set=%.2f "
          "scavenge.roots=%.2f "
          "scavenge.weak=%.2f "
          "scavenge.weak_global_handles.identify=%.2f "
          "scavenge.weak_global_handles.process=%.2f "
          "scavenge.parallel=%.2f "
          "scavenge.update_refs=%.2f "
          "scavenge.sweep_array_buffers=%.2f "
          "background.scavenge.parallel=%.2f "
          "background.unmapper=%.2f "
          "unmapper=%.2f "
          "incremental.steps_count=%d "
          "incremental.steps_took=%.1f "
          "scavenge_throughput=%.f "
          "total_size_before=%zu "
          "total_size_after=%zu "
          "holes_size_before=%zu "
          "holes_size_after=%zu "
          "allocated=%zu "
          "promoted=%zu "
          "semi_space_copied=%zu "
          "nodes_died_in_new=%d "
          "nodes_copied_in_new=%d "
          "nodes_promoted=%d "
          "promotion_ratio=%.1f%% "
          "average_survival_ratio=%.1f%% "
          "promotion_rate=%.1f%% "
          "semi_space_copy_rate=%.1f%% "
          "new_space_allocation_throughput=%.1f "
          "unmapper_chunks=%d "
          "context_disposal_rate=%.1f\n",
          duration, spent_in_mutator, current_.TypeName(true),
          current_.reduce_memory, current_.scopes[Scope::TIME_TO_SAFEPOINT],
          current_.scopes[Scope::HEAP_PROLOGUE],
          current_.scopes[Scope::HEAP_EPILOGUE],
          current_.scopes[Scope::HEAP_EPILOGUE_REDUCE_NEW_SPACE],
          current_.scopes[Scope::HEAP_EXTERNAL_PROLOGUE],
          current_.scopes[Scope::HEAP_EXTERNAL_EPILOGUE],
          current_.scopes[Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES],
          current_.scopes[Scope::SCAVENGER_FAST_PROMOTE],
          current_.scopes[Scope::SCAVENGER_COMPLETE_SWEEP_ARRAY_BUFFERS],
          current_.scopes[Scope::SCAVENGER_SCAVENGE],
          current_.scopes[Scope::SCAVENGER_FREE_REMEMBERED_SET],
          current_.scopes[Scope::SCAVENGER_SCAVENGE_ROOTS],
          current_.scopes[Scope::SCAVENGER_SCAVENGE_WEAK],
          current_
              .scopes[Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_IDENTIFY],
          current_
              .scopes[Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_PROCESS],
          current_.scopes[Scope::SCAVENGER_SCAVENGE_PARALLEL],
          current_.scopes[Scope::SCAVENGER_SCAVENGE_UPDATE_REFS],
          current_.scopes[Scope::SCAVENGER_SWEEP_ARRAY_BUFFERS],
          current_.scopes[Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL],
          current_.scopes[Scope::BACKGROUND_UNMAPPER],
          current_.scopes[Scope::UNMAPPER],
          current_.incremental_marking_scopes[GCTracer::Scope::MC_INCREMENTAL]
              .steps,
          current_.scopes[Scope::MC_INCREMENTAL],
          ScavengeSpeedInBytesPerMillisecond(), current_.start_object_size,
          current_.end_object_size, current_.start_holes_size,
          current_.end_holes_size, allocated_since_last_gc,
          heap_->promoted_objects_size(),
          heap_->semi_space_copied_object_size(),
          heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_,
          heap_->nodes_promoted_, heap_->promotion_ratio_,
          AverageSurvivalRatio(), heap_->promotion_rate_,
          heap_->semi_space_copied_rate_,
          NewSpaceAllocationThroughputInBytesPerMillisecond(),
          heap_->memory_allocator()->unmapper()->NumberOfChunks(),
          ContextDisposalRateInMilliseconds());
      break;
    case Event::MINOR_MARK_COMPACTOR:
      heap_->isolate()->PrintWithTimestamp(
          "pause=%.1f "
          "mutator=%.1f "
          "gc=%s "
          "reduce_memory=%d "
          "minor_mc=%.2f "
          "finish_sweeping=%.2f "
          "time_to_safepoint=%.2f "
          "mark=%.2f "
          "mark.seed=%.2f "
          "mark.roots=%.2f "
          "mark.weak=%.2f "
          "mark.global_handles=%.2f "
          "clear=%.2f "
          "clear.string_table=%.2f "
          "clear.weak_lists=%.2f "
          "evacuate=%.2f "
          "evacuate.copy=%.2f "
          "evacuate.update_pointers=%.2f "
          "evacuate.update_pointers.to_new_roots=%.2f "
          "evacuate.update_pointers.slots=%.2f "
          "background.mark=%.2f "
          "background.evacuate.copy=%.2f "
          "background.evacuate.update_pointers=%.2f "
          "background.unmapper=%.2f "
          "unmapper=%.2f "
          "update_marking_deque=%.2f "
          "reset_liveness=%.2f\n",
          duration, spent_in_mutator, "mmc", current_.reduce_memory,
          current_.scopes[Scope::MINOR_MC],
          current_.scopes[Scope::MINOR_MC_SWEEPING],
          current_.scopes[Scope::TIME_TO_SAFEPOINT],
          current_.scopes[Scope::MINOR_MC_MARK],
          current_.scopes[Scope::MINOR_MC_MARK_SEED],
          current_.scopes[Scope::MINOR_MC_MARK_ROOTS],
          current_.scopes[Scope::MINOR_MC_MARK_WEAK],
          current_.scopes[Scope::MINOR_MC_MARK_GLOBAL_HANDLES],
          current_.scopes[Scope::MINOR_MC_CLEAR],
          current_.scopes[Scope::MINOR_MC_CLEAR_STRING_TABLE],
          current_.scopes[Scope::MINOR_MC_CLEAR_WEAK_LISTS],
          current_.scopes[Scope::MINOR_MC_EVACUATE],
          current_.scopes[Scope::MINOR_MC_EVACUATE_COPY],
          current_.scopes[Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS],
          current_
              .scopes[Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_TO_NEW_ROOTS],
          current_.scopes[Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_SLOTS],
          current_.scopes[Scope::MINOR_MC_BACKGROUND_MARKING],
          current_.scopes[Scope::MINOR_MC_BACKGROUND_EVACUATE_COPY],
          current_.scopes[Scope::MINOR_MC_BACKGROUND_EVACUATE_UPDATE_POINTERS],
          current_.scopes[Scope::BACKGROUND_UNMAPPER],
          current_.scopes[Scope::UNMAPPER],
          current_.scopes[Scope::MINOR_MC_MARKING_DEQUE],
          current_.scopes[Scope::MINOR_MC_RESET_LIVENESS]);
      break;
    case Event::MARK_COMPACTOR:
    case Event::INCREMENTAL_MARK_COMPACTOR:
      heap_->isolate()->PrintWithTimestamp(
          "pause=%.1f "
          "mutator=%.1f "
          "gc=%s "
          "reduce_memory=%d "
          "time_to_safepoint=%.2f "
          "heap.prologue=%.2f "
          "heap.embedder_tracing_epilogue=%.2f "
          "heap.epilogue=%.2f "
          "heap.epilogue.reduce_new_space=%.2f "
          "heap.external.prologue=%.1f "
          "heap.external.epilogue=%.1f "
          "heap.external.weak_global_handles=%.1f "
          "clear=%.1f "
          "clear.dependent_code=%.1f "
          "clear.maps=%.1f "
          "clear.slots_buffer=%.1f "
          "clear.string_table=%.1f "
          "clear.weak_collections=%.1f "
          "clear.weak_lists=%.1f "
          "clear.weak_references=%.1f "
          "complete.sweep_array_buffers=%.1f "
          "epilogue=%.1f "
          "evacuate=%.1f "
          "evacuate.candidates=%.1f "
          "evacuate.clean_up=%.1f "
          "evacuate.copy=%.1f "
          "evacuate.prologue=%.1f "
          "evacuate.epilogue=%.1f "
          "evacuate.rebalance=%.1f "
          "evacuate.update_pointers=%.1f "
          "evacuate.update_pointers.to_new_roots=%.1f "
          "evacuate.update_pointers.slots.main=%.1f "
          "evacuate.update_pointers.weak=%.1f "
          "finish=%.1f "
          "finish.sweep_array_buffers=%.1f "
          "mark=%.1f "
          "mark.finish_incremental=%.1f "
          "mark.roots=%.1f "
          "mark.main=%.1f "
          "mark.weak_closure=%.1f "
          "mark.weak_closure.ephemeron=%.1f "
          "mark.weak_closure.ephemeron.marking=%.1f "
          "mark.weak_closure.ephemeron.linear=%.1f "
          "mark.weak_closure.weak_handles=%.1f "
          "mark.weak_closure.weak_roots=%.1f "
          "mark.weak_closure.harmony=%.1f "
          "mark.embedder_prologue=%.1f "
          "mark.embedder_tracing=%.1f "
          "prologue=%.1f "
          "sweep=%.1f "
          "sweep.code=%.1f "
          "sweep.map=%.1f "
          "sweep.old=%.1f "
          "incremental=%.1f "
          "incremental.finalize=%.1f "
          "incremental.finalize.body=%.1f "
          "incremental.finalize.external.prologue=%.1f "
          "incremental.finalize.external.epilogue=%.1f "
          "incremental.layout_change=%.1f "
          "incremental.sweep_array_buffers=%.1f "
          "incremental.sweeping=%.1f "
          "incremental.embedder_prologue=%.1f "
          "incremental.embedder_tracing=%.1f "
          "incremental_wrapper_tracing_longest_step=%.1f "
          "incremental_finalize_longest_step=%.1f "
          "incremental_finalize_steps_count=%d "
          "incremental_longest_step=%.1f "
          "incremental_steps_count=%d "
          "incremental_marking_throughput=%.f "
          "incremental_walltime_duration=%.f "
          "background.mark=%.1f "
          "background.sweep=%.1f "
          "background.evacuate.copy=%.1f "
          "background.evacuate.update_pointers=%.1f "
          "background.unmapper=%.1f "
          "unmapper=%.1f "
          "total_size_before=%zu "
          "total_size_after=%zu "
          "holes_size_before=%zu "
          "holes_size_after=%zu "
          "allocated=%zu "
          "promoted=%zu "
          "semi_space_copied=%zu "
          "nodes_died_in_new=%d "
          "nodes_copied_in_new=%d "
          "nodes_promoted=%d "
          "promotion_ratio=%.1f%% "
          "average_survival_ratio=%.1f%% "
          "promotion_rate=%.1f%% "
          "semi_space_copy_rate=%.1f%% "
          "new_space_allocation_throughput=%.1f "
          "unmapper_chunks=%d "
          "context_disposal_rate=%.1f "
          "compaction_speed=%.f\n",
          duration, spent_in_mutator, current_.TypeName(true),
          current_.reduce_memory, current_.scopes[Scope::TIME_TO_SAFEPOINT],
          current_.scopes[Scope::HEAP_PROLOGUE],
          current_.scopes[Scope::HEAP_EMBEDDER_TRACING_EPILOGUE],
          current_.scopes[Scope::HEAP_EPILOGUE],
          current_.scopes[Scope::HEAP_EPILOGUE_REDUCE_NEW_SPACE],
          current_.scopes[Scope::HEAP_EXTERNAL_PROLOGUE],
          current_.scopes[Scope::HEAP_EXTERNAL_EPILOGUE],
          current_.scopes[Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES],
          current_.scopes[Scope::MC_CLEAR],
          current_.scopes[Scope::MC_CLEAR_DEPENDENT_CODE],
          current_.scopes[Scope::MC_CLEAR_MAPS],
          current_.scopes[Scope::MC_CLEAR_SLOTS_BUFFER],
          current_.scopes[Scope::MC_CLEAR_STRING_TABLE],
          current_.scopes[Scope::MC_CLEAR_WEAK_COLLECTIONS],
          current_.scopes[Scope::MC_CLEAR_WEAK_LISTS],
          current_.scopes[Scope::MC_CLEAR_WEAK_REFERENCES],
          current_.scopes[Scope::MC_COMPLETE_SWEEP_ARRAY_BUFFERS],
          current_.scopes[Scope::MC_EPILOGUE],
          current_.scopes[Scope::MC_EVACUATE],
          current_.scopes[Scope::MC_EVACUATE_CANDIDATES],
          current_.scopes[Scope::MC_EVACUATE_CLEAN_UP],
          current_.scopes[Scope::MC_EVACUATE_COPY],
          current_.scopes[Scope::MC_EVACUATE_PROLOGUE],
          current_.scopes[Scope::MC_EVACUATE_EPILOGUE],
          current_.scopes[Scope::MC_EVACUATE_REBALANCE],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_TO_NEW_ROOTS],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_SLOTS_MAIN],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_WEAK],
          current_.scopes[Scope::MC_FINISH],
          current_.scopes[Scope::MC_FINISH_SWEEP_ARRAY_BUFFERS],
          current_.scopes[Scope::MC_MARK],
          current_.scopes[Scope::MC_MARK_FINISH_INCREMENTAL],
          current_.scopes[Scope::MC_MARK_ROOTS],
          current_.scopes[Scope::MC_MARK_MAIN],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON_MARKING],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON_LINEAR],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_WEAK_HANDLES],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_WEAK_ROOTS],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_HARMONY],
          current_.scopes[Scope::MC_MARK_EMBEDDER_PROLOGUE],
          current_.scopes[Scope::MC_MARK_EMBEDDER_TRACING],
          current_.scopes[Scope::MC_PROLOGUE], current_.scopes[Scope::MC_SWEEP],
          current_.scopes[Scope::MC_SWEEP_CODE],
          current_.scopes[Scope::MC_SWEEP_MAP],
          current_.scopes[Scope::MC_SWEEP_OLD],
          current_.scopes[Scope::MC_INCREMENTAL],
          current_.scopes[Scope::MC_INCREMENTAL_FINALIZE],
          current_.scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY],
          current_.scopes[Scope::MC_INCREMENTAL_EXTERNAL_PROLOGUE],
          current_.scopes[Scope::MC_INCREMENTAL_EXTERNAL_EPILOGUE],
          current_.scopes[Scope::MC_INCREMENTAL_LAYOUT_CHANGE],
          current_.scopes[Scope::MC_INCREMENTAL_START],
          current_.scopes[Scope::MC_INCREMENTAL_SWEEPING],
          current_.scopes[Scope::MC_INCREMENTAL_EMBEDDER_PROLOGUE],
          current_.scopes[Scope::MC_INCREMENTAL_EMBEDDER_TRACING],
          current_
              .incremental_marking_scopes
                  [Scope::MC_INCREMENTAL_EMBEDDER_TRACING]
              .longest_step,
          current_
              .incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY]
              .longest_step,
          current_
              .incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY]
              .steps,
          current_.incremental_marking_scopes[Scope::MC_INCREMENTAL]
              .longest_step,
          current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].steps,
          IncrementalMarkingSpeedInBytesPerMillisecond(),
          incremental_walltime_duration,
          current_.scopes[Scope::MC_BACKGROUND_MARKING],
          current_.scopes[Scope::MC_BACKGROUND_SWEEPING],
          current_.scopes[Scope::MC_BACKGROUND_EVACUATE_COPY],
          current_.scopes[Scope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS],
          current_.scopes[Scope::BACKGROUND_UNMAPPER],
          current_.scopes[Scope::UNMAPPER], current_.start_object_size,
          current_.end_object_size, current_.start_holes_size,
          current_.end_holes_size, allocated_since_last_gc,
          heap_->promoted_objects_size(),
          heap_->semi_space_copied_object_size(),
          heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_,
          heap_->nodes_promoted_, heap_->promotion_ratio_,
          AverageSurvivalRatio(), heap_->promotion_rate_,
          heap_->semi_space_copied_rate_,
          NewSpaceAllocationThroughputInBytesPerMillisecond(),
          heap_->memory_allocator()->unmapper()->NumberOfChunks(),
          ContextDisposalRateInMilliseconds(),
          CompactionSpeedInBytesPerMillisecond());
      break;
    case Event::START:
      break;
    default:
      UNREACHABLE();
  }
}

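// Average speed in bytes/ms over the recorded events (plus |initial|). If
// |time_ms| is non-zero, accumulation stops once that much duration has been
// summed up. The result is clamped to [1, 1GB] bytes/ms.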
double GCTracer::AverageSpeed(const base::RingBuffer<BytesAndDuration>& buffer,
                              const BytesAndDuration& initial, double time_ms) {
  BytesAndDuration sum = buffer.Sum(
      [time_ms](BytesAndDuration a, BytesAndDuration b) {
        if (time_ms != 0 && a.second >= time_ms) return a;
        return std::make_pair(a.first + b.first, a.second + b.second);
      },
      initial);
  uint64_t bytes = sum.first;
  double durations = sum.second;
  if (durations == 0.0) return 0;
  double speed = bytes / durations;
  const int max_speed = 1024 * MB;
  const int min_speed = 1;
  if (speed >= max_speed) return max_speed;
  if (speed <= min_speed) return min_speed;
  return speed;
}

double GCTracer::AverageSpeed(
    const base::RingBuffer<BytesAndDuration>& buffer) {
  return AverageSpeed(buffer, MakeBytesAndDuration(0, 0), 0);
}

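// Keeps a running average of the incremental marking speed in bytes/ms; each
// new measurement is averaged with the previous estimate.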
void GCTracer::RecordIncrementalMarkingSpeed(size_t bytes, double duration) {
  if (duration == 0 || bytes == 0) return;
  double current_speed = bytes / duration;
  if (recorded_incremental_marking_speed_ == 0) {
    recorded_incremental_marking_speed_ = current_speed;
  } else {
    recorded_incremental_marking_speed_ =
        (recorded_incremental_marking_speed_ + current_speed) / 2;
  }
}

void GCTracer::RecordTimeToIncrementalMarkingTask(double time_to_task) {
  if (average_time_to_incremental_marking_task_ == 0.0) {
    average_time_to_incremental_marking_task_ = time_to_task;
  } else {
    average_time_to_incremental_marking_task_ =
        (average_time_to_incremental_marking_task_ + time_to_task) / 2;
  }
}

double GCTracer::AverageTimeToIncrementalMarkingTask() const {
  return average_time_to_incremental_marking_task_;
}

void GCTracer::RecordEmbedderSpeed(size_t bytes, double duration) {
  if (duration == 0 || bytes == 0) return;
  double current_speed = bytes / duration;
  if (recorded_embedder_speed_ == 0.0) {
    recorded_embedder_speed_ = current_speed;
  } else {
    recorded_embedder_speed_ = (recorded_embedder_speed_ + current_speed) / 2;
  }
}

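// Mutator utilization is the fraction of the time between two mark-compact
// ends that was spent in the mutator rather than in the GC. The averages
// halve the weight of their history with every new sample.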
void GCTracer::RecordMutatorUtilization(double mark_compact_end_time,
                                        double mark_compact_duration) {
  if (previous_mark_compact_end_time_ == 0) {
    // The first event only contributes to previous_mark_compact_end_time_,
    // because we cannot compute the mutator duration.
    previous_mark_compact_end_time_ = mark_compact_end_time;
  } else {
    double total_duration =
        mark_compact_end_time - previous_mark_compact_end_time_;
    double mutator_duration = total_duration - mark_compact_duration;
    if (average_mark_compact_duration_ == 0 && average_mutator_duration_ == 0) {
      // This is the first event with mutator and mark-compact durations.
      average_mark_compact_duration_ = mark_compact_duration;
      average_mutator_duration_ = mutator_duration;
    } else {
      average_mark_compact_duration_ =
          (average_mark_compact_duration_ + mark_compact_duration) / 2;
      average_mutator_duration_ =
          (average_mutator_duration_ + mutator_duration) / 2;
    }
    current_mark_compact_mutator_utilization_ =
        total_duration ? mutator_duration / total_duration : 0;
    previous_mark_compact_end_time_ = mark_compact_end_time;
  }
}

double GCTracer::AverageMarkCompactMutatorUtilization() const {
  double average_total_duration =
      average_mark_compact_duration_ + average_mutator_duration_;
  if (average_total_duration == 0) return 1.0;
  return average_mutator_duration_ / average_total_duration;
}

double GCTracer::CurrentMarkCompactMutatorUtilization() const {
  return current_mark_compact_mutator_utilization_;
}

double GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const {
  if (recorded_incremental_marking_speed_ != 0) {
    return recorded_incremental_marking_speed_;
  }
  if (incremental_marking_duration_ != 0.0) {
    return incremental_marking_bytes_ / incremental_marking_duration_;
  }
  return kConservativeSpeedInBytesPerMillisecond;
}

double GCTracer::EmbedderSpeedInBytesPerMillisecond() const {
  // Note: Returning 0 is ok here as callers check for whether embedder speeds
  // have been recorded at all.
  return recorded_embedder_speed_;
}

double GCTracer::ScavengeSpeedInBytesPerMillisecond(
    ScavengeSpeedMode mode) const {
  if (mode == kForAllObjects) {
    return AverageSpeed(recorded_minor_gcs_total_);
  } else {
    return AverageSpeed(recorded_minor_gcs_survived_);
  }
}

double GCTracer::CompactionSpeedInBytesPerMillisecond() const {
  return AverageSpeed(recorded_compactions_);
}

double GCTracer::MarkCompactSpeedInBytesPerMillisecond() const {
  return AverageSpeed(recorded_mark_compacts_);
}

double GCTracer::FinalIncrementalMarkCompactSpeedInBytesPerMillisecond() const {
  return AverageSpeed(recorded_incremental_mark_compacts_);
}

double GCTracer::CombinedMarkCompactSpeedInBytesPerMillisecond() {
  const double kMinimumMarkingSpeed = 0.5;
  if (combined_mark_compact_speed_cache_ > 0)
    return combined_mark_compact_speed_cache_;
  // MarkCompact speed is more stable than incremental marking speed, because
  // there might not be many incremental marking steps due to concurrent
  // marking.
  combined_mark_compact_speed_cache_ = MarkCompactSpeedInBytesPerMillisecond();
  if (combined_mark_compact_speed_cache_ > 0)
    return combined_mark_compact_speed_cache_;
  double speed1 = IncrementalMarkingSpeedInBytesPerMillisecond();
  double speed2 = FinalIncrementalMarkCompactSpeedInBytesPerMillisecond();
  if (speed1 < kMinimumMarkingSpeed || speed2 < kMinimumMarkingSpeed) {
    // No data for the incremental marking speed.
    // Return the non-incremental mark-compact speed.
    combined_mark_compact_speed_cache_ =
        MarkCompactSpeedInBytesPerMillisecond();
  } else {
    // Combine the speed of incremental step and the speed of the final step.
    // 1 / (1 / speed1 + 1 / speed2) = speed1 * speed2 / (speed1 + speed2).
    combined_mark_compact_speed_cache_ = speed1 * speed2 / (speed1 + speed2);
  }
  return combined_mark_compact_speed_cache_;
}

double GCTracer::CombineSpeedsInBytesPerMillisecond(double default_speed,
                                                    double optional_speed) {
  constexpr double kMinimumSpeed = 0.5;
  if (optional_speed < kMinimumSpeed) {
    return default_speed;
  }
  return default_speed * optional_speed / (default_speed + optional_speed);
}

double GCTracer::NewSpaceAllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  size_t bytes = new_space_allocation_in_bytes_since_gc_;
  double durations = allocation_duration_since_gc_;
  return AverageSpeed(recorded_new_generation_allocations_,
                      MakeBytesAndDuration(bytes, durations), time_ms);
}

double GCTracer::OldGenerationAllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  size_t bytes = old_generation_allocation_in_bytes_since_gc_;
  double durations = allocation_duration_since_gc_;
  return AverageSpeed(recorded_old_generation_allocations_,
                      MakeBytesAndDuration(bytes, durations), time_ms);
}

double GCTracer::EmbedderAllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  size_t bytes = embedder_allocation_in_bytes_since_gc_;
  double durations = allocation_duration_since_gc_;
  return AverageSpeed(recorded_embedder_generation_allocations_,
                      MakeBytesAndDuration(bytes, durations), time_ms);
}

double GCTracer::AllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  return NewSpaceAllocationThroughputInBytesPerMillisecond(time_ms) +
         OldGenerationAllocationThroughputInBytesPerMillisecond(time_ms);
}

double GCTracer::CurrentAllocationThroughputInBytesPerMillisecond() const {
  return AllocationThroughputInBytesPerMillisecond(kThroughputTimeFrameMs);
}

double GCTracer::CurrentOldGenerationAllocationThroughputInBytesPerMillisecond()
    const {
  return OldGenerationAllocationThroughputInBytesPerMillisecond(
      kThroughputTimeFrameMs);
}

double GCTracer::CurrentEmbedderAllocationThroughputInBytesPerMillisecond()
    const {
  return EmbedderAllocationThroughputInBytesPerMillisecond(
      kThroughputTimeFrameMs);
}

double GCTracer::ContextDisposalRateInMilliseconds() const {
  if (recorded_context_disposal_times_.Count() <
      recorded_context_disposal_times_.kSize)
    return 0.0;
  double begin = heap_->MonotonicallyIncreasingTimeInMs();
  double end = recorded_context_disposal_times_.Sum(
      [](double a, double b) { return b; }, 0.0);
  return (begin - end) / recorded_context_disposal_times_.Count();
}

double GCTracer::AverageSurvivalRatio() const {
  if (recorded_survival_ratios_.Count() == 0) return 0.0;
  double sum = recorded_survival_ratios_.Sum(
      [](double a, double b) { return a + b; }, 0.0);
  return sum / recorded_survival_ratios_.Count();
}

bool GCTracer::SurvivalEventsRecorded() const {
  return recorded_survival_ratios_.Count() > 0;
}

void GCTracer::ResetSurvivalEvents() { recorded_survival_ratios_.Reset(); }

void GCTracer::NotifyIncrementalMarkingStart() {
  incremental_marking_start_time_ = heap_->MonotonicallyIncreasingTimeInMs();
}

void GCTracer::FetchBackgroundMarkCompactCounters() {
  FetchBackgroundCounters(Scope::FIRST_MC_BACKGROUND_SCOPE,
                          Scope::LAST_MC_BACKGROUND_SCOPE);
  heap_->isolate()->counters()->background_marking()->AddSample(
      static_cast<int>(current_.scopes[Scope::MC_BACKGROUND_MARKING]));
  heap_->isolate()->counters()->background_sweeping()->AddSample(
      static_cast<int>(current_.scopes[Scope::MC_BACKGROUND_SWEEPING]));
}

void GCTracer::FetchBackgroundMinorGCCounters() {
  FetchBackgroundCounters(Scope::FIRST_MINOR_GC_BACKGROUND_SCOPE,
                          Scope::LAST_MINOR_GC_BACKGROUND_SCOPE);
  heap_->isolate()->counters()->background_scavenger()->AddSample(
      static_cast<int>(
          current_.scopes[Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL]));
}

void GCTracer::FetchBackgroundGeneralCounters() {
  FetchBackgroundCounters(Scope::FIRST_GENERAL_BACKGROUND_SCOPE,
                          Scope::LAST_GENERAL_BACKGROUND_SCOPE);
}

void GCTracer::FetchBackgroundCounters(int first_scope, int last_scope) {
  base::MutexGuard guard(&background_counter_mutex_);
  for (int i = first_scope; i <= last_scope; i++) {
    current_.scopes[i] += background_counter_[i].total_duration_ms;
    background_counter_[i].total_duration_ms = 0;
  }
}

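// Called from background threads: durations are accumulated under the mutex
// and folded into |current_| later by FetchBackgroundCounters().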
void GCTracer::AddScopeSampleBackground(Scope::ScopeId scope, double duration) {
  base::MutexGuard guard(&background_counter_mutex_);
  BackgroundCounter& counter = background_counter_[scope];
  counter.total_duration_ms += duration;
}

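// Reports the per-phase durations of the just-finished cycle to the
// corresponding histograms (full GC finalization vs. scavenge).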
void GCTracer::RecordGCPhasesHistograms(TimedHistogram* gc_timer) {
  Counters* counters = heap_->isolate()->counters();
  if (gc_timer == counters->gc_finalize()) {
    DCHECK_EQ(Scope::FIRST_TOP_MC_SCOPE, Scope::MC_CLEAR);
    counters->gc_finalize_clear()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_CLEAR]));
    counters->gc_finalize_epilogue()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_EPILOGUE]));
    counters->gc_finalize_evacuate()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_EVACUATE]));
    counters->gc_finalize_finish()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_FINISH]));
    counters->gc_finalize_mark()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_MARK]));
    counters->gc_finalize_prologue()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_PROLOGUE]));
    counters->gc_finalize_sweep()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_SWEEP]));
    if (incremental_marking_duration_ > 0) {
      heap_->isolate()->counters()->incremental_marking_sum()->AddSample(
          static_cast<int>(incremental_marking_duration_));
    }
    const double overall_marking_time =
        incremental_marking_duration_ + current_.scopes[Scope::MC_MARK];
    heap_->isolate()->counters()->gc_marking_sum()->AddSample(
        static_cast<int>(overall_marking_time));

    // Filter out samples where
    // - we don't have high-resolution timers;
    // - size of marked objects is very small;
    // - marking time is rounded to 0;
    constexpr size_t kMinObjectSizeForReportingThroughput = 1024 * 1024;
    if (base::TimeTicks::IsHighResolution() &&
        heap_->SizeOfObjects() > kMinObjectSizeForReportingThroughput &&
        overall_marking_time > 0) {
      const double overall_v8_marking_time =
          overall_marking_time -
          current_.scopes[Scope::MC_MARK_EMBEDDER_TRACING];
      if (overall_v8_marking_time > 0) {
        const int main_thread_marking_throughput_mb_per_s =
            static_cast<int>(static_cast<double>(heap_->SizeOfObjects()) /
                             overall_v8_marking_time * 1000 / 1024 / 1024);
        heap_->isolate()
            ->counters()
            ->gc_main_thread_marking_throughput()
            ->AddSample(
                static_cast<int>(main_thread_marking_throughput_mb_per_s));
      }
    }

    DCHECK_EQ(Scope::LAST_TOP_MC_SCOPE, Scope::MC_SWEEP);
  } else if (gc_timer == counters->gc_scavenger()) {
    counters->gc_scavenger_scavenge_main()->AddSample(
        static_cast<int>(current_.scopes[Scope::SCAVENGER_SCAVENGE_PARALLEL]));
    counters->gc_scavenger_scavenge_roots()->AddSample(
        static_cast<int>(current_.scopes[Scope::SCAVENGER_SCAVENGE_ROOTS]));
  }
}

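// Sums up the whole mark-compact cycle (incremental steps plus the atomic
// pause, and the concurrent background work) and reports it to UMA and as
// trace-event counters.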
void GCTracer::RecordGCSumCounters(double atomic_pause_duration) {
  base::MutexGuard guard(&background_counter_mutex_);

  const double overall_duration =
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_LAYOUT_CHANGE]
          .duration +
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_START]
          .duration +
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_SWEEPING]
          .duration +
      incremental_marking_duration_ +
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE]
          .duration +
      atomic_pause_duration;
  const double background_duration =
      background_counter_[Scope::MC_BACKGROUND_EVACUATE_COPY]
          .total_duration_ms +
      background_counter_[Scope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS]
          .total_duration_ms +
      background_counter_[Scope::MC_BACKGROUND_MARKING].total_duration_ms +
      background_counter_[Scope::MC_BACKGROUND_SWEEPING].total_duration_ms;

  const double marking_duration =
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_LAYOUT_CHANGE]
          .duration +
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_START]
          .duration +
      incremental_marking_duration_ +
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE]
          .duration +
      current_.scopes[Scope::MC_MARK];
  const double marking_background_duration =
      background_counter_[Scope::MC_BACKGROUND_MARKING].total_duration_ms;

  // UMA.
  heap_->isolate()->counters()->gc_mark_compactor()->AddSample(
      static_cast<int>(overall_duration));

  // Emit trace event counters.
  TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
                       "V8.GCMarkCompactorSummary", TRACE_EVENT_SCOPE_THREAD,
                       "duration", overall_duration, "background_duration",
                       background_duration);
  TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
                       "V8.GCMarkCompactorMarkingSummary",
                       TRACE_EVENT_SCOPE_THREAD, "duration", marking_duration,
                       "background_duration", marking_background_duration);
}

}  // namespace internal
}  // namespace v8