// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/gc-tracer.h"

#include <cstdarg>

#include "src/base/atomic-utils.h"
#include "src/execution/isolate.h"
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/spaces.h"
#include "src/logging/counters-inl.h"

namespace v8 {
namespace internal {
// Sums the "hole" bytes (wasted + still-available free bytes) across all
// paged spaces of |heap|. Used to report fragmentation before/after a GC.
static size_t CountTotalHolesSize(Heap* heap) {
  size_t holes_size = 0;
  PagedSpaceIterator spaces(heap);
  for (PagedSpace* space = spaces.Next(); space != nullptr;
       space = spaces.Next()) {
    // Guard against wrap-around of the unsigned accumulator.
    DCHECK_GE(holes_size + space->Waste() + space->Available(), holes_size);
    holes_size += space->Waste() + space->Available();
  }
  return holes_size;
}
// Returns the per-isolate RuntimeCallStats table used by worker threads.
WorkerThreadRuntimeCallStats* GCTracer::worker_thread_runtime_call_stats() {
  return heap_->isolate()->counters()->worker_thread_runtime_call_stats();
}
// Maps a tracer scope id onto the corresponding RuntimeCallStats counter id.
// Relies on the two enums being laid out in the same order, anchored at
// MC_INCREMENTAL / kGC_MC_INCREMENTAL.
RuntimeCallCounterId GCTracer::RCSCounterFromScope(Scope::ScopeId id) {
  STATIC_ASSERT(Scope::FIRST_SCOPE == Scope::MC_INCREMENTAL);
  return static_cast<RuntimeCallCounterId>(
      static_cast<int>(RuntimeCallCounterId::kGC_MC_INCREMENTAL) +
      static_cast<int>(id));
}
// Maps a background scope id onto its RuntimeCallStats counter id by offsetting
// into the (foreground) scope counter range at FIRST_BACKGROUND_SCOPE.
RuntimeCallCounterId GCTracer::RCSCounterFromBackgroundScope(
    BackgroundScope::ScopeId id) {
  STATIC_ASSERT(Scope::FIRST_BACKGROUND_SCOPE ==
                Scope::BACKGROUND_ARRAY_BUFFER_FREE);
  STATIC_ASSERT(
      0 == static_cast<int>(BackgroundScope::BACKGROUND_ARRAY_BUFFER_FREE));
  return static_cast<RuntimeCallCounterId>(
      static_cast<int>(RCSCounterFromScope(Scope::FIRST_BACKGROUND_SCOPE)) +
      static_cast<int>(id));
}
// RAII scope: records its wall-clock duration into the tracer on destruction
// and, when runtime stats are enabled, also times the matching RCS counter.
GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope)
    : tracer_(tracer), scope_(scope) {
  start_time_ = tracer_->heap_->MonotonicallyIncreasingTimeInMs();
  // Fast path: runtime call stats are usually disabled.
  if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled())) return;
  runtime_stats_ = tracer_->heap_->isolate()->counters()->runtime_call_stats();
  runtime_stats_->Enter(&timer_, GCTracer::RCSCounterFromScope(scope));
}
GCTracer::Scope::~Scope() {
  // Attribute the elapsed time to this scope's bucket in the tracer.
  tracer_->AddScopeSample(
      scope_, tracer_->heap_->MonotonicallyIncreasingTimeInMs() - start_time_);
  // runtime_stats_ is only set when runtime stats were enabled in the ctor.
  if (V8_LIKELY(runtime_stats_ == nullptr)) return;
  runtime_stats_->Leave(&timer_);
}
// RAII scope for background (worker-thread) GC work. The caller supplies the
// thread-appropriate RuntimeCallStats table, since background threads must not
// use the isolate's main-thread table.
GCTracer::BackgroundScope::BackgroundScope(GCTracer* tracer, ScopeId scope,
                                           RuntimeCallStats* runtime_stats)
    : tracer_(tracer), scope_(scope), runtime_stats_(runtime_stats) {
  start_time_ = tracer_->heap_->MonotonicallyIncreasingTimeInMs();
  if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled())) return;
  runtime_stats_->Enter(&timer_,
                        GCTracer::RCSCounterFromBackgroundScope(scope));
}
GCTracer::BackgroundScope::~BackgroundScope() {
  double duration_ms =
      tracer_->heap_->MonotonicallyIncreasingTimeInMs() - start_time_;
  // Background samples are accumulated under a mutex inside the tracer.
  tracer_->AddBackgroundScopeSample(scope_, duration_ms);
  if (V8_LIKELY(runtime_stats_ == nullptr)) return;
  runtime_stats_->Leave(&timer_);
}
// Returns the trace-event name ("V8.GC_<SCOPE>") for a scope id.
const char* GCTracer::Scope::Name(ScopeId id) {
#define CASE(scope)  \
  case Scope::scope: \
    return "V8.GC_" #scope;
  switch (id) {
    TRACER_SCOPES(CASE)
    TRACER_BACKGROUND_SCOPES(CASE)
    case Scope::NUMBER_OF_SCOPES:
      break;
  }
#undef CASE
  // NUMBER_OF_SCOPES is a sentinel, never a valid id.
  UNREACHABLE();
  return nullptr;
}
// Returns the trace-event name ("V8.GC_<SCOPE>") for a background scope id.
const char* GCTracer::BackgroundScope::Name(ScopeId id) {
#define CASE(scope)            \
  case BackgroundScope::scope: \
    return "V8.GC_" #scope;
  switch (id) {
    TRACER_BACKGROUND_SCOPES(CASE)
    case BackgroundScope::NUMBER_OF_SCOPES:
      break;
  }
#undef CASE
  // NUMBER_OF_SCOPES is a sentinel, never a valid id.
  UNREACHABLE();
  return nullptr;
}
// Per-GC event record. All size/time fields start zeroed and are filled in
// by GCTracer::Start/Stop (and the *InSafepoint variants).
GCTracer::Event::Event(Type type, GarbageCollectionReason gc_reason,
                       const char* collector_reason)
    : type(type),
      gc_reason(gc_reason),
      collector_reason(collector_reason),
      start_time(0.0),
      end_time(0.0),
      reduce_memory(false),
      start_object_size(0),
      end_object_size(0),
      start_memory_size(0),
      end_memory_size(0),
      start_holes_size(0),
      end_holes_size(0),
      young_object_size(0),
      survived_young_object_size(0),
      incremental_marking_bytes(0),
      incremental_marking_duration(0.0) {
  // Zero all per-scope timing buckets for this cycle.
  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    scopes[i] = 0;
  }
}
const char* GCTracer::Event::TypeName(bool short_name) const {
  switch (type) {
    case SCAVENGER:
138
      return (short_name) ? "s" : "Scavenge";
139
    case MARK_COMPACTOR:
140
    case INCREMENTAL_MARK_COMPACTOR:
141 142 143
      return (short_name) ? "ms" : "Mark-sweep";
    case MINOR_MARK_COMPACTOR:
      return (short_name) ? "mmc" : "Minor Mark-Compact";
144
    case START:
145
      return (short_name) ? "st" : "Start";
146 147 148 149 150 151
  }
  return "Unknown Event Type";
}

// Constructs the tracer in the "no GC yet" state (a synthetic START event)
// with all counters and ring buffers zeroed.
GCTracer::GCTracer(Heap* heap)
    : heap_(heap),
      current_(Event::START, GarbageCollectionReason::kUnknown, nullptr),
      previous_(current_),
      incremental_marking_bytes_(0),
      incremental_marking_duration_(0.0),
      incremental_marking_start_time_(0.0),
      recorded_incremental_marking_speed_(0.0),
      allocation_time_ms_(0.0),
      new_space_allocation_counter_bytes_(0),
      old_generation_allocation_counter_bytes_(0),
      embedder_allocation_counter_bytes_(0),
      allocation_duration_since_gc_(0.0),
      new_space_allocation_in_bytes_since_gc_(0),
      old_generation_allocation_in_bytes_since_gc_(0),
      embedder_allocation_in_bytes_since_gc_(0),
      combined_mark_compact_speed_cache_(0.0),
      start_counter_(0),
      average_mutator_duration_(0),
      average_mark_compact_duration_(0),
      current_mark_compact_mutator_utilization_(1.0),
      previous_mark_compact_end_time_(0) {
  // All accesses to incremental_marking_scope assume that incremental marking
  // scopes come first.
  STATIC_ASSERT(0 == Scope::FIRST_INCREMENTAL_SCOPE);
  // We assume that MC_INCREMENTAL is the first scope so that we can properly
  // map it to RuntimeCallStats.
  STATIC_ASSERT(0 == Scope::MC_INCREMENTAL);
  current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
  for (int i = 0; i < BackgroundScope::NUMBER_OF_SCOPES; i++) {
    background_counter_[i].total_duration_ms = 0;
  }
}
void GCTracer::ResetForTesting() {
185
  current_ = Event(Event::START, GarbageCollectionReason::kTesting, nullptr);
186
  current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
187 188
  previous_ = current_;
  ResetIncrementalMarkingCounters();
189 190 191 192 193 194 195
  allocation_time_ms_ = 0.0;
  new_space_allocation_counter_bytes_ = 0.0;
  old_generation_allocation_counter_bytes_ = 0.0;
  allocation_duration_since_gc_ = 0.0;
  new_space_allocation_in_bytes_since_gc_ = 0.0;
  old_generation_allocation_in_bytes_since_gc_ = 0.0;
  combined_mark_compact_speed_cache_ = 0.0;
196 197
  recorded_minor_gcs_total_.Reset();
  recorded_minor_gcs_survived_.Reset();
198 199 200 201 202
  recorded_compactions_.Reset();
  recorded_mark_compacts_.Reset();
  recorded_incremental_mark_compacts_.Reset();
  recorded_new_generation_allocations_.Reset();
  recorded_old_generation_allocations_.Reset();
203
  recorded_embedder_generation_allocations_.Reset();
204 205
  recorded_context_disposal_times_.Reset();
  recorded_survival_ratios_.Reset();
206
  start_counter_ = 0;
207 208 209 210
  average_mutator_duration_ = 0;
  average_mark_compact_duration_ = 0;
  current_mark_compact_mutator_utilization_ = 1.0;
  previous_mark_compact_end_time_ = 0;
211
  base::MutexGuard guard(&background_counter_mutex_);
212 213 214
  for (int i = 0; i < BackgroundScope::NUMBER_OF_SCOPES; i++) {
    background_counter_[i].total_duration_ms = 0;
  }
215
}
216

217 218 219 220 221 222 223
// Records how the young generation was handled (histogram sample). Only valid
// during a scavenge or a reentrant GC (start_counter_ > 1).
void GCTracer::NotifyYoungGenerationHandling(
    YoungGenerationHandling young_generation_handling) {
  DCHECK(current_.type == Event::SCAVENGER || start_counter_ > 1);
  heap_->isolate()->counters()->young_generation_handling()->AddSample(
      static_cast<int>(young_generation_handling));
}
void GCTracer::Start(GarbageCollector collector,
                     GarbageCollectionReason gc_reason,
226
                     const char* collector_reason) {
hpayer's avatar
hpayer committed
227 228 229
  start_counter_++;
  if (start_counter_ != 1) return;

230
  previous_ = current_;
231
  double start_time = heap_->MonotonicallyIncreasingTimeInMs();
232
  SampleAllocation(start_time, heap_->NewSpaceAllocationCounter(),
233 234
                   heap_->OldGenerationAllocationCounter(),
                   heap_->EmbedderAllocationCounter());
235

236 237 238 239 240
  switch (collector) {
    case SCAVENGER:
      current_ = Event(Event::SCAVENGER, gc_reason, collector_reason);
      break;
    case MINOR_MARK_COMPACTOR:
241
      current_ =
242 243 244 245 246 247 248 249 250 251
          Event(Event::MINOR_MARK_COMPACTOR, gc_reason, collector_reason);
      break;
    case MARK_COMPACTOR:
      if (heap_->incremental_marking()->WasActivated()) {
        current_ = Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason,
                         collector_reason);
      } else {
        current_ = Event(Event::MARK_COMPACTOR, gc_reason, collector_reason);
      }
      break;
252 253
  }

254
  current_.reduce_memory = heap_->ShouldReduceMemory();
255
  current_.start_time = start_time;
256 257 258 259
  current_.start_object_size = 0;
  current_.start_memory_size = 0;
  current_.start_holes_size = 0;
  current_.young_object_size = 0;
260

261 262
  current_.incremental_marking_bytes = 0;
  current_.incremental_marking_duration = 0;
263 264 265 266

  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    current_.scopes[i] = 0;
  }
267

268 269
  Counters* counters = heap_->isolate()->counters();

270
  if (Heap::IsYoungGenerationCollector(collector)) {
271 272 273
    counters->scavenge_reason()->AddSample(static_cast<int>(gc_reason));
  } else {
    counters->mark_compact_reason()->AddSample(static_cast<int>(gc_reason));
274 275 276 277 278 279

    if (FLAG_trace_gc_freelists) {
      PrintIsolate(heap_->isolate(),
                   "FreeLists statistics before collection:\n");
      heap_->PrintFreeListsStats();
    }
280
  }
281 282
}

283 284 285 286 287 288 289 290
// Snapshots heap sizes at the start of the GC, once inside the safepoint
// (sizes are only stable while the mutator is stopped).
void GCTracer::StartInSafepoint() {
  current_.start_object_size = heap_->SizeOfObjects();
  current_.start_memory_size = heap_->memory_allocator()->Size();
  current_.start_holes_size = CountTotalHolesSize(heap_);
  current_.young_object_size =
      heap_->new_space()->Size() + heap_->new_lo_space()->SizeOfObjects();
}
// Clears the per-cycle incremental marking accumulators after a full GC
// consumed them.
void GCTracer::ResetIncrementalMarkingCounters() {
  incremental_marking_bytes_ = 0;
  incremental_marking_duration_ = 0;
  for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
    incremental_marking_scopes_[i].ResetCurrentCycle();
  }
}
// Snapshots heap sizes at the end of the GC, still inside the safepoint.
void GCTracer::StopInSafepoint() {
  current_.end_object_size = heap_->SizeOfObjects();
  current_.end_memory_size = heap_->memory_allocator()->Size();
  current_.end_holes_size = CountTotalHolesSize(heap_);
  current_.survived_young_object_size = heap_->SurvivedYoungObjectSize();
}
// Finishes tracing a GC cycle: closes the current event, pushes speed samples
// into the per-collector ring buffers, updates utilization counters, and emits
// --trace-gc / tracing output. Mirrors Start(); only the outermost call of a
// nested Start/Stop pair does real work.
void GCTracer::Stop(GarbageCollector collector) {
  start_counter_--;
  if (start_counter_ != 0) {
    // Reentrant GC finished inside an outer one; just log and keep the
    // outer event open.
    if (FLAG_trace_gc_verbose) {
      heap_->isolate()->PrintWithTimestamp(
          "[Finished reentrant %s during %s.]\n",
          Heap::CollectorName(collector), current_.TypeName(false));
    }
    return;
  }

  DCHECK_LE(0, start_counter_);
  // The collector being stopped must match the event opened in Start().
  DCHECK((collector == SCAVENGER && current_.type == Event::SCAVENGER) ||
         (collector == MINOR_MARK_COMPACTOR &&
          current_.type == Event::MINOR_MARK_COMPACTOR) ||
         (collector == MARK_COMPACTOR &&
          (current_.type == Event::MARK_COMPACTOR ||
           current_.type == Event::INCREMENTAL_MARK_COMPACTOR)));

  current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();

  AddAllocation(current_.end_time);

  double duration = current_.end_time - current_.start_time;

  switch (current_.type) {
    case Event::SCAVENGER:
    case Event::MINOR_MARK_COMPACTOR:
      recorded_minor_gcs_total_.Push(
          MakeBytesAndDuration(current_.young_object_size, duration));
      recorded_minor_gcs_survived_.Push(
          MakeBytesAndDuration(current_.survived_young_object_size, duration));
      FetchBackgroundMinorGCCounters();
      break;
    case Event::INCREMENTAL_MARK_COMPACTOR:
      // Fold the incremental marking work accumulated since the last full GC
      // into this event before resetting the accumulators.
      current_.incremental_marking_bytes = incremental_marking_bytes_;
      current_.incremental_marking_duration = incremental_marking_duration_;
      for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
        current_.incremental_marking_scopes[i] = incremental_marking_scopes_[i];
        current_.scopes[i] = incremental_marking_scopes_[i].duration;
      }

      RecordMutatorUtilization(
          current_.end_time, duration + current_.incremental_marking_duration);
      RecordIncrementalMarkingSpeed(current_.incremental_marking_bytes,
                                    current_.incremental_marking_duration);
      recorded_incremental_mark_compacts_.Push(
          MakeBytesAndDuration(current_.end_object_size, duration));
      RecordGCSumCounters(duration);
      ResetIncrementalMarkingCounters();
      combined_mark_compact_speed_cache_ = 0.0;
      FetchBackgroundMarkCompactCounters();
      break;
    case Event::MARK_COMPACTOR:
      // A non-incremental full GC must not have accumulated marking work.
      DCHECK_EQ(0u, current_.incremental_marking_bytes);
      DCHECK_EQ(0, current_.incremental_marking_duration);
      RecordMutatorUtilization(
          current_.end_time, duration + current_.incremental_marking_duration);
      recorded_mark_compacts_.Push(
          MakeBytesAndDuration(current_.end_object_size, duration));
      RecordGCSumCounters(duration);
      ResetIncrementalMarkingCounters();
      combined_mark_compact_speed_cache_ = 0.0;
      FetchBackgroundMarkCompactCounters();
      break;
    case Event::START:
      UNREACHABLE();
  }
  FetchBackgroundGeneralCounters();

  heap_->UpdateTotalGCTime(duration);

  if ((current_.type == Event::SCAVENGER ||
       current_.type == Event::MINOR_MARK_COMPACTOR) &&
      FLAG_trace_gc_ignore_scavenger)
    return;

  if (FLAG_trace_gc_nvp) {
    PrintNVP();
  } else {
    Print();
  }

  if (FLAG_trace_gc) {
    heap_->PrintShortHeapStatistics();
  }

  if (V8_UNLIKELY(TracingFlags::gc.load(std::memory_order_relaxed) &
                  v8::tracing::TracingCategoryObserver::ENABLED_BY_TRACING)) {
    std::stringstream heap_stats;
    heap_->DumpJSONHeapStatistics(heap_stats);

    TRACE_EVENT_INSTANT1(TRACE_DISABLED_BY_DEFAULT("v8.gc"), "V8.GC_Heap_Stats",
                         TRACE_EVENT_SCOPE_THREAD, "stats",
                         TRACE_STR_COPY(heap_stats.str().c_str()));
  }
}
void GCTracer::NotifySweepingCompleted() {
  if (FLAG_trace_gc_freelists) {
    PrintIsolate(heap_->isolate(),
                 "FreeLists statistics after sweeping completed:\n");
    heap_->PrintFreeListsStats();
  }
410 411 412 413 414 415
  if (FLAG_trace_allocations_origins) {
    heap_->new_space()->PrintAllocationsOrigins();
    heap_->old_space()->PrintAllocationsOrigins();
    heap_->code_space()->PrintAllocationsOrigins();
    heap_->map_space()->PrintAllocationsOrigins();
  }
416 417
}

418 419
void GCTracer::SampleAllocation(double current_ms,
                                size_t new_space_counter_bytes,
420 421
                                size_t old_generation_counter_bytes,
                                size_t embedder_counter_bytes) {
422
  if (allocation_time_ms_ == 0) {
423
    // It is the first sample.
424 425 426
    allocation_time_ms_ = current_ms;
    new_space_allocation_counter_bytes_ = new_space_counter_bytes;
    old_generation_allocation_counter_bytes_ = old_generation_counter_bytes;
427
    embedder_allocation_counter_bytes_ = embedder_counter_bytes;
428 429 430
    return;
  }
  // This assumes that counters are unsigned integers so that the subtraction
thakis's avatar
thakis committed
431
  // below works even if the new counter is less than the old counter.
432 433 434 435
  size_t new_space_allocated_bytes =
      new_space_counter_bytes - new_space_allocation_counter_bytes_;
  size_t old_generation_allocated_bytes =
      old_generation_counter_bytes - old_generation_allocation_counter_bytes_;
436 437
  size_t embedder_allocated_bytes =
      embedder_counter_bytes - embedder_allocation_counter_bytes_;
438 439 440 441
  double duration = current_ms - allocation_time_ms_;
  allocation_time_ms_ = current_ms;
  new_space_allocation_counter_bytes_ = new_space_counter_bytes;
  old_generation_allocation_counter_bytes_ = old_generation_counter_bytes;
442
  embedder_allocation_counter_bytes_ = embedder_counter_bytes;
443 444 445 446
  allocation_duration_since_gc_ += duration;
  new_space_allocation_in_bytes_since_gc_ += new_space_allocated_bytes;
  old_generation_allocation_in_bytes_since_gc_ +=
      old_generation_allocated_bytes;
447
  embedder_allocation_in_bytes_since_gc_ += embedder_allocated_bytes;
448 449
}

450 451
void GCTracer::AddAllocation(double current_ms) {
  allocation_time_ms_ = current_ms;
452
  if (allocation_duration_since_gc_ > 0) {
453 454 455 456 457 458
    recorded_new_generation_allocations_.Push(
        MakeBytesAndDuration(new_space_allocation_in_bytes_since_gc_,
                             allocation_duration_since_gc_));
    recorded_old_generation_allocations_.Push(
        MakeBytesAndDuration(old_generation_allocation_in_bytes_since_gc_,
                             allocation_duration_since_gc_));
459 460
    recorded_embedder_generation_allocations_.Push(MakeBytesAndDuration(
        embedder_allocation_in_bytes_since_gc_, allocation_duration_since_gc_));
461
  }
462
  allocation_duration_since_gc_ = 0;
463
  new_space_allocation_in_bytes_since_gc_ = 0;
464
  old_generation_allocation_in_bytes_since_gc_ = 0;
465
  embedder_allocation_in_bytes_since_gc_ = 0;
466 467 468
}


469
void GCTracer::AddContextDisposalTime(double time) {
470
  recorded_context_disposal_times_.Push(time);
471 472
}

473
void GCTracer::AddCompactionEvent(double duration,
ulan's avatar
ulan committed
474
                                  size_t live_bytes_compacted) {
475 476
  recorded_compactions_.Push(
      MakeBytesAndDuration(live_bytes_compacted, duration));
477 478 479
}


480
void GCTracer::AddSurvivalRatio(double promotion_ratio) {
481
  recorded_survival_ratios_.Push(promotion_ratio);
482 483
}

ulan's avatar
ulan committed
484
void GCTracer::AddIncrementalMarkingStep(double duration, size_t bytes) {
485
  if (bytes > 0) {
486 487
    incremental_marking_bytes_ += bytes;
    incremental_marking_duration_ += duration;
488
  }
489 490
}

491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506
void GCTracer::Output(const char* format, ...) const {
  if (FLAG_trace_gc) {
    va_list arguments;
    va_start(arguments, format);
    base::OS::VPrint(format, arguments);
    va_end(arguments);
  }

  const int kBufferSize = 256;
  char raw_buffer[kBufferSize];
  Vector<char> buffer(raw_buffer, kBufferSize);
  va_list arguments2;
  va_start(arguments2, format);
  VSNPrintF(buffer, format, arguments2);
  va_end(arguments2);

507
  heap_->AddToRingBuffer(buffer.begin());
508 509
}

510 511
void GCTracer::Print() const {
  double duration = current_.end_time - current_.start_time;
512 513 514
  const size_t kIncrementalStatsSize = 128;
  char incremental_buffer[kIncrementalStatsSize] = {0};

515 516 517 518 519 520 521 522 523
  if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
    base::OS::SNPrintF(
        incremental_buffer, kIncrementalStatsSize,
        " (+ %.1f ms in %d steps since start of marking, "
        "biggest step %.1f ms, walltime since start of marking %.f ms)",
        current_.scopes[Scope::MC_INCREMENTAL],
        current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].steps,
        current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].longest_step,
        current_.end_time - incremental_marking_start_time_);
524 525
  }

526 527 528 529 530
  // Avoid PrintF as Output also appends the string to the tracing ring buffer
  // that gets printed on OOM failures.
  Output(
      "[%d:%p] "
      "%8.0f ms: "
531
      "%s%s %.1f (%.1f) -> %.1f (%.1f) MB, "
532
      "%.1f / %.1f ms %s (average mu = %.3f, current mu = %.3f) %s %s\n",
533 534 535
      base::OS::GetCurrentProcessId(),
      reinterpret_cast<void*>(heap_->isolate()),
      heap_->isolate()->time_millis_since_init(), current_.TypeName(false),
536
      current_.reduce_memory ? " (reduce)" : "",
537 538 539 540 541
      static_cast<double>(current_.start_object_size) / MB,
      static_cast<double>(current_.start_memory_size) / MB,
      static_cast<double>(current_.end_object_size) / MB,
      static_cast<double>(current_.end_memory_size) / MB, duration,
      TotalExternalTime(), incremental_buffer,
542 543
      AverageMarkCompactMutatorUtilization(),
      CurrentMarkCompactMutatorUtilization(),
544
      Heap::GarbageCollectionReasonToString(current_.gc_reason),
545
      current_.collector_reason != nullptr ? current_.collector_reason : "");
546 547 548 549 550 551
}


void GCTracer::PrintNVP() const {
  double duration = current_.end_time - current_.start_time;
  double spent_in_mutator = current_.start_time - previous_.end_time;
ulan's avatar
ulan committed
552
  size_t allocated_since_last_gc =
553
      current_.start_object_size - previous_.end_object_size;
554

555 556 557 558 559 560 561
  double incremental_walltime_duration = 0;

  if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
    incremental_walltime_duration =
        current_.end_time - incremental_marking_start_time_;
  }

562 563
  switch (current_.type) {
    case Event::SCAVENGER:
564
      heap_->isolate()->PrintWithTimestamp(
565 566 567 568
          "pause=%.1f "
          "mutator=%.1f "
          "gc=%s "
          "reduce_memory=%d "
569 570 571 572 573 574
          "heap.prologue=%.2f "
          "heap.epilogue=%.2f "
          "heap.epilogue.reduce_new_space=%.2f "
          "heap.external.prologue=%.2f "
          "heap.external.epilogue=%.2f "
          "heap.external_weak_global_handles=%.2f "
575
          "fast_promote=%.2f "
576
          "complete.sweep_array_buffers=%.2f "
577
          "scavenge=%.2f "
578
          "scavenge.process_array_buffers=%.2f "
579
          "scavenge.free_remembered_set=%.2f "
580 581
          "scavenge.roots=%.2f "
          "scavenge.weak=%.2f "
582 583
          "scavenge.weak_global_handles.identify=%.2f "
          "scavenge.weak_global_handles.process=%.2f "
584
          "scavenge.parallel=%.2f "
585
          "scavenge.update_refs=%.2f "
586
          "scavenge.sweep_array_buffers=%.2f "
587
          "background.scavenge.parallel=%.2f "
588 589 590
          "background.array_buffer_free=%.2f "
          "background.store_buffer=%.2f "
          "background.unmapper=%.2f "
591 592
          "incremental.steps_count=%d "
          "incremental.steps_took=%.1f "
593
          "scavenge_throughput=%.f "
594 595 596 597 598 599 600
          "total_size_before=%zu "
          "total_size_after=%zu "
          "holes_size_before=%zu "
          "holes_size_after=%zu "
          "allocated=%zu "
          "promoted=%zu "
          "semi_space_copied=%zu "
601 602 603 604 605 606 607 608
          "nodes_died_in_new=%d "
          "nodes_copied_in_new=%d "
          "nodes_promoted=%d "
          "promotion_ratio=%.1f%% "
          "average_survival_ratio=%.1f%% "
          "promotion_rate=%.1f%% "
          "semi_space_copy_rate=%.1f%% "
          "new_space_allocation_throughput=%.1f "
609
          "unmapper_chunks=%d "
610
          "context_disposal_rate=%.1f\n",
611
          duration, spent_in_mutator, current_.TypeName(true),
612 613 614 615 616 617
          current_.reduce_memory, current_.scopes[Scope::HEAP_PROLOGUE],
          current_.scopes[Scope::HEAP_EPILOGUE],
          current_.scopes[Scope::HEAP_EPILOGUE_REDUCE_NEW_SPACE],
          current_.scopes[Scope::HEAP_EXTERNAL_PROLOGUE],
          current_.scopes[Scope::HEAP_EXTERNAL_EPILOGUE],
          current_.scopes[Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES],
618
          current_.scopes[Scope::SCAVENGER_SWEEP_ARRAY_BUFFERS],
619
          current_.scopes[Scope::SCAVENGER_FAST_PROMOTE],
620
          current_.scopes[Scope::SCAVENGER_SCAVENGE],
621
          current_.scopes[Scope::SCAVENGER_PROCESS_ARRAY_BUFFERS],
622
          current_.scopes[Scope::SCAVENGER_FREE_REMEMBERED_SET],
623 624 625 626 627 628 629
          current_.scopes[Scope::SCAVENGER_SCAVENGE_ROOTS],
          current_.scopes[Scope::SCAVENGER_SCAVENGE_WEAK],
          current_
              .scopes[Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_IDENTIFY],
          current_
              .scopes[Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_PROCESS],
          current_.scopes[Scope::SCAVENGER_SCAVENGE_PARALLEL],
630
          current_.scopes[Scope::SCAVENGER_SCAVENGE_UPDATE_REFS],
631
          current_.scopes[Scope::SCAVENGER_SWEEP_ARRAY_BUFFERS],
632
          current_.scopes[Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL],
633 634 635
          current_.scopes[Scope::BACKGROUND_ARRAY_BUFFER_FREE],
          current_.scopes[Scope::BACKGROUND_STORE_BUFFER],
          current_.scopes[Scope::BACKGROUND_UNMAPPER],
636 637 638 639 640 641 642 643 644 645 646 647 648
          current_.incremental_marking_scopes[GCTracer::Scope::MC_INCREMENTAL]
              .steps,
          current_.scopes[Scope::MC_INCREMENTAL],
          ScavengeSpeedInBytesPerMillisecond(), current_.start_object_size,
          current_.end_object_size, current_.start_holes_size,
          current_.end_holes_size, allocated_since_last_gc,
          heap_->promoted_objects_size(),
          heap_->semi_space_copied_object_size(),
          heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_,
          heap_->nodes_promoted_, heap_->promotion_ratio_,
          AverageSurvivalRatio(), heap_->promotion_rate_,
          heap_->semi_space_copied_rate_,
          NewSpaceAllocationThroughputInBytesPerMillisecond(),
649
          heap_->memory_allocator()->unmapper()->NumberOfChunks(),
650
          ContextDisposalRateInMilliseconds());
651
      break;
652 653 654 655 656
    case Event::MINOR_MARK_COMPACTOR:
      heap_->isolate()->PrintWithTimestamp(
          "pause=%.1f "
          "mutator=%.1f "
          "gc=%s "
657
          "reduce_memory=%d "
658 659
          "minor_mc=%.2f "
          "finish_sweeping=%.2f "
660
          "mark=%.2f "
661
          "mark.seed=%.2f "
662
          "mark.roots=%.2f "
663 664 665 666 667
          "mark.weak=%.2f "
          "mark.global_handles=%.2f "
          "clear=%.2f "
          "clear.string_table=%.2f "
          "clear.weak_lists=%.2f "
668 669 670
          "evacuate=%.2f "
          "evacuate.copy=%.2f "
          "evacuate.update_pointers=%.2f "
671 672
          "evacuate.update_pointers.to_new_roots=%.2f "
          "evacuate.update_pointers.slots=%.2f "
673 674 675
          "background.mark=%.2f "
          "background.evacuate.copy=%.2f "
          "background.evacuate.update_pointers=%.2f "
676 677 678
          "background.array_buffer_free=%.2f "
          "background.store_buffer=%.2f "
          "background.unmapper=%.2f "
679 680
          "update_marking_deque=%.2f "
          "reset_liveness=%.2f\n",
681
          duration, spent_in_mutator, "mmc", current_.reduce_memory,
682 683
          current_.scopes[Scope::MINOR_MC],
          current_.scopes[Scope::MINOR_MC_SWEEPING],
684
          current_.scopes[Scope::MINOR_MC_MARK],
685
          current_.scopes[Scope::MINOR_MC_MARK_SEED],
686
          current_.scopes[Scope::MINOR_MC_MARK_ROOTS],
687 688 689 690 691 692 693 694 695 696
          current_.scopes[Scope::MINOR_MC_MARK_WEAK],
          current_.scopes[Scope::MINOR_MC_MARK_GLOBAL_HANDLES],
          current_.scopes[Scope::MINOR_MC_CLEAR],
          current_.scopes[Scope::MINOR_MC_CLEAR_STRING_TABLE],
          current_.scopes[Scope::MINOR_MC_CLEAR_WEAK_LISTS],
          current_.scopes[Scope::MINOR_MC_EVACUATE],
          current_.scopes[Scope::MINOR_MC_EVACUATE_COPY],
          current_.scopes[Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS],
          current_
              .scopes[Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_TO_NEW_ROOTS],
697
          current_.scopes[Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_SLOTS],
698 699 700
          current_.scopes[Scope::MINOR_MC_BACKGROUND_MARKING],
          current_.scopes[Scope::MINOR_MC_BACKGROUND_EVACUATE_COPY],
          current_.scopes[Scope::MINOR_MC_BACKGROUND_EVACUATE_UPDATE_POINTERS],
701 702 703
          current_.scopes[Scope::BACKGROUND_ARRAY_BUFFER_FREE],
          current_.scopes[Scope::BACKGROUND_STORE_BUFFER],
          current_.scopes[Scope::BACKGROUND_UNMAPPER],
704 705
          current_.scopes[Scope::MINOR_MC_MARKING_DEQUE],
          current_.scopes[Scope::MINOR_MC_RESET_LIVENESS]);
706
      break;
707 708
    case Event::MARK_COMPACTOR:
    case Event::INCREMENTAL_MARK_COMPACTOR:
709
      heap_->isolate()->PrintWithTimestamp(
710 711 712 713
          "pause=%.1f "
          "mutator=%.1f "
          "gc=%s "
          "reduce_memory=%d "
714
          "heap.prologue=%.2f "
715
          "heap.embedder_tracing_epilogue=%.2f "
716 717 718 719 720
          "heap.epilogue=%.2f "
          "heap.epilogue.reduce_new_space=%.2f "
          "heap.external.prologue=%.1f "
          "heap.external.epilogue=%.1f "
          "heap.external.weak_global_handles=%.1f "
721 722 723 724 725 726 727 728
          "clear=%1.f "
          "clear.dependent_code=%.1f "
          "clear.maps=%.1f "
          "clear.slots_buffer=%.1f "
          "clear.store_buffer=%.1f "
          "clear.string_table=%.1f "
          "clear.weak_collections=%.1f "
          "clear.weak_lists=%.1f "
729
          "clear.weak_references=%.1f "
730
          "complete.sweep_array_buffers=%.1f "
731
          "epilogue=%.1f "
732 733 734
          "evacuate=%.1f "
          "evacuate.candidates=%.1f "
          "evacuate.clean_up=%.1f "
735
          "evacuate.copy=%.1f "
736 737
          "evacuate.prologue=%.1f "
          "evacuate.epilogue=%.1f "
738
          "evacuate.rebalance=%.1f "
739
          "evacuate.update_pointers=%.1f "
740
          "evacuate.update_pointers.to_new_roots=%.1f "
741 742
          "evacuate.update_pointers.slots.main=%.1f "
          "evacuate.update_pointers.slots.map_space=%.1f "
743 744
          "evacuate.update_pointers.weak=%.1f "
          "finish=%.1f "
745
          "finish.sweep_array_buffers=%.1f "
746 747 748
          "mark=%.1f "
          "mark.finish_incremental=%.1f "
          "mark.roots=%.1f "
749
          "mark.main=%.1f "
750
          "mark.weak_closure=%.1f "
751
          "mark.weak_closure.ephemeron=%.1f "
752
          "mark.weak_closure.ephemeron.marking=%.1f "
753
          "mark.weak_closure.ephemeron.linear=%.1f "
754 755 756
          "mark.weak_closure.weak_handles=%.1f "
          "mark.weak_closure.weak_roots=%.1f "
          "mark.weak_closure.harmony=%.1f "
757 758
          "mark.embedder_prologue=%.1f "
          "mark.embedder_tracing=%.1f "
759
          "prologue=%.1f "
760 761 762 763
          "sweep=%.1f "
          "sweep.code=%.1f "
          "sweep.map=%.1f "
          "sweep.old=%.1f "
764 765
          "incremental=%.1f "
          "incremental.finalize=%.1f "
766
          "incremental.finalize.body=%.1f "
767 768
          "incremental.finalize.external.prologue=%.1f "
          "incremental.finalize.external.epilogue=%.1f "
769 770
          "incremental.layout_change=%.1f "
          "incremental.start=%.1f "
771
          "incremental.sweep_array_buffers=%.1f "
772
          "incremental.sweeping=%.1f "
773 774
          "incremental.embedder_prologue=%.1f "
          "incremental.embedder_tracing=%.1f "
775
          "incremental_wrapper_tracing_longest_step=%.1f "
776 777 778
          "incremental_finalize_longest_step=%.1f "
          "incremental_finalize_steps_count=%d "
          "incremental_longest_step=%.1f "
779
          "incremental_steps_count=%d "
780
          "incremental_marking_throughput=%.f "
781
          "incremental_walltime_duration=%.f "
782 783 784 785
          "background.mark=%.1f "
          "background.sweep=%.1f "
          "background.evacuate.copy=%.1f "
          "background.evacuate.update_pointers=%.1f "
786 787 788
          "background.array_buffer_free=%.2f "
          "background.store_buffer=%.2f "
          "background.unmapper=%.1f "
789 790 791 792 793 794 795
          "total_size_before=%zu "
          "total_size_after=%zu "
          "holes_size_before=%zu "
          "holes_size_after=%zu "
          "allocated=%zu "
          "promoted=%zu "
          "semi_space_copied=%zu "
796 797 798 799 800 801 802
          "nodes_died_in_new=%d "
          "nodes_copied_in_new=%d "
          "nodes_promoted=%d "
          "promotion_ratio=%.1f%% "
          "average_survival_ratio=%.1f%% "
          "promotion_rate=%.1f%% "
          "semi_space_copy_rate=%.1f%% "
803
          "new_space_allocation_throughput=%.1f "
804
          "unmapper_chunks=%d "
805
          "context_disposal_rate=%.1f "
806
          "compaction_speed=%.f\n",
807
          duration, spent_in_mutator, current_.TypeName(true),
808
          current_.reduce_memory, current_.scopes[Scope::HEAP_PROLOGUE],
809
          current_.scopes[Scope::HEAP_EMBEDDER_TRACING_EPILOGUE],
810 811 812 813 814 815
          current_.scopes[Scope::HEAP_EPILOGUE],
          current_.scopes[Scope::HEAP_EPILOGUE_REDUCE_NEW_SPACE],
          current_.scopes[Scope::HEAP_EXTERNAL_PROLOGUE],
          current_.scopes[Scope::HEAP_EXTERNAL_EPILOGUE],
          current_.scopes[Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES],
          current_.scopes[Scope::MC_CLEAR],
816 817 818 819 820 821 822
          current_.scopes[Scope::MC_CLEAR_DEPENDENT_CODE],
          current_.scopes[Scope::MC_CLEAR_MAPS],
          current_.scopes[Scope::MC_CLEAR_SLOTS_BUFFER],
          current_.scopes[Scope::MC_CLEAR_STORE_BUFFER],
          current_.scopes[Scope::MC_CLEAR_STRING_TABLE],
          current_.scopes[Scope::MC_CLEAR_WEAK_COLLECTIONS],
          current_.scopes[Scope::MC_CLEAR_WEAK_LISTS],
823
          current_.scopes[Scope::MC_CLEAR_WEAK_REFERENCES],
824
          current_.scopes[Scope::MC_COMPLETE_SWEEP_ARRAY_BUFFERS],
825
          current_.scopes[Scope::MC_EPILOGUE],
826 827 828
          current_.scopes[Scope::MC_EVACUATE],
          current_.scopes[Scope::MC_EVACUATE_CANDIDATES],
          current_.scopes[Scope::MC_EVACUATE_CLEAN_UP],
829
          current_.scopes[Scope::MC_EVACUATE_COPY],
830 831
          current_.scopes[Scope::MC_EVACUATE_PROLOGUE],
          current_.scopes[Scope::MC_EVACUATE_EPILOGUE],
832
          current_.scopes[Scope::MC_EVACUATE_REBALANCE],
833
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS],
834
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_TO_NEW_ROOTS],
835 836
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_SLOTS_MAIN],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_SLOTS_MAP_SPACE],
837
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_WEAK],
838 839 840
          current_.scopes[Scope::MC_FINISH],
          current_.scopes[Scope::MC_FINISH_SWEEP_ARRAY_BUFFERS],
          current_.scopes[Scope::MC_MARK],
841 842
          current_.scopes[Scope::MC_MARK_FINISH_INCREMENTAL],
          current_.scopes[Scope::MC_MARK_ROOTS],
843
          current_.scopes[Scope::MC_MARK_MAIN],
844
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE],
845
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON],
846
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON_MARKING],
847
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON_LINEAR],
848 849 850
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_WEAK_HANDLES],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_WEAK_ROOTS],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_HARMONY],
851 852
          current_.scopes[Scope::MC_MARK_EMBEDDER_PROLOGUE],
          current_.scopes[Scope::MC_MARK_EMBEDDER_TRACING],
853
          current_.scopes[Scope::MC_PROLOGUE], current_.scopes[Scope::MC_SWEEP],
854 855 856
          current_.scopes[Scope::MC_SWEEP_CODE],
          current_.scopes[Scope::MC_SWEEP_MAP],
          current_.scopes[Scope::MC_SWEEP_OLD],
857
          current_.scopes[Scope::MC_INCREMENTAL],
858
          current_.scopes[Scope::MC_INCREMENTAL_FINALIZE],
859
          current_.scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY],
860 861
          current_.scopes[Scope::MC_INCREMENTAL_EXTERNAL_PROLOGUE],
          current_.scopes[Scope::MC_INCREMENTAL_EXTERNAL_EPILOGUE],
862 863
          current_.scopes[Scope::MC_INCREMENTAL_LAYOUT_CHANGE],
          current_.scopes[Scope::MC_INCREMENTAL_START],
864
          current_.scopes[Scope::MC_INCREMENTAL_SWEEP_ARRAY_BUFFERS],
865
          current_.scopes[Scope::MC_INCREMENTAL_SWEEPING],
866 867
          current_.scopes[Scope::MC_INCREMENTAL_EMBEDDER_PROLOGUE],
          current_.scopes[Scope::MC_INCREMENTAL_EMBEDDER_TRACING],
868
          current_
869 870
              .incremental_marking_scopes
                  [Scope::MC_INCREMENTAL_EMBEDDER_TRACING]
871 872 873 874 875 876 877 878 879 880
              .longest_step,
          current_
              .incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY]
              .longest_step,
          current_
              .incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY]
              .steps,
          current_.incremental_marking_scopes[Scope::MC_INCREMENTAL]
              .longest_step,
          current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].steps,
881
          IncrementalMarkingSpeedInBytesPerMillisecond(),
882 883 884 885 886
          incremental_walltime_duration,
          current_.scopes[Scope::MC_BACKGROUND_MARKING],
          current_.scopes[Scope::MC_BACKGROUND_SWEEPING],
          current_.scopes[Scope::MC_BACKGROUND_EVACUATE_COPY],
          current_.scopes[Scope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS],
887 888 889
          current_.scopes[Scope::BACKGROUND_ARRAY_BUFFER_FREE],
          current_.scopes[Scope::BACKGROUND_STORE_BUFFER],
          current_.scopes[Scope::BACKGROUND_UNMAPPER],
890 891 892
          current_.start_object_size, current_.end_object_size,
          current_.start_holes_size, current_.end_holes_size,
          allocated_since_last_gc, heap_->promoted_objects_size(),
893 894 895 896 897 898
          heap_->semi_space_copied_object_size(),
          heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_,
          heap_->nodes_promoted_, heap_->promotion_ratio_,
          AverageSurvivalRatio(), heap_->promotion_rate_,
          heap_->semi_space_copied_rate_,
          NewSpaceAllocationThroughputInBytesPerMillisecond(),
899
          heap_->memory_allocator()->unmapper()->NumberOfChunks(),
900 901
          ContextDisposalRateInMilliseconds(),
          CompactionSpeedInBytesPerMillisecond());
902 903 904 905 906 907
      break;
    case Event::START:
      break;
    default:
      UNREACHABLE();
  }
908
}
909

910
double GCTracer::AverageSpeed(const base::RingBuffer<BytesAndDuration>& buffer,
                              const BytesAndDuration& initial, double time_ms) {
  // Fold the ring buffer into a (bytes, duration) pair, seeded with
  // |initial|.  When a time window |time_ms| is given, stop accumulating as
  // soon as the summed duration covers the window.
  const BytesAndDuration accumulated = buffer.Sum(
      [time_ms](BytesAndDuration acc, BytesAndDuration sample) {
        if (time_ms != 0 && acc.second >= time_ms) return acc;
        return std::make_pair(acc.first + sample.first,
                              acc.second + sample.second);
      },
      initial);
  const uint64_t total_bytes = accumulated.first;
  const double total_duration_ms = accumulated.second;
  if (total_duration_ms == 0.0) return 0;
  const double speed = total_bytes / total_duration_ms;
  // Clamp the result to a sane range to guard against bogus samples.
  const int max_speed = 1024 * MB;
  const int min_speed = 1;
  if (speed >= max_speed) return max_speed;
  if (speed <= min_speed) return min_speed;
  return speed;
}

929 930
double GCTracer::AverageSpeed(
    const base::RingBuffer<BytesAndDuration>& buffer) {
  // No extra initial sample and no time window: average over the entire
  // recorded history in |buffer|.
  return AverageSpeed(buffer, MakeBytesAndDuration(0, 0), 0);
}

ulan's avatar
ulan committed
934
void GCTracer::RecordIncrementalMarkingSpeed(size_t bytes, double duration) {
  // Skip degenerate samples that would yield a zero or infinite speed.
  if (bytes == 0 || duration == 0) return;
  const double sample_speed = bytes / duration;
  // Exponential moving average with weight 1/2, seeded by the first sample.
  recorded_incremental_marking_speed_ =
      (recorded_incremental_marking_speed_ == 0)
          ? sample_speed
          : (recorded_incremental_marking_speed_ + sample_speed) / 2;
}

945 946 947 948 949 950 951 952 953 954 955 956 957
void GCTracer::RecordTimeToIncrementalMarkingTask(double time_to_task) {
  // Exponential moving average with weight 1/2, seeded by the first sample.
  average_time_to_incremental_marking_task_ =
      (average_time_to_incremental_marking_task_ == 0.0)
          ? time_to_task
          : (average_time_to_incremental_marking_task_ + time_to_task) / 2;
}

// Returns the running average recorded via RecordTimeToIncrementalMarkingTask
// (0.0 until the first sample arrives).
double GCTracer::AverageTimeToIncrementalMarkingTask() const {
  return average_time_to_incremental_marking_task_;
}

958 959 960 961 962 963 964 965 966 967
void GCTracer::RecordEmbedderSpeed(size_t bytes, double duration) {
  // Skip degenerate samples that would yield a zero or infinite speed.
  if (bytes == 0 || duration == 0) return;
  const double sample_speed = bytes / duration;
  // Exponential moving average with weight 1/2, seeded by the first sample.
  recorded_embedder_speed_ =
      (recorded_embedder_speed_ == 0.0)
          ? sample_speed
          : (recorded_embedder_speed_ + sample_speed) / 2;
}

968 969 970 971 972 973 974 975 976 977 978 979 980 981 982 983 984 985 986 987 988 989 990 991 992 993 994 995 996 997 998 999 1000 1001 1002 1003 1004
void GCTracer::RecordMutatorUtilization(double mark_compact_end_time,
                                        double mark_compact_duration) {
  // The very first event cannot produce a mutator duration; it only seeds
  // previous_mark_compact_end_time_.
  if (previous_mark_compact_end_time_ == 0) {
    previous_mark_compact_end_time_ = mark_compact_end_time;
    return;
  }
  const double total_duration =
      mark_compact_end_time - previous_mark_compact_end_time_;
  const double mutator_duration = total_duration - mark_compact_duration;
  if (average_mark_compact_duration_ == 0 && average_mutator_duration_ == 0) {
    // First complete event: seed both running averages.
    average_mark_compact_duration_ = mark_compact_duration;
    average_mutator_duration_ = mutator_duration;
  } else {
    // Exponential moving averages with weight 1/2.
    average_mark_compact_duration_ =
        (average_mark_compact_duration_ + mark_compact_duration) / 2;
    average_mutator_duration_ =
        (average_mutator_duration_ + mutator_duration) / 2;
  }
  // Guard against a zero total duration (e.g. low-resolution clock).
  current_mark_compact_mutator_utilization_ =
      total_duration ? mutator_duration / total_duration : 0;
  previous_mark_compact_end_time_ = mark_compact_end_time;
}

double GCTracer::AverageMarkCompactMutatorUtilization() const {
  double average_total_duration =
      average_mark_compact_duration_ + average_mutator_duration_;
  if (average_total_duration == 0) return 1.0;
  return average_mutator_duration_ / average_total_duration;
}

// Mutator utilization observed across the most recent mark-compact cycle.
double GCTracer::CurrentMarkCompactMutatorUtilization() const {
  return current_mark_compact_mutator_utilization_;
}

1005
double GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const {
1006 1007 1008 1009 1010
  if (recorded_incremental_marking_speed_ != 0) {
    return recorded_incremental_marking_speed_;
  }
  if (incremental_marking_duration_ != 0.0) {
    return incremental_marking_bytes_ / incremental_marking_duration_;
1011
  }
1012
  return kConservativeSpeedInBytesPerMillisecond;
1013
}
1014

1015
double GCTracer::EmbedderSpeedInBytesPerMillisecond() const {
1016 1017 1018
  // Note: Returning 0 is ok here as callers check for whether embedder speeds
  // have been recorded at all.
  return recorded_embedder_speed_;
1019 1020
}

1021
double GCTracer::ScavengeSpeedInBytesPerMillisecond(
1022
    ScavengeSpeedMode mode) const {
1023
  if (mode == kForAllObjects) {
1024
    return AverageSpeed(recorded_minor_gcs_total_);
1025
  } else {
1026
    return AverageSpeed(recorded_minor_gcs_survived_);
1027
  }
1028 1029
}

1030
double GCTracer::CompactionSpeedInBytesPerMillisecond() const {
1031
  return AverageSpeed(recorded_compactions_);
1032
}
1033

1034
double GCTracer::MarkCompactSpeedInBytesPerMillisecond() const {
1035
  return AverageSpeed(recorded_mark_compacts_);
1036 1037
}

1038
double GCTracer::FinalIncrementalMarkCompactSpeedInBytesPerMillisecond() const {
1039
  return AverageSpeed(recorded_incremental_mark_compacts_);
1040
}
1041

1042
double GCTracer::CombinedMarkCompactSpeedInBytesPerMillisecond() {
1043 1044 1045 1046 1047 1048 1049
  const double kMinimumMarkingSpeed = 0.5;
  if (combined_mark_compact_speed_cache_ > 0)
    return combined_mark_compact_speed_cache_;
  // MarkCompact speed is more stable than incremental marking speed, because
  // there might not be many incremental marking steps because of concurrent
  // marking.
  combined_mark_compact_speed_cache_ = MarkCompactSpeedInBytesPerMillisecond();
1050 1051
  if (combined_mark_compact_speed_cache_ > 0)
    return combined_mark_compact_speed_cache_;
1052 1053
  double speed1 = IncrementalMarkingSpeedInBytesPerMillisecond();
  double speed2 = FinalIncrementalMarkCompactSpeedInBytesPerMillisecond();
1054
  if (speed1 < kMinimumMarkingSpeed || speed2 < kMinimumMarkingSpeed) {
1055 1056 1057
    // No data for the incremental marking speed.
    // Return the non-incremental mark-compact speed.
    combined_mark_compact_speed_cache_ =
1058
        MarkCompactSpeedInBytesPerMillisecond();
1059 1060 1061 1062 1063 1064
  } else {
    // Combine the speed of incremental step and the speed of the final step.
    // 1 / (1 / speed1 + 1 / speed2) = speed1 * speed2 / (speed1 + speed2).
    combined_mark_compact_speed_cache_ = speed1 * speed2 / (speed1 + speed2);
  }
  return combined_mark_compact_speed_cache_;
1065
}
1066

1067 1068 1069 1070 1071 1072 1073 1074 1075
double GCTracer::CombineSpeedsInBytesPerMillisecond(double default_speed,
                                                    double optional_speed) {
  constexpr double kMinimumSpeed = 0.5;
  // Below the threshold the optional speed is considered unreliable and the
  // default speed is used as-is; otherwise combine the two harmonically.
  return (optional_speed < kMinimumSpeed)
             ? default_speed
             : default_speed * optional_speed /
                   (default_speed + optional_speed);
}

1076
double GCTracer::NewSpaceAllocationThroughputInBytesPerMillisecond(
1077
    double time_ms) const {
1078
  size_t bytes = new_space_allocation_in_bytes_since_gc_;
1079
  double durations = allocation_duration_since_gc_;
1080 1081
  return AverageSpeed(recorded_new_generation_allocations_,
                      MakeBytesAndDuration(bytes, durations), time_ms);
1082 1083
}

1084
double GCTracer::OldGenerationAllocationThroughputInBytesPerMillisecond(
1085
    double time_ms) const {
1086
  size_t bytes = old_generation_allocation_in_bytes_since_gc_;
1087
  double durations = allocation_duration_since_gc_;
1088 1089
  return AverageSpeed(recorded_old_generation_allocations_,
                      MakeBytesAndDuration(bytes, durations), time_ms);
1090
}
1091

1092 1093 1094 1095 1096 1097 1098 1099
double GCTracer::EmbedderAllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  size_t bytes = embedder_allocation_in_bytes_since_gc_;
  double durations = allocation_duration_since_gc_;
  return AverageSpeed(recorded_embedder_generation_allocations_,
                      MakeBytesAndDuration(bytes, durations), time_ms);
}

1100
double GCTracer::AllocationThroughputInBytesPerMillisecond(
1101 1102 1103
    double time_ms) const {
  return NewSpaceAllocationThroughputInBytesPerMillisecond(time_ms) +
         OldGenerationAllocationThroughputInBytesPerMillisecond(time_ms);
1104
}
1105

1106
double GCTracer::CurrentAllocationThroughputInBytesPerMillisecond() const {
1107 1108 1109
  return AllocationThroughputInBytesPerMillisecond(kThroughputTimeFrameMs);
}

1110
double GCTracer::CurrentOldGenerationAllocationThroughputInBytesPerMillisecond()
1111 1112
    const {
  return OldGenerationAllocationThroughputInBytesPerMillisecond(
1113
      kThroughputTimeFrameMs);
1114 1115
}

1116 1117 1118 1119 1120 1121
// Embedder allocation throughput restricted to the most recent
// kThroughputTimeFrameMs window.
double GCTracer::CurrentEmbedderAllocationThroughputInBytesPerMillisecond()
    const {
  return EmbedderAllocationThroughputInBytesPerMillisecond(
      kThroughputTimeFrameMs);
}

1122
double GCTracer::ContextDisposalRateInMilliseconds() const {
1123 1124 1125
  if (recorded_context_disposal_times_.Count() <
      recorded_context_disposal_times_.kSize)
    return 0.0;
1126
  double begin = heap_->MonotonicallyIncreasingTimeInMs();
1127 1128 1129
  double end = recorded_context_disposal_times_.Sum(
      [](double a, double b) { return b; }, 0.0);
  return (begin - end) / recorded_context_disposal_times_.Count();
1130
}
1131

1132
double GCTracer::AverageSurvivalRatio() const {
  // Arithmetic mean over all recorded survival ratios; 0.0 with no samples.
  const auto sample_count = recorded_survival_ratios_.Count();
  if (sample_count == 0) return 0.0;
  const double total = recorded_survival_ratios_.Sum(
      [](double acc, double ratio) { return acc + ratio; }, 0.0);
  return total / sample_count;
}

bool GCTracer::SurvivalEventsRecorded() const {
1140
  return recorded_survival_ratios_.Count() > 0;
1141
}
1142

1143
// Discards all recorded survival ratio samples.
void GCTracer::ResetSurvivalEvents() { recorded_survival_ratios_.Reset(); }
1144 1145 1146 1147 1148

// Records the wall-clock time at which incremental marking began.
void GCTracer::NotifyIncrementalMarkingStart() {
  incremental_marking_start_time_ = heap_->MonotonicallyIncreasingTimeInMs();
}

1149 1150 1151 1152 1153
void GCTracer::FetchBackgroundMarkCompactCounters() {
  FetchBackgroundCounters(Scope::FIRST_MC_BACKGROUND_SCOPE,
                          Scope::LAST_MC_BACKGROUND_SCOPE,
                          BackgroundScope::FIRST_MC_BACKGROUND_SCOPE,
                          BackgroundScope::LAST_MC_BACKGROUND_SCOPE);
1154 1155 1156 1157
  heap_->isolate()->counters()->background_marking()->AddSample(
      static_cast<int>(current_.scopes[Scope::MC_BACKGROUND_MARKING]));
  heap_->isolate()->counters()->background_sweeping()->AddSample(
      static_cast<int>(current_.scopes[Scope::MC_BACKGROUND_SWEEPING]));
1158 1159 1160 1161 1162 1163 1164
}

void GCTracer::FetchBackgroundMinorGCCounters() {
  FetchBackgroundCounters(Scope::FIRST_MINOR_GC_BACKGROUND_SCOPE,
                          Scope::LAST_MINOR_GC_BACKGROUND_SCOPE,
                          BackgroundScope::FIRST_MINOR_GC_BACKGROUND_SCOPE,
                          BackgroundScope::LAST_MINOR_GC_BACKGROUND_SCOPE);
1165 1166 1167
  heap_->isolate()->counters()->background_scavenger()->AddSample(
      static_cast<int>(
          current_.scopes[Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL]));
1168 1169
}

1170 1171 1172 1173 1174 1175 1176
// Moves accumulated general-purpose background times into the current event.
void GCTracer::FetchBackgroundGeneralCounters() {
  FetchBackgroundCounters(Scope::FIRST_GENERAL_BACKGROUND_SCOPE,
                          Scope::LAST_GENERAL_BACKGROUND_SCOPE,
                          BackgroundScope::FIRST_GENERAL_BACKGROUND_SCOPE,
                          BackgroundScope::LAST_GENERAL_BACKGROUND_SCOPE);
}

1177 1178 1179 1180 1181 1182
void GCTracer::FetchBackgroundCounters(int first_global_scope,
                                       int last_global_scope,
                                       int first_background_scope,
                                       int last_background_scope) {
  DCHECK_EQ(last_global_scope - first_global_scope,
            last_background_scope - first_background_scope);
1183
  base::MutexGuard guard(&background_counter_mutex_);
1184 1185 1186 1187 1188 1189 1190 1191
  int background_mc_scopes = last_background_scope - first_background_scope + 1;
  for (int i = 0; i < background_mc_scopes; i++) {
    current_.scopes[first_global_scope + i] +=
        background_counter_[first_background_scope + i].total_duration_ms;
    background_counter_[first_background_scope + i].total_duration_ms = 0;
  }
}

1192 1193
void GCTracer::AddBackgroundScopeSample(BackgroundScope::ScopeId scope,
                                        double duration) {
  // Counters are shared with background threads; serialize the update.
  base::MutexGuard guard(&background_counter_mutex_);
  background_counter_[scope].total_duration_ms += duration;
}

1199
void GCTracer::RecordGCPhasesHistograms(TimedHistogram* gc_timer) {
  Counters* counters = heap_->isolate()->counters();
  if (gc_timer == counters->gc_finalize()) {
    DCHECK_EQ(Scope::FIRST_TOP_MC_SCOPE, Scope::MC_CLEAR);
    // One histogram sample per top-level mark-compact phase.
    counters->gc_finalize_clear()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_CLEAR]));
    counters->gc_finalize_epilogue()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_EPILOGUE]));
    counters->gc_finalize_evacuate()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_EVACUATE]));
    counters->gc_finalize_finish()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_FINISH]));
    counters->gc_finalize_mark()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_MARK]));
    counters->gc_finalize_prologue()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_PROLOGUE]));
    counters->gc_finalize_sweep()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_SWEEP]));
    if (incremental_marking_duration_ > 0) {
      counters->incremental_marking_sum()->AddSample(
          static_cast<int>(incremental_marking_duration_));
    }
    // Total marking time = incremental steps + atomic marking phase.
    const double overall_marking_time =
        incremental_marking_duration_ + current_.scopes[Scope::MC_MARK];
    counters->gc_marking_sum()->AddSample(
        static_cast<int>(overall_marking_time));

    // Report main-thread V8 marking throughput, but only for reasonably
    // large heaps and only when the clock is precise enough.
    constexpr size_t kMinObjectSizeForReportingThroughput = 1024 * 1024;
    if (base::TimeTicks::IsHighResolution() &&
        heap_->SizeOfObjects() > kMinObjectSizeForReportingThroughput) {
      DCHECK_GT(overall_marking_time, 0.0);
      // Exclude time spent in embedder tracing.
      const double overall_v8_marking_time =
          overall_marking_time -
          current_.scopes[Scope::MC_MARK_EMBEDDER_TRACING];
      DCHECK_GT(overall_v8_marking_time, 0.0);
      const int main_thread_marking_throughput_mb_per_s =
          static_cast<int>(static_cast<double>(heap_->SizeOfObjects()) /
                           overall_v8_marking_time * 1000 / 1024 / 1024);
      counters->gc_main_thread_marking_throughput()->AddSample(
          static_cast<int>(main_thread_marking_throughput_mb_per_s));
    }

    DCHECK_EQ(Scope::LAST_TOP_MC_SCOPE, Scope::MC_SWEEP);
  } else if (gc_timer == counters->gc_scavenger()) {
    counters->gc_scavenger_scavenge_main()->AddSample(
        static_cast<int>(current_.scopes[Scope::SCAVENGER_SCAVENGE_PARALLEL]));
    counters->gc_scavenger_scavenge_roots()->AddSample(
        static_cast<int>(current_.scopes[Scope::SCAVENGER_SCAVENGE_ROOTS]));
  }
}

1253
void GCTracer::RecordGCSumCounters(double atomic_pause_duration) {
1254 1255
  base::MutexGuard guard(&background_counter_mutex_);

1256
  const double overall_duration =
1257 1258
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_LAYOUT_CHANGE]
          .duration +
1259 1260 1261 1262 1263 1264 1265 1266
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_START]
          .duration +
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_SWEEPING]
          .duration +
      incremental_marking_duration_ +
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE]
          .duration +
      atomic_pause_duration;
1267 1268 1269 1270 1271 1272 1273 1274 1275 1276 1277 1278
  const double background_duration =
      background_counter_[BackgroundScope::MC_BACKGROUND_EVACUATE_COPY]
          .total_duration_ms +
      background_counter_
          [BackgroundScope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS]
              .total_duration_ms +
      background_counter_[BackgroundScope::MC_BACKGROUND_MARKING]
          .total_duration_ms +
      background_counter_[BackgroundScope::MC_BACKGROUND_SWEEPING]
          .total_duration_ms;

  const double marking_duration =
1279 1280
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_LAYOUT_CHANGE]
          .duration +
1281 1282 1283 1284 1285 1286 1287 1288 1289 1290 1291
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_START]
          .duration +
      incremental_marking_duration_ +
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE]
          .duration +
      current_.scopes[Scope::MC_MARK];
  const double marking_background_duration =
      background_counter_[BackgroundScope::MC_BACKGROUND_MARKING]
          .total_duration_ms;

  // UMA.
1292 1293 1294 1295
  heap_->isolate()->counters()->gc_mark_compactor()->AddSample(
      static_cast<int>(overall_duration));

  // Emit trace event counters.
1296
  TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
1297
                       "V8.GCMarkCompactorSummary", TRACE_EVENT_SCOPE_THREAD,
1298 1299 1300 1301 1302 1303
                       "duration", overall_duration, "background_duration",
                       background_duration);
  TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
                       "V8.GCMarkCompactorMarkingSummary",
                       TRACE_EVENT_SCOPE_THREAD, "duration", marking_duration,
                       "background_duration", marking_background_duration);
1304 1305
}

1306 1307
}  // namespace internal
}  // namespace v8