// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/gc-tracer.h"

#include "src/counters.h"
#include "src/heap/heap-inl.h"
#include "src/isolate.h"

namespace v8 {
namespace internal {

// Sums the unusable ("hole") space — waste plus free-list space — over all
// old spaces of the given heap.
static intptr_t CountTotalHolesSize(Heap* heap) {
  intptr_t total = 0;
  OldSpaces old_spaces(heap);
  for (OldSpace* space = old_spaces.next(); space != NULL;
       space = old_spaces.next()) {
    total += space->Waste() + space->Available();
  }
  return total;
}


24 25 26 27 28 29 30 31 32 33 34 35 36
GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope)
    : tracer_(tracer), scope_(scope) {
  start_time_ = tracer_->heap_->MonotonicallyIncreasingTimeInMs();
}


// Stops timing and charges the elapsed milliseconds to this scope's slot in
// the tracer's current event.
GCTracer::Scope::~Scope() {
  DCHECK(scope_ < NUMBER_OF_SCOPES);  // scope_ is unsigned.
  double end_time = tracer_->heap_->MonotonicallyIncreasingTimeInMs();
  tracer_->current_.scopes[scope_] += end_time - start_time_;
}


37
GCTracer::AllocationEvent::AllocationEvent(double duration,
38
                                           size_t allocation_in_bytes) {
39 40 41 42 43
  duration_ = duration;
  allocation_in_bytes_ = allocation_in_bytes;
}


44 45 46 47 48
GCTracer::ContextDisposalEvent::ContextDisposalEvent(double time) {
  time_ = time;
}


49
GCTracer::SurvivalEvent::SurvivalEvent(double promotion_ratio) {
50
  promotion_ratio_ = promotion_ratio;
51 52 53
}


54 55 56 57 58 59 60
GCTracer::Event::Event(Type type, const char* gc_reason,
                       const char* collector_reason)
    : type(type),
      gc_reason(gc_reason),
      collector_reason(collector_reason),
      start_time(0.0),
      end_time(0.0),
61
      reduce_memory(false),
62 63 64 65 66 67
      start_object_size(0),
      end_object_size(0),
      start_memory_size(0),
      end_memory_size(0),
      start_holes_size(0),
      end_holes_size(0),
68
      cumulative_incremental_marking_steps(0),
69
      incremental_marking_steps(0),
70 71 72 73
      cumulative_incremental_marking_bytes(0),
      incremental_marking_bytes(0),
      cumulative_incremental_marking_duration(0.0),
      incremental_marking_duration(0.0),
74 75
      cumulative_pure_incremental_marking_duration(0.0),
      pure_incremental_marking_duration(0.0),
76
      longest_incremental_marking_step(0.0) {
77 78 79 80 81 82 83 84 85 86 87 88 89 90 91
  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    scopes[i] = 0;
  }
}


const char* GCTracer::Event::TypeName(bool short_name) const {
  switch (type) {
    case SCAVENGER:
      if (short_name) {
        return "s";
      } else {
        return "Scavenge";
      }
    case MARK_COMPACTOR:
92
    case INCREMENTAL_MARK_COMPACTOR:
93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110
      if (short_name) {
        return "ms";
      } else {
        return "Mark-sweep";
      }
    case START:
      if (short_name) {
        return "st";
      } else {
        return "Start";
      }
  }
  return "Unknown Event Type";
}


// Sets up the tracer with zeroed cumulative statistics and a synthetic START
// event so that `previous_` is always valid before the first real GC.
GCTracer::GCTracer(Heap* heap)
    : heap_(heap),
      cumulative_incremental_marking_steps_(0),
      cumulative_incremental_marking_bytes_(0),
      cumulative_incremental_marking_duration_(0.0),
      cumulative_pure_incremental_marking_duration_(0.0),
      longest_incremental_marking_step_(0.0),
      cumulative_incremental_marking_finalization_steps_(0),
      cumulative_incremental_marking_finalization_duration_(0.0),
      longest_incremental_marking_finalization_step_(0.0),
      cumulative_marking_duration_(0.0),
      cumulative_sweeping_duration_(0.0),
      allocation_time_ms_(0.0),
      new_space_allocation_counter_bytes_(0),
      old_generation_allocation_counter_bytes_(0),
      allocation_duration_since_gc_(0.0),
      new_space_allocation_in_bytes_since_gc_(0),
      old_generation_allocation_in_bytes_since_gc_(0),
      combined_mark_compact_speed_cache_(0.0),
      start_counter_(0) {
  current_ = Event(Event::START, NULL, NULL);
  current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
  previous_ = previous_incremental_mark_compactor_event_ = current_;
}


// Begins tracking a GC cycle: snapshots heap sizes, copies the cumulative
// incremental-marking totals into the new current event, and samples memory
// counters. Reentrant calls (nested Start without Stop) only bump the
// counter and are otherwise ignored.
void GCTracer::Start(GarbageCollector collector, const char* gc_reason,
                     const char* collector_reason) {
  start_counter_++;
  if (start_counter_ != 1) return;

  previous_ = current_;
  double start_time = heap_->MonotonicallyIncreasingTimeInMs();
  SampleAllocation(start_time, heap_->NewSpaceAllocationCounter(),
                   heap_->OldGenerationAllocationCounter());
  if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR)
    previous_incremental_mark_compactor_event_ = current_;

  if (collector == SCAVENGER) {
    current_ = Event(Event::SCAVENGER, gc_reason, collector_reason);
  } else if (collector == MARK_COMPACTOR) {
    // A mark-compact that follows incremental marking steps is classified
    // separately so its statistics are charged to the incremental buffers.
    if (heap_->incremental_marking()->WasActivated()) {
      current_ =
          Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason, collector_reason);
    } else {
      current_ = Event(Event::MARK_COMPACTOR, gc_reason, collector_reason);
    }
  }

  current_.reduce_memory = heap_->ShouldReduceMemory();
  current_.start_time = start_time;
  current_.start_object_size = heap_->SizeOfObjects();
  current_.start_memory_size = heap_->isolate()->memory_allocator()->Size();
  current_.start_holes_size = CountTotalHolesSize(heap_);
  current_.new_space_object_size =
      heap_->new_space()->top() - heap_->new_space()->bottom();

  current_.cumulative_incremental_marking_steps =
      cumulative_incremental_marking_steps_;
  current_.cumulative_incremental_marking_bytes =
      cumulative_incremental_marking_bytes_;
  current_.cumulative_incremental_marking_duration =
      cumulative_incremental_marking_duration_;
  current_.cumulative_pure_incremental_marking_duration =
      cumulative_pure_incremental_marking_duration_;
  current_.longest_incremental_marking_step = longest_incremental_marking_step_;

  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    current_.scopes[i] = 0;
  }
  int committed_memory = static_cast<int>(heap_->CommittedMemory() / KB);
  int used_memory = static_cast<int>(current_.start_object_size / KB);
  heap_->isolate()->counters()->aggregated_memory_heap_committed()->AddSample(
      start_time, committed_memory);
  heap_->isolate()->counters()->aggregated_memory_heap_used()->AddSample(
      start_time, used_memory);
}


hpayer's avatar
hpayer committed
188 189 190
void GCTracer::Stop(GarbageCollector collector) {
  start_counter_--;
  if (start_counter_ != 0) {
191 192 193
    Output("[Finished reentrant %s during %s.]\n",
           collector == SCAVENGER ? "Scavenge" : "Mark-sweep",
           current_.TypeName(false));
hpayer's avatar
hpayer committed
194 195 196 197
    return;
  }

  DCHECK(start_counter_ >= 0);
198 199 200 201
  DCHECK((collector == SCAVENGER && current_.type == Event::SCAVENGER) ||
         (collector == MARK_COMPACTOR &&
          (current_.type == Event::MARK_COMPACTOR ||
           current_.type == Event::INCREMENTAL_MARK_COMPACTOR)));
hpayer's avatar
hpayer committed
202

203
  current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
204 205 206
  current_.end_object_size = heap_->SizeOfObjects();
  current_.end_memory_size = heap_->isolate()->memory_allocator()->Size();
  current_.end_holes_size = CountTotalHolesSize(heap_);
207
  current_.survived_new_space_object_size = heap_->SurvivedNewSpaceObjectSize();
208

209
  AddAllocation(current_.end_time);
210

211 212 213 214 215 216 217
  int committed_memory = static_cast<int>(heap_->CommittedMemory() / KB);
  int used_memory = static_cast<int>(current_.end_object_size / KB);
  heap_->isolate()->counters()->aggregated_memory_heap_committed()->AddSample(
      current_.end_time, committed_memory);
  heap_->isolate()->counters()->aggregated_memory_heap_used()->AddSample(
      current_.end_time, used_memory);

218
  if (current_.type == Event::SCAVENGER) {
219 220 221 222 223 224 225 226 227
    current_.incremental_marking_steps =
        current_.cumulative_incremental_marking_steps -
        previous_.cumulative_incremental_marking_steps;
    current_.incremental_marking_bytes =
        current_.cumulative_incremental_marking_bytes -
        previous_.cumulative_incremental_marking_bytes;
    current_.incremental_marking_duration =
        current_.cumulative_incremental_marking_duration -
        previous_.cumulative_incremental_marking_duration;
228 229 230
    current_.pure_incremental_marking_duration =
        current_.cumulative_pure_incremental_marking_duration -
        previous_.cumulative_pure_incremental_marking_duration;
231
    scavenger_events_.push_front(current_);
232
  } else if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
233 234
    current_.incremental_marking_steps =
        current_.cumulative_incremental_marking_steps -
235 236
        previous_incremental_mark_compactor_event_
            .cumulative_incremental_marking_steps;
237 238
    current_.incremental_marking_bytes =
        current_.cumulative_incremental_marking_bytes -
239 240
        previous_incremental_mark_compactor_event_
            .cumulative_incremental_marking_bytes;
241 242
    current_.incremental_marking_duration =
        current_.cumulative_incremental_marking_duration -
243 244
        previous_incremental_mark_compactor_event_
            .cumulative_incremental_marking_duration;
245 246
    current_.pure_incremental_marking_duration =
        current_.cumulative_pure_incremental_marking_duration -
247
        previous_incremental_mark_compactor_event_
248
            .cumulative_pure_incremental_marking_duration;
249
    longest_incremental_marking_step_ = 0.0;
250
    incremental_mark_compactor_events_.push_front(current_);
251
    combined_mark_compact_speed_cache_ = 0.0;
252 253 254 255
  } else {
    DCHECK(current_.incremental_marking_bytes == 0);
    DCHECK(current_.incremental_marking_duration == 0);
    DCHECK(current_.pure_incremental_marking_duration == 0);
256
    longest_incremental_marking_step_ = 0.0;
257
    mark_compactor_events_.push_front(current_);
258
    combined_mark_compact_speed_cache_ = 0.0;
259 260
  }

261 262
  // TODO(ernstm): move the code below out of GCTracer.

263 264 265 266 267 268 269 270 271
  double duration = current_.end_time - current_.start_time;
  double spent_in_mutator = Max(current_.start_time - previous_.end_time, 0.0);

  heap_->UpdateCumulativeGCStatistics(duration, spent_in_mutator,
                                      current_.scopes[Scope::MC_MARK]);

  if (current_.type == Event::SCAVENGER && FLAG_trace_gc_ignore_scavenger)
    return;

272 273 274 275
  if (FLAG_trace_gc_nvp)
    PrintNVP();
  else
    Print();
276

277
  if (FLAG_trace_gc) {
278 279
    heap_->PrintShortHeapStatistics();
  }
280 281 282 283

  longest_incremental_marking_finalization_step_ = 0.0;
  cumulative_incremental_marking_finalization_steps_ = 0;
  cumulative_incremental_marking_finalization_duration_ = 0.0;
284 285 286
}


287 288 289 290
void GCTracer::SampleAllocation(double current_ms,
                                size_t new_space_counter_bytes,
                                size_t old_generation_counter_bytes) {
  if (allocation_time_ms_ == 0) {
291
    // It is the first sample.
292 293 294
    allocation_time_ms_ = current_ms;
    new_space_allocation_counter_bytes_ = new_space_counter_bytes;
    old_generation_allocation_counter_bytes_ = old_generation_counter_bytes;
295 296 297 298
    return;
  }
  // This assumes that counters are unsigned integers so that the subtraction
  // below works even if the new counter is less then the old counter.
299 300 301 302 303 304 305 306 307 308 309 310
  size_t new_space_allocated_bytes =
      new_space_counter_bytes - new_space_allocation_counter_bytes_;
  size_t old_generation_allocated_bytes =
      old_generation_counter_bytes - old_generation_allocation_counter_bytes_;
  double duration = current_ms - allocation_time_ms_;
  allocation_time_ms_ = current_ms;
  new_space_allocation_counter_bytes_ = new_space_counter_bytes;
  old_generation_allocation_counter_bytes_ = old_generation_counter_bytes;
  allocation_duration_since_gc_ += duration;
  new_space_allocation_in_bytes_since_gc_ += new_space_allocated_bytes;
  old_generation_allocation_in_bytes_since_gc_ +=
      old_generation_allocated_bytes;
311 312 313
}


314 315 316 317
void GCTracer::AddAllocation(double current_ms) {
  allocation_time_ms_ = current_ms;
  new_space_allocation_events_.push_front(AllocationEvent(
      allocation_duration_since_gc_, new_space_allocation_in_bytes_since_gc_));
318
  old_generation_allocation_events_.push_front(
319
      AllocationEvent(allocation_duration_since_gc_,
320
                      old_generation_allocation_in_bytes_since_gc_));
321
  allocation_duration_since_gc_ = 0;
322
  new_space_allocation_in_bytes_since_gc_ = 0;
323
  old_generation_allocation_in_bytes_since_gc_ = 0;
324 325 326
}


327 328 329 330 331
void GCTracer::AddContextDisposalTime(double time) {
  context_disposal_events_.push_front(ContextDisposalEvent(time));
}


332 333 334 335 336 337 338
void GCTracer::AddCompactionEvent(double duration,
                                  intptr_t live_bytes_compacted) {
  compaction_events_.push_front(
      CompactionEvent(duration, live_bytes_compacted));
}


339 340
void GCTracer::AddSurvivalRatio(double promotion_ratio) {
  survival_events_.push_front(SurvivalEvent(promotion_ratio));
341 342 343
}


344 345 346 347
void GCTracer::AddIncrementalMarkingStep(double duration, intptr_t bytes) {
  cumulative_incremental_marking_steps_++;
  cumulative_incremental_marking_bytes_ += bytes;
  cumulative_incremental_marking_duration_ += duration;
348 349
  longest_incremental_marking_step_ =
      Max(longest_incremental_marking_step_, duration);
350
  cumulative_marking_duration_ += duration;
351 352 353
  if (bytes > 0) {
    cumulative_pure_incremental_marking_duration_ += duration;
  }
354 355 356
}


357 358 359 360 361 362 363 364
void GCTracer::AddIncrementalMarkingFinalizationStep(double duration) {
  cumulative_incremental_marking_finalization_steps_++;
  cumulative_incremental_marking_finalization_duration_ += duration;
  longest_incremental_marking_finalization_step_ =
      Max(longest_incremental_marking_finalization_step_, duration);
}


365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384
void GCTracer::Output(const char* format, ...) const {
  if (FLAG_trace_gc) {
    va_list arguments;
    va_start(arguments, format);
    base::OS::VPrint(format, arguments);
    va_end(arguments);
  }

  const int kBufferSize = 256;
  char raw_buffer[kBufferSize];
  Vector<char> buffer(raw_buffer, kBufferSize);
  va_list arguments2;
  va_start(arguments2, format);
  VSNPrintF(buffer, format, arguments2);
  va_end(arguments2);

  heap_->AddToRingBuffer(buffer.start());
}


385
void GCTracer::Print() const {
386 387 388 389
  if (FLAG_trace_gc) {
    PrintIsolate(heap_->isolate(), "");
  }
  Output("%8.0f ms: ", heap_->isolate()->time_millis_since_init());
390

391
  Output("%s %.1f (%.1f) -> %.1f (%.1f) MB, ", current_.TypeName(false),
392 393 394 395 396 397 398
         static_cast<double>(current_.start_object_size) / MB,
         static_cast<double>(current_.start_memory_size) / MB,
         static_cast<double>(current_.end_object_size) / MB,
         static_cast<double>(current_.end_memory_size) / MB);

  int external_time = static_cast<int>(current_.scopes[Scope::EXTERNAL]);
  double duration = current_.end_time - current_.start_time;
399 400
  Output("%.1f / %d ms", duration, external_time);

401
  if (current_.type == Event::SCAVENGER) {
402
    if (current_.incremental_marking_steps > 0) {
403
      Output(" (+ %.1f ms in %d steps since last GC)",
404 405
             current_.incremental_marking_duration,
             current_.incremental_marking_steps);
406 407
    }
  } else {
408
    if (current_.incremental_marking_steps > 0) {
409
      Output(
410 411
          " (+ %.1f ms in %d steps since start of marking, "
          "biggest step %.1f ms)",
412 413 414
          current_.incremental_marking_duration,
          current_.incremental_marking_steps,
          current_.longest_incremental_marking_step);
415 416 417 418
    }
  }

  if (current_.gc_reason != NULL) {
419
    Output(" [%s]", current_.gc_reason);
420 421 422
  }

  if (current_.collector_reason != NULL) {
423
    Output(" [%s]", current_.collector_reason);
424 425
  }

426
  Output(".\n");
427 428 429 430 431 432
}


void GCTracer::PrintNVP() const {
  double duration = current_.end_time - current_.start_time;
  double spent_in_mutator = current_.start_time - previous_.end_time;
433 434
  intptr_t allocated_since_last_gc =
      current_.start_object_size - previous_.end_object_size;
435

436 437
  switch (current_.type) {
    case Event::SCAVENGER:
438
      PrintIsolate(heap_->isolate(),
439
                   "%8.0f ms: "
440 441 442 443 444 445 446 447 448 449 450 451 452
                   "pause=%.1f "
                   "mutator=%.1f "
                   "gc=%s "
                   "reduce_memory=%d "
                   "scavenge=%.2f "
                   "old_new=%.2f "
                   "weak=%.2f "
                   "roots=%.2f "
                   "code=%.2f "
                   "semispace=%.2f "
                   "object_groups=%.2f "
                   "steps_count=%d "
                   "steps_took=%.1f "
453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468
                   "scavenge_throughput=%" V8_PTR_PREFIX
                   "d "
                   "total_size_before=%" V8_PTR_PREFIX
                   "d "
                   "total_size_after=%" V8_PTR_PREFIX
                   "d "
                   "holes_size_before=%" V8_PTR_PREFIX
                   "d "
                   "holes_size_after=%" V8_PTR_PREFIX
                   "d "
                   "allocated=%" V8_PTR_PREFIX
                   "d "
                   "promoted=%" V8_PTR_PREFIX
                   "d "
                   "semi_space_copied=%" V8_PTR_PREFIX
                   "d "
469 470 471 472 473 474 475
                   "nodes_died_in_new=%d "
                   "nodes_copied_in_new=%d "
                   "nodes_promoted=%d "
                   "promotion_ratio=%.1f%% "
                   "average_survival_ratio=%.1f%% "
                   "promotion_rate=%.1f%% "
                   "semi_space_copy_rate=%.1f%% "
476 477
                   "new_space_allocation_throughput=%" V8_PTR_PREFIX
                   "d "
478
                   "context_disposal_rate=%.1f\n",
479 480
                   heap_->isolate()->time_millis_since_init(), duration,
                   spent_in_mutator, current_.TypeName(true),
481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501
                   current_.reduce_memory,
                   current_.scopes[Scope::SCAVENGER_SCAVENGE],
                   current_.scopes[Scope::SCAVENGER_OLD_TO_NEW_POINTERS],
                   current_.scopes[Scope::SCAVENGER_WEAK],
                   current_.scopes[Scope::SCAVENGER_ROOTS],
                   current_.scopes[Scope::SCAVENGER_CODE_FLUSH_CANDIDATES],
                   current_.scopes[Scope::SCAVENGER_SEMISPACE],
                   current_.scopes[Scope::SCAVENGER_OBJECT_GROUPS],
                   current_.incremental_marking_steps,
                   current_.incremental_marking_duration,
                   ScavengeSpeedInBytesPerMillisecond(),
                   current_.start_object_size, current_.end_object_size,
                   current_.start_holes_size, current_.end_holes_size,
                   allocated_since_last_gc, heap_->promoted_objects_size(),
                   heap_->semi_space_copied_object_size(),
                   heap_->nodes_died_in_new_space_,
                   heap_->nodes_copied_in_new_space_, heap_->nodes_promoted_,
                   heap_->promotion_ratio_, AverageSurvivalRatio(),
                   heap_->promotion_rate_, heap_->semi_space_copied_rate_,
                   NewSpaceAllocationThroughputInBytesPerMillisecond(),
                   ContextDisposalRateInMilliseconds());
502 503 504
      break;
    case Event::MARK_COMPACTOR:
    case Event::INCREMENTAL_MARK_COMPACTOR:
505 506 507 508 509 510 511 512 513 514 515 516 517 518 519 520 521 522 523 524 525 526 527 528 529 530 531 532 533 534 535 536 537 538 539 540 541 542 543 544 545 546 547 548 549 550 551 552 553 554 555 556 557 558 559 560 561 562 563 564 565 566 567 568 569 570 571 572 573 574 575 576 577 578 579 580 581 582 583 584 585 586 587 588 589 590 591 592 593 594 595 596 597 598 599 600 601 602 603 604 605 606 607 608 609 610 611 612 613 614 615 616 617 618 619 620 621 622 623 624 625 626
      PrintIsolate(
          heap_->isolate(),
          "%8.0f ms: "
          "pause=%.1f "
          "mutator=%.1f "
          "gc=%s "
          "reduce_memory=%d "
          "external=%.1f "
          "clear=%1.f "
          "clear.code_flush=%.1f "
          "clear.dependent_code=%.1f "
          "clear.global_handles=%.1f "
          "clear.maps=%.1f "
          "clear.slots_buffer=%.1f "
          "clear.store_buffer=%.1f "
          "clear.string_table=%.1f "
          "clear.weak_cells=%.1f "
          "clear.weak_collections=%.1f "
          "clear.weak_lists=%.1f "
          "evacuate=%.1f "
          "evacuate.candidates=%.1f "
          "evacuate.clean_up=%.1f "
          "evacuate.new_space=%.1f "
          "evacuate.update_pointers=%.1f "
          "evacuate.update_pointers.between_evacuated=%.1f "
          "evacuate.update_pointers.to_evacuated=%.1f "
          "evacuate.update_pointers.to_new=%.1f "
          "evacuate.update_pointers.weak=%.1f "
          "finish=%.1f "
          "mark=%.1f "
          "mark.finish_incremental=%.1f "
          "mark.prepare_code_flush=%.1f "
          "mark.roots=%.1f "
          "mark.weak_closure=%.1f "
          "sweep=%.1f "
          "sweep.code=%.1f "
          "sweep.map=%.1f "
          "sweep.old=%.1f "
          "incremental_finalize=%.1f "
          "steps_count=%d "
          "steps_took=%.1f "
          "longest_step=%.1f "
          "finalization_steps_count=%d "
          "finalization_steps_took=%.1f "
          "finalization_longest_step=%.1f "
          "incremental_marking_throughput=%" V8_PTR_PREFIX
          "d "
          "total_size_before=%" V8_PTR_PREFIX
          "d "
          "total_size_after=%" V8_PTR_PREFIX
          "d "
          "holes_size_before=%" V8_PTR_PREFIX
          "d "
          "holes_size_after=%" V8_PTR_PREFIX
          "d "
          "allocated=%" V8_PTR_PREFIX
          "d "
          "promoted=%" V8_PTR_PREFIX
          "d "
          "semi_space_copied=%" V8_PTR_PREFIX
          "d "
          "nodes_died_in_new=%d "
          "nodes_copied_in_new=%d "
          "nodes_promoted=%d "
          "promotion_ratio=%.1f%% "
          "average_survival_ratio=%.1f%% "
          "promotion_rate=%.1f%% "
          "semi_space_copy_rate=%.1f%% "
          "new_space_allocation_throughput=%" V8_PTR_PREFIX
          "d "
          "context_disposal_rate=%.1f "
          "compaction_speed=%" V8_PTR_PREFIX "d\n",
          heap_->isolate()->time_millis_since_init(), duration,
          spent_in_mutator, current_.TypeName(true), current_.reduce_memory,
          current_.scopes[Scope::EXTERNAL], current_.scopes[Scope::MC_CLEAR],
          current_.scopes[Scope::MC_CLEAR_CODE_FLUSH],
          current_.scopes[Scope::MC_CLEAR_DEPENDENT_CODE],
          current_.scopes[Scope::MC_CLEAR_GLOBAL_HANDLES],
          current_.scopes[Scope::MC_CLEAR_MAPS],
          current_.scopes[Scope::MC_CLEAR_SLOTS_BUFFER],
          current_.scopes[Scope::MC_CLEAR_STORE_BUFFER],
          current_.scopes[Scope::MC_CLEAR_STRING_TABLE],
          current_.scopes[Scope::MC_CLEAR_WEAK_CELLS],
          current_.scopes[Scope::MC_CLEAR_WEAK_COLLECTIONS],
          current_.scopes[Scope::MC_CLEAR_WEAK_LISTS],
          current_.scopes[Scope::MC_EVACUATE],
          current_.scopes[Scope::MC_EVACUATE_CANDIDATES],
          current_.scopes[Scope::MC_EVACUATE_CLEAN_UP],
          current_.scopes[Scope::MC_EVACUATE_NEW_SPACE],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_BETWEEN_EVACUATED],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_TO_EVACUATED],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_TO_NEW],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_WEAK],
          current_.scopes[Scope::MC_FINISH], current_.scopes[Scope::MC_MARK],
          current_.scopes[Scope::MC_MARK_FINISH_INCREMENTAL],
          current_.scopes[Scope::MC_MARK_PREPARE_CODE_FLUSH],
          current_.scopes[Scope::MC_MARK_ROOTS],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE],
          current_.scopes[Scope::MC_SWEEP],
          current_.scopes[Scope::MC_SWEEP_CODE],
          current_.scopes[Scope::MC_SWEEP_MAP],
          current_.scopes[Scope::MC_SWEEP_OLD],
          current_.scopes[Scope::MC_INCREMENTAL_FINALIZE],
          current_.incremental_marking_steps,
          current_.incremental_marking_duration,
          current_.longest_incremental_marking_step,
          cumulative_incremental_marking_finalization_steps_,
          cumulative_incremental_marking_finalization_duration_,
          longest_incremental_marking_finalization_step_,
          IncrementalMarkingSpeedInBytesPerMillisecond(),
          current_.start_object_size, current_.end_object_size,
          current_.start_holes_size, current_.end_holes_size,
          allocated_since_last_gc, heap_->promoted_objects_size(),
          heap_->semi_space_copied_object_size(),
          heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_,
          heap_->nodes_promoted_, heap_->promotion_ratio_,
          AverageSurvivalRatio(), heap_->promotion_rate_,
          heap_->semi_space_copied_rate_,
          NewSpaceAllocationThroughputInBytesPerMillisecond(),
          ContextDisposalRateInMilliseconds(),
          CompactionSpeedInBytesPerMillisecond());
627 628 629 630 631 632
      break;
    case Event::START:
      break;
    default:
      UNREACHABLE();
  }
633
}
634 635 636 637 638 639 640 641 642 643 644 645 646 647 648 649 650 651 652 653 654 655 656 657 658 659 660 661 662 663 664


double GCTracer::MeanDuration(const EventBuffer& events) const {
  if (events.empty()) return 0.0;

  double mean = 0.0;
  EventBuffer::const_iterator iter = events.begin();
  while (iter != events.end()) {
    mean += iter->end_time - iter->start_time;
    ++iter;
  }

  return mean / events.size();
}


double GCTracer::MaxDuration(const EventBuffer& events) const {
  if (events.empty()) return 0.0;

  double maximum = 0.0f;
  EventBuffer::const_iterator iter = events.begin();
  while (iter != events.end()) {
    maximum = Max(iter->end_time - iter->start_time, maximum);
    ++iter;
  }

  return maximum;
}


double GCTracer::MeanIncrementalMarkingDuration() const {
665 666 667 668
  if (cumulative_incremental_marking_steps_ == 0) return 0.0;

  // We haven't completed an entire round of incremental marking, yet.
  // Use data from GCTracer instead of data from event buffers.
669
  if (incremental_mark_compactor_events_.empty()) {
670 671 672
    return cumulative_incremental_marking_duration_ /
           cumulative_incremental_marking_steps_;
  }
673

674 675
  int steps = 0;
  double durations = 0.0;
676 677
  EventBuffer::const_iterator iter = incremental_mark_compactor_events_.begin();
  while (iter != incremental_mark_compactor_events_.end()) {
678 679 680 681 682 683 684 685
    steps += iter->incremental_marking_steps;
    durations += iter->incremental_marking_duration;
    ++iter;
  }

  if (steps == 0) return 0.0;

  return durations / steps;
686 687 688 689
}


double GCTracer::MaxIncrementalMarkingDuration() const {
690 691
  // We haven't completed an entire round of incremental marking, yet.
  // Use data from GCTracer instead of data from event buffers.
692 693
  if (incremental_mark_compactor_events_.empty())
    return longest_incremental_marking_step_;
694 695

  double max_duration = 0.0;
696 697
  EventBuffer::const_iterator iter = incremental_mark_compactor_events_.begin();
  while (iter != incremental_mark_compactor_events_.end())
698 699 700 701 702 703
    max_duration = Max(iter->longest_incremental_marking_step, max_duration);

  return max_duration;
}


704
intptr_t GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const {
705 706 707 708
  if (cumulative_incremental_marking_duration_ == 0.0) return 0;

  // We haven't completed an entire round of incremental marking, yet.
  // Use data from GCTracer instead of data from event buffers.
709
  if (incremental_mark_compactor_events_.empty()) {
710
    return static_cast<intptr_t>(cumulative_incremental_marking_bytes_ /
711
                                 cumulative_pure_incremental_marking_duration_);
712 713 714 715
  }

  intptr_t bytes = 0;
  double durations = 0.0;
716 717
  EventBuffer::const_iterator iter = incremental_mark_compactor_events_.begin();
  while (iter != incremental_mark_compactor_events_.end()) {
718
    bytes += iter->incremental_marking_bytes;
719
    durations += iter->pure_incremental_marking_duration;
720 721 722 723
    ++iter;
  }

  if (durations == 0.0) return 0;
724 725
  // Make sure the result is at least 1.
  return Max<size_t>(static_cast<size_t>(bytes / durations + 0.5), 1);
726
}
727 728


729 730
intptr_t GCTracer::ScavengeSpeedInBytesPerMillisecond(
    ScavengeSpeedMode mode) const {
731 732 733 734
  intptr_t bytes = 0;
  double durations = 0.0;
  EventBuffer::const_iterator iter = scavenger_events_.begin();
  while (iter != scavenger_events_.end()) {
735 736
    bytes += mode == kForAllObjects ? iter->new_space_object_size
                                    : iter->survived_new_space_object_size;
737 738 739 740 741
    durations += iter->end_time - iter->start_time;
    ++iter;
  }

  if (durations == 0.0) return 0;
742 743
  // Make sure the result is at least 1.
  return Max<size_t>(static_cast<size_t>(bytes / durations + 0.5), 1);
744 745 746 747
}


// Estimated compaction speed (live bytes moved per ms) over the archived
// compaction events, or 0 when there is no data.
intptr_t GCTracer::CompactionSpeedInBytesPerMillisecond() const {
  if (compaction_events_.size() == 0) return 0;
  intptr_t bytes = 0;
  double durations = 0.0;
  CompactionEventBuffer::const_iterator iter = compaction_events_.begin();
  while (iter != compaction_events_.end()) {
    bytes += iter->live_bytes_compacted;
    durations += iter->duration;
    ++iter;
  }

  if (durations == 0.0) return 0;
  // Make sure the result is at least 1.
  return Max<intptr_t>(static_cast<intptr_t>(bytes / durations + 0.5), 1);
}


// Estimated full (non-incremental) mark-compact speed: object bytes at GC
// start divided by pause time, over the archived mark-compactor events.
intptr_t GCTracer::MarkCompactSpeedInBytesPerMillisecond() const {
  intptr_t bytes = 0;
  double durations = 0.0;
  EventBuffer::const_iterator iter = mark_compactor_events_.begin();
  while (iter != mark_compactor_events_.end()) {
    bytes += iter->start_object_size;
    durations += iter->end_time - iter->start_time;
    ++iter;
  }

  if (durations == 0.0) return 0;
  // Make sure the result is at least 1.
  return Max<size_t>(static_cast<size_t>(bytes / durations + 0.5), 1);
}


// Estimated speed of the final (atomic) pause of an incremental mark-compact,
// over the archived incremental mark-compactor events.
intptr_t GCTracer::FinalIncrementalMarkCompactSpeedInBytesPerMillisecond()
    const {
  intptr_t bytes = 0;
  double durations = 0.0;
  EventBuffer::const_iterator iter = incremental_mark_compactor_events_.begin();
  while (iter != incremental_mark_compactor_events_.end()) {
    bytes += iter->start_object_size;
    durations += iter->end_time - iter->start_time;
    ++iter;
  }

  if (durations == 0.0) return 0;
  // Make sure the result is at least 1.
  return Max<size_t>(static_cast<size_t>(bytes / durations + 0.5), 1);
}

796 797 798 799

double GCTracer::CombinedMarkCompactSpeedInBytesPerMillisecond() {
  if (combined_mark_compact_speed_cache_ > 0)
    return combined_mark_compact_speed_cache_;
800
  const double kMinimumMarkingSpeed = 0.5;
801 802 803 804
  double speed1 =
      static_cast<double>(IncrementalMarkingSpeedInBytesPerMillisecond());
  double speed2 = static_cast<double>(
      FinalIncrementalMarkCompactSpeedInBytesPerMillisecond());
805
  if (speed1 < kMinimumMarkingSpeed || speed2 < kMinimumMarkingSpeed) {
806 807 808 809 810 811 812 813 814 815
    // No data for the incremental marking speed.
    // Return the non-incremental mark-compact speed.
    combined_mark_compact_speed_cache_ =
        static_cast<double>(MarkCompactSpeedInBytesPerMillisecond());
  } else {
    // Combine the speed of incremental step and the speed of the final step.
    // 1 / (1 / speed1 + 1 / speed2) = speed1 * speed2 / (speed1 + speed2).
    combined_mark_compact_speed_cache_ = speed1 * speed2 / (speed1 + speed2);
  }
  return combined_mark_compact_speed_cache_;
816
}


size_t GCTracer::NewSpaceAllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  // New-space allocation throughput: bytes allocated since the last GC plus
  // recorded allocation events, divided by the corresponding durations.
  // If |time_ms| is non-zero, only events up to that much accumulated
  // duration are considered.
  size_t total_bytes = new_space_allocation_in_bytes_since_gc_;
  double total_duration = allocation_duration_since_gc_;
  const size_t max_bytes = static_cast<size_t>(-1);
  // Stop early if adding another event could overflow the byte counter.
  for (AllocationEventBuffer::const_iterator it =
           new_space_allocation_events_.begin();
       it != new_space_allocation_events_.end() &&
       total_bytes < max_bytes - total_bytes &&
       (time_ms == 0 || total_duration < time_ms);
       ++it) {
    total_bytes += it->allocation_in_bytes_;
    total_duration += it->duration_;
  }

  if (total_duration == 0.0) return 0;
  // Make sure the result is at least 1.
  return Max<size_t>(
      static_cast<size_t>(total_bytes / total_duration + 0.5), 1);
}


size_t GCTracer::OldGenerationAllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  // Old-generation allocation throughput: bytes allocated since the last GC
  // plus recorded allocation events, divided by the corresponding durations.
  // If |time_ms| is non-zero, only events up to that much accumulated
  // duration are considered.
  size_t total_bytes = old_generation_allocation_in_bytes_since_gc_;
  double total_duration = allocation_duration_since_gc_;
  const size_t max_bytes = static_cast<size_t>(-1);
  // Stop early if adding another event could overflow the byte counter.
  for (AllocationEventBuffer::const_iterator it =
           old_generation_allocation_events_.begin();
       it != old_generation_allocation_events_.end() &&
       total_bytes < max_bytes - total_bytes &&
       (time_ms == 0 || total_duration < time_ms);
       ++it) {
    total_bytes += it->allocation_in_bytes_;
    total_duration += it->duration_;
  }

  if (total_duration == 0.0) return 0;
  // Make sure the result is at least 1.
  return Max<size_t>(
      static_cast<size_t>(total_bytes / total_duration + 0.5), 1);
}


size_t GCTracer::AllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  // Overall throughput is the sum of the new-space and old-generation rates.
  const size_t new_space_rate =
      NewSpaceAllocationThroughputInBytesPerMillisecond(time_ms);
  const size_t old_generation_rate =
      OldGenerationAllocationThroughputInBytesPerMillisecond(time_ms);
  return new_space_rate + old_generation_rate;
}


size_t GCTracer::CurrentAllocationThroughputInBytesPerMillisecond() const {
  // Restrict the measurement to the most recent time frame.
  const size_t throughput =
      AllocationThroughputInBytesPerMillisecond(kThroughputTimeFrameMs);
  return throughput;
}


size_t GCTracer::CurrentOldGenerationAllocationThroughputInBytesPerMillisecond()
    const {
  // Restrict the measurement to the most recent time frame.
  const size_t throughput =
      OldGenerationAllocationThroughputInBytesPerMillisecond(
          kThroughputTimeFrameMs);
  return throughput;
}


double GCTracer::ContextDisposalRateInMilliseconds() const {
879
  if (context_disposal_events_.size() < kRingBufferMaxSize) return 0.0;
880

881
  double begin = heap_->MonotonicallyIncreasingTimeInMs();
882 883 884 885 886 887 888 889 890 891
  double end = 0.0;
  ContextDisposalEventBuffer::const_iterator iter =
      context_disposal_events_.begin();
  while (iter != context_disposal_events_.end()) {
    end = iter->time_;
    ++iter;
  }

  return (begin - end) / context_disposal_events_.size();
}


double GCTracer::AverageSurvivalRatio() const {
  if (survival_events_.size() == 0) return 0.0;
896 897

  double sum_of_rates = 0.0;
898 899
  SurvivalEventBuffer::const_iterator iter = survival_events_.begin();
  while (iter != survival_events_.end()) {
900
    sum_of_rates += iter->promotion_ratio_;
901 902 903
    ++iter;
  }

904
  return sum_of_rates / static_cast<double>(survival_events_.size());
905 906 907 908
}


bool GCTracer::SurvivalEventsRecorded() const {
909
  return survival_events_.size() > 0;
910
}


void GCTracer::ResetSurvivalEvents() {
  // Drop all recorded survival events.
  survival_events_.reset();
}
}  // namespace internal
}  // namespace v8