// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/debug/debug-coverage.h"

#include "src/ast/ast.h"
#include "src/base/hashmap.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/frames-inl.h"
#include "src/isolate.h"
#include "src/objects.h"
#include "src/objects/debug-objects-inl.h"

namespace v8 {
namespace internal {

class SharedToCounterMap
    : public base::TemplateHashMapImpl<SharedFunctionInfo, uint32_t,
                                       base::KeyEqualityMatcher<Object>,
                                       base::DefaultAllocationPolicy> {
 public:
  typedef base::TemplateHashMapEntry<SharedFunctionInfo, uint32_t> Entry;
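  // Adds |count| to the entry for |key|, saturating at UINT32_MAX instead of
  // wrapping on overflow (e.g. adding 5 to UINT32_MAX - 3 stores UINT32_MAX,
  // not 1).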
  inline void Add(SharedFunctionInfo key, uint32_t count) {
    Entry* entry = LookupOrInsert(key, Hash(key), []() { return 0; });
    uint32_t old_count = entry->value;
    if (UINT32_MAX - count < old_count) {
      entry->value = UINT32_MAX;
    } else {
      entry->value = old_count + count;
    }
  }

  inline uint32_t Get(SharedFunctionInfo key) {
    Entry* entry = Lookup(key, Hash(key));
    if (entry == nullptr) return 0;
    return entry->value;
  }

 private:
  static uint32_t Hash(SharedFunctionInfo key) {
    return static_cast<uint32_t>(key.ptr());
  }

  DisallowHeapAllocation no_gc;
};

namespace {
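
// Prefers the position of the 'function' token; falls back to the shared
// function info's start position when no token position is recorded.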
int StartPosition(SharedFunctionInfo info) {
  int start = info->function_token_position();
  if (start == kNoSourcePosition) start = info->StartPosition();
  return start;
}

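// Orders by ascending start position; ties are broken by placing the
// enclosing (larger) range first, so outer functions precede the inner
// functions they contain.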
bool CompareSharedFunctionInfo(SharedFunctionInfo a, SharedFunctionInfo b) {
  int a_start = StartPosition(a);
  int b_start = StartPosition(b);
  if (a_start == b_start) return a->EndPosition() > b->EndPosition();
  return a_start < b_start;
}

bool CompareCoverageBlock(const CoverageBlock& a, const CoverageBlock& b) {
  DCHECK_NE(kNoSourcePosition, a.start);
  DCHECK_NE(kNoSourcePosition, b.start);
  if (a.start == b.start) return a.end > b.end;
  return a.start < b.start;
}

void SortBlockData(std::vector<CoverageBlock>& v) {
  // Sort according to the block nesting structure.
  std::sort(v.begin(), v.end(), CompareCoverageBlock);
}

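// Copies the raw block data (start/end source positions and a count per
// slot) out of the function's CoverageInfo, sorted by block nesting
// structure.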
std::vector<CoverageBlock> GetSortedBlockData(SharedFunctionInfo shared) {
  DCHECK(shared->HasCoverageInfo());

  CoverageInfo coverage_info =
      CoverageInfo::cast(shared->GetDebugInfo()->coverage_info());

  std::vector<CoverageBlock> result;
  if (coverage_info->SlotCount() == 0) return result;

  for (int i = 0; i < coverage_info->SlotCount(); i++) {
    const int start_pos = coverage_info->StartSourcePosition(i);
    const int until_pos = coverage_info->EndSourcePosition(i);
    const int count = coverage_info->BlockCount(i);

    DCHECK_NE(kNoSourcePosition, start_pos);
    result.emplace_back(start_pos, until_pos, count);
  }

  SortBlockData(result);

  return result;
}

// A utility class to simplify logic for performing passes over block coverage
// ranges. Provides access to the implicit tree structure of ranges (i.e. access
// to parent and sibling blocks), and supports efficient in-place editing and
// deletion. The underlying backing store is the array of CoverageBlocks stored
// on the CoverageFunction.
class CoverageBlockIterator final {
 public:
  explicit CoverageBlockIterator(CoverageFunction* function)
      : function_(function),
        ended_(false),
        delete_current_(false),
        read_index_(-1),
        write_index_(-1) {
    DCHECK(std::is_sorted(function_->blocks.begin(), function_->blocks.end(),
                          CompareCoverageBlock));
  }

  ~CoverageBlockIterator() {
    Finalize();
    DCHECK(std::is_sorted(function_->blocks.begin(), function_->blocks.end(),
                          CompareCoverageBlock));
  }

  bool HasNext() const {
    return read_index_ + 1 < static_cast<int>(function_->blocks.size());
  }

  bool Next() {
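    // Maintains nesting_stack_ as the chain of enclosing ranges: entries
    // ending at or before the new block's start are popped, so the top of the
    // stack is always the current block's parent and the bottom is the
    // function's own range.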
    if (!HasNext()) {
      if (!ended_) MaybeWriteCurrent();
      ended_ = true;
      return false;
    }

    // If a block has been deleted, subsequent iteration moves trailing blocks
    // to their updated position within the array.
    MaybeWriteCurrent();

    if (read_index_ == -1) {
      // Initialize the nesting stack with the function range.
      nesting_stack_.emplace_back(function_->start, function_->end,
                                  function_->count);
    } else if (!delete_current_) {
      nesting_stack_.emplace_back(GetBlock());
    }

    delete_current_ = false;
    read_index_++;

    DCHECK(IsActive());

    CoverageBlock& block = GetBlock();
    while (nesting_stack_.size() > 1 &&
           nesting_stack_.back().end <= block.start) {
      nesting_stack_.pop_back();
    }

    DCHECK_IMPLIES(block.start >= function_->end,
                   block.end == kNoSourcePosition);
    DCHECK_NE(block.start, kNoSourcePosition);
    DCHECK_LE(block.end, GetParent().end);

    return true;
  }

  CoverageBlock& GetBlock() {
    DCHECK(IsActive());
    return function_->blocks[read_index_];
  }

  CoverageBlock& GetNextBlock() {
    DCHECK(IsActive());
    DCHECK(HasNext());
    return function_->blocks[read_index_ + 1];
  }

  CoverageBlock& GetPreviousBlock() {
    DCHECK(IsActive());
    DCHECK_GT(read_index_, 0);
    return function_->blocks[read_index_ - 1];
  }

  CoverageBlock& GetParent() {
    DCHECK(IsActive());
    return nesting_stack_.back();
  }

  bool HasSiblingOrChild() {
    DCHECK(IsActive());
    return HasNext() && GetNextBlock().start < GetParent().end;
  }

  CoverageBlock& GetSiblingOrChild() {
    DCHECK(HasSiblingOrChild());
    DCHECK(IsActive());
    return GetNextBlock();
  }

  // A range is considered to be at top level if its parent range is the
  // function range.
  bool IsTopLevel() const { return nesting_stack_.size() == 1; }

  void DeleteBlock() {
    DCHECK(!delete_current_);
    DCHECK(IsActive());
    delete_current_ = true;
  }

 private:
  void MaybeWriteCurrent() {
    if (delete_current_) return;
    if (read_index_ >= 0 && write_index_ != read_index_) {
      function_->blocks[write_index_] = function_->blocks[read_index_];
    }
    write_index_++;
  }

  void Finalize() {
    while (Next()) {
      // Just iterate to the end.
    }
    function_->blocks.resize(write_index_);
  }

  bool IsActive() const { return read_index_ >= 0 && !ended_; }

  CoverageFunction* function_;
  std::vector<CoverageBlock> nesting_stack_;
  bool ended_;
  bool delete_current_;
  int read_index_;
  int write_index_;
};

bool HaveSameSourceRange(const CoverageBlock& lhs, const CoverageBlock& rhs) {
  return lhs.start == rhs.start && lhs.end == rhs.end;
}

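// Merges blocks that cover an identical source range, keeping the larger of
// the two counts.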
void MergeDuplicateRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next() && iter.HasNext()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& next_block = iter.GetNextBlock();

    if (!HaveSameSourceRange(block, next_block)) continue;

    DCHECK_NE(kNoSourcePosition, block.end);  // Non-singleton range.
    next_block.count = std::max(block.count, next_block.count);
    iter.DeleteBlock();
  }
}

// Rewrite position singletons (produced by unconditional control flow
// like return statements, and by continuation counters) into source
// ranges that end at the next sibling range or the end of the parent
// range, whichever comes first.
void RewritePositionSingletonsToRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& parent = iter.GetParent();

    if (block.start >= function->end) {
      DCHECK_EQ(block.end, kNoSourcePosition);
      iter.DeleteBlock();
    } else if (block.end == kNoSourcePosition) {
      // The current block ends at the next sibling block (if it exists) or the
      // end of the parent block otherwise.
      if (iter.HasSiblingOrChild()) {
        block.end = iter.GetSiblingOrChild().start;
      } else if (iter.IsTopLevel()) {
        // See https://crbug.com/v8/6661. Functions are special-cased because
        // we never want the closing brace to be uncovered. This is mainly to
        // avoid a noisy UI.
        block.end = parent.end - 1;
      } else {
        block.end = parent.end;
      }
    }
  }
}

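// Merges a block into its next sibling when the two are adjacent
// (sibling.start == block.end) and carry the same count.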
void MergeConsecutiveRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();

    if (iter.HasSiblingOrChild()) {
      CoverageBlock& sibling = iter.GetSiblingOrChild();
      if (sibling.start == block.end && sibling.count == block.count) {
        // Best-effort: this pass may miss mergeable siblings in the presence
        // of child blocks.
        sibling.start = block.start;
        iter.DeleteBlock();
      }
    }
  }
}

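// Removes a block when its count equals its parent's count; such a block
// adds no information beyond what the parent already expresses.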
void MergeNestedRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& parent = iter.GetParent();

    if (parent.count == block.count) {
      // Transformation may not be valid if sibling blocks exist with a
      // differing count.
      iter.DeleteBlock();
    }
  }
}

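// Removes singletons (blocks without an end position) that share their start
// position with the preceding full range; see the comment at the call site in
// CollectBlockCoverage for the motivating example.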
void FilterAliasedSingletons(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  iter.Next();  // Advance once since we reference the previous block later.

  while (iter.Next()) {
    CoverageBlock& previous_block = iter.GetPreviousBlock();
    CoverageBlock& block = iter.GetBlock();

    bool is_singleton = block.end == kNoSourcePosition;
    bool aliases_start = block.start == previous_block.start;

    if (is_singleton && aliases_start) {
      // The previous block must have a full range since duplicate singletons
      // have already been merged.
      DCHECK_NE(previous_block.end, kNoSourcePosition);
      // Likewise, the next block must have another start position since
      // singletons are sorted to the end.
      DCHECK_IMPLIES(iter.HasNext(), iter.GetNextBlock().start != block.start);
      iter.DeleteBlock();
    }
  }
}

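// Drops blocks with count == 0 whose immediate parent is also uncovered.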
void FilterUncoveredRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& parent = iter.GetParent();
    if (block.count == 0 && parent.count == 0) iter.DeleteBlock();
  }
}

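// Drops blocks whose source range is empty (start == end).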
void FilterEmptyRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    if (block.start == block.end) iter.DeleteBlock();
  }
}

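// For binary coverage modes, clamps all block counts to 0 or 1.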
void ClampToBinary(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    if (block.count > 0) block.count = 1;
  }
}

void ResetAllBlockCounts(SharedFunctionInfo shared) {
  DCHECK(shared->HasCoverageInfo());

  CoverageInfo coverage_info =
      CoverageInfo::cast(shared->GetDebugInfo()->coverage_info());

  for (int i = 0; i < coverage_info->SlotCount(); i++) {
    coverage_info->ResetBlockCount(i);
  }
}

bool IsBlockMode(debug::CoverageMode mode) {
  switch (mode) {
    case debug::CoverageMode::kBlockBinary:
    case debug::CoverageMode::kBlockCount:
      return true;
    default:
      return false;
  }
}

bool IsBinaryMode(debug::CoverageMode mode) {
  switch (mode) {
    case debug::CoverageMode::kBlockBinary:
    case debug::CoverageMode::kPreciseBinary:
      return true;
    default:
      return false;
  }
}

void CollectBlockCoverage(CoverageFunction* function, SharedFunctionInfo info,
                          debug::CoverageMode mode) {
  DCHECK(IsBlockMode(mode));

  function->has_block_coverage = true;
  function->blocks = GetSortedBlockData(info);

  // If in binary mode, only report counts of 0/1.
  if (mode == debug::CoverageMode::kBlockBinary) ClampToBinary(function);

  // Remove singleton ranges with the same start position as a full range and
  // throw away their counts.
  // Singleton ranges are only intended to split existing full ranges and
  // should never expand into a full range. Consider
  // 'if (cond) { ... } else { ... }' as a problematic example; if the
  // then-block produces a continuation singleton, it would incorrectly expand
  // into the else range.
  // For more context, see https://crbug.com/v8/8237.
  FilterAliasedSingletons(function);

  // Rewrite all singletons (created e.g. by continuations and unconditional
  // control flow) to ranges.
  RewritePositionSingletonsToRanges(function);

  // Merge nested and consecutive ranges with identical counts.
  // Note that it's necessary to merge duplicate ranges prior to merging
  // nested ranges in order to avoid invalid transformations. See
  // crbug.com/827530.
  MergeConsecutiveRanges(function);

  SortBlockData(function->blocks);
  MergeDuplicateRanges(function);
  MergeNestedRanges(function);

  MergeConsecutiveRanges(function);

  // Filter out ranges with count == 0 unless the immediate parent range has
  // a count != 0.
  FilterUncoveredRanges(function);

  // Filter out ranges of zero length.
  FilterEmptyRanges(function);

  // Reset all counters on the DebugInfo to zero.
  ResetAllBlockCounts(info);
}
}  // anonymous namespace

std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
  DCHECK(!isolate->is_best_effort_code_coverage());
  std::unique_ptr<Coverage> result =
      Collect(isolate, isolate->code_coverage_mode());
  if (!isolate->is_collecting_type_profile() &&
      (isolate->is_precise_binary_code_coverage() ||
       isolate->is_block_binary_code_coverage())) {
    // We do not have to hold onto feedback vectors for invocations we already
    // reported. So we can reset the list.
    isolate->SetFeedbackVectorsForProfilingTools(*ArrayList::New(isolate, 0));
  }
  return result;
}

std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
  return Collect(isolate, v8::debug::CoverageMode::kBestEffort);
}

std::unique_ptr<Coverage> Coverage::Collect(
    Isolate* isolate, v8::debug::CoverageMode collectionMode) {
  SharedToCounterMap counter_map;

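  // Invocation counts are cleared after being read in all modes except
  // best-effort coverage.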
  const bool reset_count =
      collectionMode != v8::debug::CoverageMode::kBestEffort;

  switch (isolate->code_coverage_mode()) {
    case v8::debug::CoverageMode::kBlockBinary:
    case v8::debug::CoverageMode::kBlockCount:
    case v8::debug::CoverageMode::kPreciseBinary:
    case v8::debug::CoverageMode::kPreciseCount: {
      // Feedback vectors are already listed to prevent losing them to GC.
      DCHECK(isolate->factory()
                 ->feedback_vectors_for_profiling_tools()
                 ->IsArrayList());
      Handle<ArrayList> list = Handle<ArrayList>::cast(
          isolate->factory()->feedback_vectors_for_profiling_tools());
      for (int i = 0; i < list->Length(); i++) {
        FeedbackVector vector = FeedbackVector::cast(list->Get(i));
        SharedFunctionInfo shared = vector->shared_function_info();
        DCHECK(shared->IsSubjectToDebugging());
        uint32_t count = static_cast<uint32_t>(vector->invocation_count());
        if (reset_count) vector->clear_invocation_count();
        counter_map.Add(shared, count);
      }
      break;
    }
    case v8::debug::CoverageMode::kBestEffort: {
      DCHECK(!isolate->factory()
                  ->feedback_vectors_for_profiling_tools()
                  ->IsArrayList());
      DCHECK_EQ(v8::debug::CoverageMode::kBestEffort, collectionMode);
      HeapIterator heap_iterator(isolate->heap());
      for (HeapObject current_obj = heap_iterator.next();
           !current_obj.is_null(); current_obj = heap_iterator.next()) {
        if (!current_obj->IsFeedbackVector()) continue;
        FeedbackVector vector = FeedbackVector::cast(current_obj);
        SharedFunctionInfo shared = vector->shared_function_info();
        if (!shared->IsSubjectToDebugging()) continue;
        uint32_t count = static_cast<uint32_t>(vector->invocation_count());
        counter_map.Add(shared, count);
      }
      break;
    }
  }

  // Iterate shared function infos of every script and build a mapping
  // between source ranges and invocation counts.
  std::unique_ptr<Coverage> result(new Coverage());
  Script::Iterator scripts(isolate);
  for (Script script = scripts.Next(); !script.is_null();
       script = scripts.Next()) {
    if (!script->IsUserJavaScript()) continue;

    // Create and add new script data.
    Handle<Script> script_handle(script, isolate);
    result->emplace_back(script_handle);
    std::vector<CoverageFunction>* functions = &result->back().functions;

    std::vector<SharedFunctionInfo> sorted;

    {
      // Sort functions by start position, from outer to inner functions.
      SharedFunctionInfo::ScriptIterator infos(isolate, *script_handle);
      for (SharedFunctionInfo info = infos.Next(); !info.is_null();
           info = infos.Next()) {
        sorted.push_back(info);
      }
      std::sort(sorted.begin(), sorted.end(), CompareSharedFunctionInfo);
    }

    // Stack to track nested functions, referring to functions by index.
    std::vector<size_t> nesting;

    // Use sorted list to reconstruct function nesting.
    for (SharedFunctionInfo info : sorted) {
      int start = StartPosition(info);
      int end = info->EndPosition();
      uint32_t count = counter_map.Get(info);
      // Find the correct outer function based on start position.
      while (!nesting.empty() && functions->at(nesting.back()).end <= start) {
        nesting.pop_back();
      }
      if (count != 0) {
        switch (collectionMode) {
          case v8::debug::CoverageMode::kBlockCount:
          case v8::debug::CoverageMode::kPreciseCount:
            break;
          case v8::debug::CoverageMode::kBlockBinary:
          case v8::debug::CoverageMode::kPreciseBinary:
            count = info->has_reported_binary_coverage() ? 0 : 1;
            info->set_has_reported_binary_coverage(true);
            break;
          case v8::debug::CoverageMode::kBestEffort:
            count = 1;
            break;
        }
      }

      Handle<String> name(info->DebugName(), isolate);
      CoverageFunction function(start, end, count, name);

      if (IsBlockMode(collectionMode) && info->HasCoverageInfo()) {
        CollectBlockCoverage(&function, info, collectionMode);
      }

      // Only include a function range if itself or its parent function is
      // covered, or if it contains non-trivial block coverage.
      bool is_covered = (count != 0);
      bool parent_is_covered =
          (!nesting.empty() && functions->at(nesting.back()).count != 0);
      bool has_block_coverage = !function.blocks.empty();
      if (is_covered || parent_is_covered || has_block_coverage) {
        nesting.push_back(functions->size());
        functions->emplace_back(function);
      }
    }

    // Remove entries for scripts that have no coverage.
    if (functions->empty()) result->pop_back();
  }
  return result;
}

void Coverage::SelectMode(Isolate* isolate, debug::CoverageMode mode) {
  switch (mode) {
    case debug::CoverageMode::kBestEffort:
      // Note that DevTools switches back to best-effort coverage once the
      // recording is stopped. Since we delete coverage infos at that point, any
      // following coverage recording (without reloads) will be at function
      // granularity.
      isolate->debug()->RemoveAllCoverageInfos();
      if (!isolate->is_collecting_type_profile()) {
        isolate->SetFeedbackVectorsForProfilingTools(
            ReadOnlyRoots(isolate).undefined_value());
      }
      break;
    case debug::CoverageMode::kBlockBinary:
    case debug::CoverageMode::kBlockCount:
    case debug::CoverageMode::kPreciseBinary:
    case debug::CoverageMode::kPreciseCount: {
      HandleScope scope(isolate);

      // Remove all optimized functions. Optimized and inlined functions do
      // not increment the invocation count.
      Deoptimizer::DeoptimizeAll(isolate);

      // Root all feedback vectors to avoid early collection.
      isolate->MaybeInitializeVectorListFromHeap();

      HeapIterator heap_iterator(isolate->heap());
      for (HeapObject o = heap_iterator.next(); !o.is_null();
           o = heap_iterator.next()) {
        if (IsBinaryMode(mode) && o->IsSharedFunctionInfo()) {
          // If collecting binary coverage, reset
          // SFI::has_reported_binary_coverage to avoid optimizing / inlining
          // functions before they have reported coverage.
          SharedFunctionInfo shared = SharedFunctionInfo::cast(o);
          shared->set_has_reported_binary_coverage(false);
        } else if (o->IsFeedbackVector()) {
          // In any case, clear any collected invocation counts.
          FeedbackVector::cast(o)->clear_invocation_count();
        }
      }

      break;
    }
  }
  isolate->set_code_coverage_mode(mode);
}

}  // namespace internal
}  // namespace v8