// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_PROFILER_PROFILE_GENERATOR_H_
#define V8_PROFILER_PROFILE_GENERATOR_H_

#include <atomic>
#include <deque>
#include <limits>
#include <map>
#include <memory>
#include <unordered_map>
#include <unordered_set>
#include <utility>
#include <vector>

#include "include/v8-profiler.h"
#include "src/base/platform/time.h"
#include "src/builtins/builtins.h"
#include "src/codegen/source-position.h"
#include "src/logging/code-events.h"
#include "src/profiler/strings-storage.h"
#include "src/utils/allocation.h"


namespace v8 {
namespace internal {

struct TickSample;

30
// Provides a mapping from the offsets within generated code or a bytecode array
31
// to the source line and inlining id.
32
class V8_EXPORT_PRIVATE SourcePositionTable : public Malloced {
33
 public:
34
  SourcePositionTable() = default;
35

36
  void SetPosition(int pc_offset, int line, int inlining_id);
37
  int GetSourceLineNumber(int pc_offset) const;
38 39 40
  int GetInliningId(int pc_offset) const;

  void print() const;
41 42

 private:
43 44
  struct SourcePositionTuple {
    bool operator<(const SourcePositionTuple& other) const {
45 46 47 48
      return pc_offset < other.pc_offset;
    }
    int pc_offset;
    int line_number;
49
    int inlining_id;
50
  };
51
  // This is logically a map, but we store it as a vector of tuples, sorted by
52 53
  // the pc offset, so that we can save space and look up items using binary
  // search.
54
  std::vector<SourcePositionTuple> pc_offsets_to_lines_;
55
  DISALLOW_COPY_AND_ASSIGN(SourcePositionTable);
56 57
};

58
struct CodeEntryAndLineNumber;
59

60 61
class CodeEntry {
 public:
62
  // CodeEntry doesn't own name strings, just references them.
63
  inline CodeEntry(CodeEventListener::LogEventsAndTags tag, const char* name,
64
                   const char* resource_name = CodeEntry::kEmptyResourceName,
65
                   int line_number = v8::CpuProfileNode::kNoLineNumberInfo,
66
                   int column_number = v8::CpuProfileNode::kNoColumnNumberInfo,
67
                   std::unique_ptr<SourcePositionTable> line_info = nullptr,
68 69
                   Address instruction_start = kNullAddress,
                   bool is_shared_cross_origin = false);
70

71 72 73
  const char* name() const { return name_; }
  const char* resource_name() const { return resource_name_; }
  int line_number() const { return line_number_; }
74
  int column_number() const { return column_number_; }
75
  const SourcePositionTable* line_info() const { return line_info_.get(); }
76 77
  int script_id() const { return script_id_; }
  void set_script_id(int script_id) { script_id_ = script_id; }
78 79
  int position() const { return position_; }
  void set_position(int position) { position_ = position; }
80
  void set_bailout_reason(const char* bailout_reason) {
81 82 83 84
    EnsureRareData()->bailout_reason_ = bailout_reason;
  }
  const char* bailout_reason() const {
    return rare_data_ ? rare_data_->bailout_reason_ : kEmptyBailoutReason;
85
  }
86

87 88 89
  void set_deopt_info(const char* deopt_reason, int deopt_id,
                      std::vector<CpuProfileDeoptFrame> inlined_frames);

90
  CpuProfileDeoptInfo GetDeoptInfo();
91 92 93
  bool has_deopt_info() const {
    return rare_data_ && rare_data_->deopt_id_ != kNoDeoptimizationId;
  }
94
  void clear_deopt_info() {
95 96 97 98
    if (!rare_data_) return;
    // TODO(alph): Clear rare_data_ if that was the only field in use.
    rare_data_->deopt_reason_ = kNoDeoptReason;
    rare_data_->deopt_id_ = kNoDeoptimizationId;
99
  }
100 101
  void mark_used() { bit_field_ = UsedField::update(bit_field_, true); }
  bool used() const { return UsedField::decode(bit_field_); }
102

103
  void FillFunctionInfo(SharedFunctionInfo shared);
104

105
  void SetBuiltinId(Builtins::Name id);
106 107 108
  Builtins::Name builtin_id() const {
    return BuiltinIdField::decode(bit_field_);
  }
109

110 111 112 113
  bool is_shared_cross_origin() const {
    return SharedCrossOriginField::decode(bit_field_);
  }

114
  uint32_t GetHash() const;
115
  bool IsSameFunctionAs(const CodeEntry* entry) const;
116

117 118
  int GetSourceLine(int pc_offset) const;

119 120 121 122 123 124 125 126 127 128 129 130
  struct Equals {
    bool operator()(const std::unique_ptr<CodeEntry>& lhs,
                    const std::unique_ptr<CodeEntry>& rhs) const {
      return lhs.get()->IsSameFunctionAs(rhs.get());
    }
  };
  struct Hasher {
    std::size_t operator()(const std::unique_ptr<CodeEntry>& e) const {
      return e->GetHash();
    }
  };

131
  void SetInlineStacks(
132 133 134 135 136 137
      std::unordered_set<std::unique_ptr<CodeEntry>, Hasher, Equals>
          inline_entries,
      std::unordered_map<int, std::vector<CodeEntryAndLineNumber>>
          inline_stacks);
  const std::vector<CodeEntryAndLineNumber>* GetInlineStack(
      int pc_offset) const;
138

139
  void set_instruction_start(Address start) { instruction_start_ = start; }
140
  Address instruction_start() const { return instruction_start_; }
141

142 143 144
  CodeEventListener::LogEventsAndTags tag() const {
    return TagField::decode(bit_field_);
  }
145

146
  static const char* const kWasmResourceNamePrefix;
147
  V8_EXPORT_PRIVATE static const char* const kEmptyResourceName;
148
  static const char* const kEmptyBailoutReason;
149
  static const char* const kNoDeoptReason;
150

151 152
  V8_EXPORT_PRIVATE static const char* const kProgramEntryName;
  V8_EXPORT_PRIVATE static const char* const kIdleEntryName;
lpy's avatar
lpy committed
153 154 155
  static const char* const kGarbageCollectorEntryName;
  // Used to represent frames for which we have no reliable way to
  // detect function.
156
  V8_EXPORT_PRIVATE static const char* const kUnresolvedFunctionName;
157
  V8_EXPORT_PRIVATE static const char* const kRootEntryName;
lpy's avatar
lpy committed
158 159 160 161 162 163 164 165 166

  V8_INLINE static CodeEntry* program_entry() {
    return kProgramEntry.Pointer();
  }
  V8_INLINE static CodeEntry* idle_entry() { return kIdleEntry.Pointer(); }
  V8_INLINE static CodeEntry* gc_entry() { return kGCEntry.Pointer(); }
  V8_INLINE static CodeEntry* unresolved_entry() {
    return kUnresolvedEntry.Pointer();
  }
167
  V8_INLINE static CodeEntry* root_entry() { return kRootEntry.Pointer(); }
lpy's avatar
lpy committed
168

169 170
  void print() const;

171
 private:
172 173
  struct RareData {
    const char* deopt_reason_ = kNoDeoptReason;
174
    const char* bailout_reason_ = kEmptyBailoutReason;
175
    int deopt_id_ = kNoDeoptimizationId;
176 177 178
    std::unordered_map<int, std::vector<CodeEntryAndLineNumber>> inline_stacks_;
    std::unordered_set<std::unique_ptr<CodeEntry>, Hasher, Equals>
        inline_entries_;
179
    std::vector<CpuProfileDeoptFrame> deopt_inlined_frames_;
180 181 182 183
  };

  RareData* EnsureRareData();

184
  struct V8_EXPORT_PRIVATE ProgramEntryCreateTrait {
lpy's avatar
lpy committed
185 186
    static CodeEntry* Create();
  };
187
  struct V8_EXPORT_PRIVATE IdleEntryCreateTrait {
lpy's avatar
lpy committed
188 189
    static CodeEntry* Create();
  };
190
  struct V8_EXPORT_PRIVATE GCEntryCreateTrait {
lpy's avatar
lpy committed
191 192
    static CodeEntry* Create();
  };
193
  struct V8_EXPORT_PRIVATE UnresolvedEntryCreateTrait {
lpy's avatar
lpy committed
194 195
    static CodeEntry* Create();
  };
196
  struct V8_EXPORT_PRIVATE RootEntryCreateTrait {
197 198
    static CodeEntry* Create();
  };
lpy's avatar
lpy committed
199

200 201 202 203 204 205 206 207 208 209
  V8_EXPORT_PRIVATE static base::LazyDynamicInstance<
      CodeEntry, ProgramEntryCreateTrait>::type kProgramEntry;
  V8_EXPORT_PRIVATE static base::LazyDynamicInstance<
      CodeEntry, IdleEntryCreateTrait>::type kIdleEntry;
  V8_EXPORT_PRIVATE static base::LazyDynamicInstance<
      CodeEntry, GCEntryCreateTrait>::type kGCEntry;
  V8_EXPORT_PRIVATE static base::LazyDynamicInstance<
      CodeEntry, UnresolvedEntryCreateTrait>::type kUnresolvedEntry;
  V8_EXPORT_PRIVATE static base::LazyDynamicInstance<
      CodeEntry, RootEntryCreateTrait>::type kRootEntry;
lpy's avatar
lpy committed
210

211
  using TagField = BitField<CodeEventListener::LogEventsAndTags, 0, 8>;
212 213 214 215 216
  using BuiltinIdField = BitField<Builtins::Name, 8, 22>;
  static_assert(Builtins::builtin_count <= BuiltinIdField::kNumValues,
                "builtin_count exceeds size of bitfield");
  using UsedField = BitField<bool, 30, 1>;
  using SharedCrossOriginField = BitField<bool, 31, 1>;
217 218

  uint32_t bit_field_;
219 220 221
  const char* name_;
  const char* resource_name_;
  int line_number_;
222
  int column_number_;
223
  int script_id_;
224
  int position_;
225
  std::unique_ptr<SourcePositionTable> line_info_;
226
  Address instruction_start_;
227
  std::unique_ptr<RareData> rare_data_;
228

229
  DISALLOW_COPY_AND_ASSIGN(CodeEntry);
230 231
};

232 233 234 235 236
struct CodeEntryAndLineNumber {
  CodeEntry* code_entry;
  int line_number;
};

237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266
struct ProfileStackFrame {
  CodeEntryAndLineNumber entry;
  Address native_context;
  bool filterable;  // If true, the frame should be filtered by context (if a
                    // filter is present).
};

typedef std::vector<ProfileStackFrame> ProfileStackTrace;

// Filters stack frames from sources other than a target native context.
class ContextFilter {
 public:
  explicit ContextFilter(Address native_context_address)
      : native_context_address_(native_context_address) {}

  // Returns true if the stack frame passes a context check.
  bool Accept(const ProfileStackFrame&);

  // Invoked when a native context has changed address.
  void OnMoveEvent(Address from_address, Address to_address);

  // Update the context's tracked address based on VM-thread events.
  void set_native_context_address(Address address) {
    native_context_address_ = address;
  }
  Address native_context_address() const { return native_context_address_; }

 private:
  Address native_context_address_;
};
267

268 269
class ProfileTree;

270
class V8_EXPORT_PRIVATE ProfileNode {
271
 public:
272 273
  inline ProfileNode(ProfileTree* tree, CodeEntry* entry, ProfileNode* parent,
                     int line_number = 0);
274

275 276 277 278
  ProfileNode* FindChild(
      CodeEntry* entry,
      int line_number = v8::CpuProfileNode::kNoLineNumberInfo);
  ProfileNode* FindOrAddChild(CodeEntry* entry, int line_number = 0);
279 280
  void IncrementSelfTicks() { ++self_ticks_; }
  void IncreaseSelfTicks(unsigned amount) { self_ticks_ += amount; }
281
  void IncrementLineTicks(int src_line);
282

283 284
  CodeEntry* entry() const { return entry_; }
  unsigned self_ticks() const { return self_ticks_; }
285
  const std::vector<ProfileNode*>* children() const { return &children_list_; }
286
  unsigned id() const { return id_; }
287
  unsigned function_id() const;
288
  ProfileNode* parent() const { return parent_; }
289 290 291
  int line_number() const {
    return line_number_ != 0 ? line_number_ : entry_->line_number();
  }
292
  CpuProfileNode::SourceType source_type() const;
293

294 295 296
  unsigned int GetHitLineCount() const {
    return static_cast<unsigned int>(line_ticks_.size());
  }
297 298
  bool GetLineTicks(v8::CpuProfileNode::LineTick* entries,
                    unsigned int length) const;
299
  void CollectDeoptInfo(CodeEntry* entry);
300 301 302
  const std::vector<CpuProfileDeoptInfo>& deopt_infos() const {
    return deopt_infos_;
  }
303
  Isolate* isolate() const;
304 305 306

  void Print(int indent);

307
 private:
308 309 310 311 312
  struct Equals {
    bool operator()(CodeEntryAndLineNumber lhs,
                    CodeEntryAndLineNumber rhs) const {
      return lhs.code_entry->IsSameFunctionAs(rhs.code_entry) &&
             lhs.line_number == rhs.line_number;
313 314
    }
  };
315 316
  struct Hasher {
    std::size_t operator()(CodeEntryAndLineNumber pair) const {
317
      return pair.code_entry->GetHash() ^ ComputeUnseededHash(pair.line_number);
318
    }
319
  };
320

321
  ProfileTree* tree_;
322 323
  CodeEntry* entry_;
  unsigned self_ticks_;
324
  std::unordered_map<CodeEntryAndLineNumber, ProfileNode*, Hasher, Equals>
325
      children_;
326
  int line_number_;
327
  std::vector<ProfileNode*> children_list_;
328
  ProfileNode* parent_;
329
  unsigned id_;
330 331
  // maps line number --> number of ticks
  std::unordered_map<int, int> line_ticks_;
332

333
  std::vector<CpuProfileDeoptInfo> deopt_infos_;
334

335 336 337
  DISALLOW_COPY_AND_ASSIGN(ProfileNode);
};

338
class V8_EXPORT_PRIVATE ProfileTree {
339
 public:
340
  explicit ProfileTree(Isolate* isolate);
341 342
  ~ProfileTree();

343
  using ProfilingMode = v8::CpuProfilingMode;
344

345
  ProfileNode* AddPathFromEnd(
346
      const std::vector<CodeEntry*>& path,
347 348
      int src_line = v8::CpuProfileNode::kNoLineNumberInfo,
      bool update_stats = true);
349 350 351 352
  ProfileNode* AddPathFromEnd(
      const ProfileStackTrace& path,
      int src_line = v8::CpuProfileNode::kNoLineNumberInfo,
      bool update_stats = true,
353 354
      ProfilingMode mode = ProfilingMode::kLeafNodeLineNumbers,
      ContextFilter* context_filter = nullptr);
355
  ProfileNode* root() const { return root_; }
356
  unsigned next_node_id() { return next_node_id_++; }
357
  unsigned GetFunctionId(const ProfileNode* node);
358

359 360 361 362
  void Print() {
    root_->Print(0);
  }

363 364
  Isolate* isolate() const { return isolate_; }

365 366 367 368 369 370
  void EnqueueNode(const ProfileNode* node) { pending_nodes_.push_back(node); }
  size_t pending_nodes_count() const { return pending_nodes_.size(); }
  std::vector<const ProfileNode*> TakePendingNodes() {
    return std::move(pending_nodes_);
  }

371 372
 private:
  template <typename Callback>
373
  void TraverseDepthFirst(Callback* callback);
374

375 376
  std::vector<const ProfileNode*> pending_nodes_;

377
  unsigned next_node_id_;
378
  ProfileNode* root_;
379
  Isolate* isolate_;
380

381
  unsigned next_function_id_;
382
  std::unordered_map<CodeEntry*, unsigned> function_ids_;
383

384 385 386
  DISALLOW_COPY_AND_ASSIGN(ProfileTree);
};

387
class CpuProfiler;
388

389
class CpuProfile {
390
 public:
391 392 393 394 395 396
  struct SampleInfo {
    ProfileNode* node;
    base::TimeTicks timestamp;
    int line;
  };

397 398
  V8_EXPORT_PRIVATE CpuProfile(CpuProfiler* profiler, const char* title,
                               CpuProfilingOptions options);
399

400 401 402
  // Checks whether or not the given TickSample should be (sub)sampled, given
  // the sampling interval of the profiler that recorded it (in microseconds).
  V8_EXPORT_PRIVATE bool CheckSubsample(base::TimeDelta sampling_interval);
403
  // Add pc -> ... -> main() call path to the profile.
404
  void AddPath(base::TimeTicks timestamp, const ProfileStackTrace& path,
405 406
               int src_line, bool update_stats,
               base::TimeDelta sampling_interval);
407
  void FinishProfile();
408

409 410
  const char* title() const { return title_; }
  const ProfileTree* top_down() const { return &top_down_; }
411

412
  int samples_count() const { return static_cast<int>(samples_.size()); }
413
  const SampleInfo& sample(int index) const { return samples_[index]; }
414

415 416 417 418
  int64_t sampling_interval_us() const {
    return options_.sampling_interval_us();
  }

419 420
  base::TimeTicks start_time() const { return start_time_; }
  base::TimeTicks end_time() const { return end_time_; }
421
  CpuProfiler* cpu_profiler() const { return profiler_; }
422
  ContextFilter* context_filter() const { return context_filter_.get(); }
423

424 425
  void UpdateTicksScale();

426
  V8_EXPORT_PRIVATE void Print();
427 428

 private:
429 430
  void StreamPendingTraceEvents();

431
  const char* title_;
432
  const CpuProfilingOptions options_;
433
  std::unique_ptr<ContextFilter> context_filter_;
434 435
  base::TimeTicks start_time_;
  base::TimeTicks end_time_;
436
  std::deque<SampleInfo> samples_;
437
  ProfileTree top_down_;
438
  CpuProfiler* const profiler_;
439
  size_t streaming_next_sample_;
440
  uint32_t id_;
441 442 443
  // Number of microseconds worth of profiler ticks that should elapse before
  // the next sample is recorded.
  base::TimeDelta next_sample_delta_;
444 445

  static std::atomic<uint32_t> last_id_;
446 447 448 449

  DISALLOW_COPY_AND_ASSIGN(CpuProfile);
};

450
class V8_EXPORT_PRIVATE CodeMap {
451
 public:
452
  CodeMap();
453
  ~CodeMap();
454

455 456
  void AddCode(Address addr, CodeEntry* entry, unsigned size);
  void MoveCode(Address from, Address to);
yurys's avatar
yurys committed
457
  CodeEntry* FindEntry(Address addr);
458 459
  void Print();

460
 private:
461
  struct CodeEntryMapInfo {
462
    unsigned index;
463 464 465
    unsigned size;
  };

466 467 468 469 470 471 472
  union CodeEntrySlotInfo {
    CodeEntry* entry;
    unsigned next_free_slot;
  };

  static constexpr unsigned kNoFreeSlot = std::numeric_limits<unsigned>::max();

473
  void ClearCodesInRange(Address start, Address end);
474 475 476 477
  unsigned AddCodeEntry(Address start, CodeEntry*);
  void DeleteCodeEntry(unsigned index);

  CodeEntry* entry(unsigned index) { return code_entries_[index].entry; }
478

479 480 481
  std::deque<CodeEntrySlotInfo> code_entries_;
  std::map<Address, CodeEntryMapInfo> code_map_;
  unsigned free_list_head_ = kNoFreeSlot;
482 483 484 485

  DISALLOW_COPY_AND_ASSIGN(CodeMap);
};

486
class V8_EXPORT_PRIVATE CpuProfilesCollection {
487
 public:
488
  explicit CpuProfilesCollection(Isolate* isolate);
489

490
  void set_cpu_profiler(CpuProfiler* profiler) { profiler_ = profiler; }
491
  bool StartProfiling(const char* title, CpuProfilingOptions options = {});
492

493
  CpuProfile* StopProfiling(const char* title);
494 495 496
  std::vector<std::unique_ptr<CpuProfile>>* profiles() {
    return &finished_profiles_;
  }
497
  const char* GetName(Name name) { return resource_names_.GetName(name); }
498
  bool IsLastProfile(const char* title);
499
  void RemoveProfile(CpuProfile* profile);
500

501 502 503 504 505
  // Finds a common sampling interval dividing each CpuProfile's interval,
  // rounded up to the nearest multiple of the CpuProfiler's sampling interval.
  // Returns 0 if no profiles are attached.
  base::TimeDelta GetCommonSamplingInterval() const;

506
  // Called from profile generator thread.
507
  void AddPathToCurrentProfiles(base::TimeTicks timestamp,
508
                                const ProfileStackTrace& path, int src_line,
509 510
                                bool update_stats,
                                base::TimeDelta sampling_interval);
511

512 513 514
  // Called from profile generator thread.
  void UpdateNativeContextAddressForCurrentProfiles(Address from, Address to);

515 516 517
  // Limits the number of profiles that can be simultaneously collected.
  static const int kMaxSimultaneousProfiles = 100;

518
 private:
lpy's avatar
lpy committed
519
  StringsStorage resource_names_;
520
  std::vector<std::unique_ptr<CpuProfile>> finished_profiles_;
521
  CpuProfiler* profiler_;
522

523
  // Accessed by VM thread and profile generator thread.
524
  std::vector<std::unique_ptr<CpuProfile>> current_profiles_;
525
  base::Semaphore current_profiles_semaphore_;
526 527 528 529

  DISALLOW_COPY_AND_ASSIGN(CpuProfilesCollection);
};

530
class V8_EXPORT_PRIVATE ProfileGenerator {
531
 public:
532
  explicit ProfileGenerator(CpuProfilesCollection* profiles, CodeMap* code_map);
533 534 535

  void RecordTickSample(const TickSample& sample);

536 537
  void UpdateNativeContextAddress(Address from, Address to);

538
  CodeMap* code_map() { return code_map_; }
539 540

 private:
541
  CodeEntry* FindEntry(Address address);
542
  CodeEntry* EntryForVMState(StateTag tag);
543

544
  CpuProfilesCollection* profiles_;
545
  CodeMap* const code_map_;
546 547 548 549

  DISALLOW_COPY_AND_ASSIGN(ProfileGenerator);
};

550 551
}  // namespace internal
}  // namespace v8
552

553
#endif  // V8_PROFILER_PROFILE_GENERATOR_H_