// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_PROFILER_PROFILE_GENERATOR_H_
#define V8_PROFILER_PROFILE_GENERATOR_H_

#include <atomic>
#include <deque>
#include <limits>
#include <map>
#include <memory>
#include <unordered_map>
#include <unordered_set>
#include <utility>
#include <vector>

#include "include/v8-profiler.h"
#include "src/base/platform/time.h"
#include "src/builtins/builtins.h"
#include "src/codegen/source-position.h"
#include "src/logging/code-events.h"
#include "src/profiler/strings-storage.h"
#include "src/utils/allocation.h"

namespace v8 {
namespace internal {

struct TickSample;

// Provides a mapping from the offsets within generated code or a bytecode array
// to the source line and inlining id.
class V8_EXPORT_PRIVATE SourcePositionTable : public Malloced {
 public:
  SourcePositionTable() = default;

  void SetPosition(int pc_offset, int line, int inlining_id);
  int GetSourceLineNumber(int pc_offset) const;
  int GetInliningId(int pc_offset) const;

  void print() const;

 private:
  struct SourcePositionTuple {
    bool operator<(const SourcePositionTuple& other) const {
      return pc_offset < other.pc_offset;
    }
    int pc_offset;
    int line_number;
    int inlining_id;
  };
  // This is logically a map, but we store it as a vector of tuples, sorted by
  // the pc offset, so that we can save space and look up items using binary
  // search.
  std::vector<SourcePositionTuple> pc_offsets_to_lines_;
  DISALLOW_COPY_AND_ASSIGN(SourcePositionTable);
};
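
// A minimal sketch (illustrative only, not part of V8) of the lookup that the
// sorted pc_offsets_to_lines_ vector enables: binary-search for the last
// tuple at or before a given pc offset.
//
//   if (pc_offsets_to_lines_.empty())
//     return v8::CpuProfileNode::kNoLineNumberInfo;
//   SourcePositionTuple key{pc_offset, 0, 0};
//   auto it = std::upper_bound(pc_offsets_to_lines_.begin(),
//                              pc_offsets_to_lines_.end(), key);
//   if (it != pc_offsets_to_lines_.begin()) --it;  // last tuple <= pc_offset
//   return it->line_number;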

struct CodeEntryAndLineNumber;

class CodeEntry {
 public:
  // CodeEntry doesn't own name strings, just references them.
  inline CodeEntry(CodeEventListener::LogEventsAndTags tag, const char* name,
                   const char* resource_name = CodeEntry::kEmptyResourceName,
                   int line_number = v8::CpuProfileNode::kNoLineNumberInfo,
                   int column_number = v8::CpuProfileNode::kNoColumnNumberInfo,
                   std::unique_ptr<SourcePositionTable> line_info = nullptr,
                   Address instruction_start = kNullAddress,
                   bool is_shared_cross_origin = false);

  const char* name() const { return name_; }
  const char* resource_name() const { return resource_name_; }
  int line_number() const { return line_number_; }
  int column_number() const { return column_number_; }
  const SourcePositionTable* line_info() const { return line_info_.get(); }
  int script_id() const { return script_id_; }
  void set_script_id(int script_id) { script_id_ = script_id; }
  int position() const { return position_; }
  void set_position(int position) { position_ = position; }
  void set_bailout_reason(const char* bailout_reason) {
    EnsureRareData()->bailout_reason_ = bailout_reason;
  }
  const char* bailout_reason() const {
    return rare_data_ ? rare_data_->bailout_reason_ : kEmptyBailoutReason;
  }

  void set_deopt_info(const char* deopt_reason, int deopt_id,
                      std::vector<CpuProfileDeoptFrame> inlined_frames);

  CpuProfileDeoptInfo GetDeoptInfo();
  bool has_deopt_info() const {
    return rare_data_ && rare_data_->deopt_id_ != kNoDeoptimizationId;
  }
  void clear_deopt_info() {
    if (!rare_data_) return;
    // TODO(alph): Clear rare_data_ if that was the only field in use.
    rare_data_->deopt_reason_ = kNoDeoptReason;
    rare_data_->deopt_id_ = kNoDeoptimizationId;
  }
  void mark_used() { bit_field_ = UsedField::update(bit_field_, true); }
  bool used() const { return UsedField::decode(bit_field_); }

  void FillFunctionInfo(SharedFunctionInfo shared);

  void SetBuiltinId(Builtins::Name id);
  Builtins::Name builtin_id() const {
    return BuiltinIdField::decode(bit_field_);
  }

  bool is_shared_cross_origin() const {
    return SharedCrossOriginField::decode(bit_field_);
  }

  uint32_t GetHash() const;
  bool IsSameFunctionAs(const CodeEntry* entry) const;

  int GetSourceLine(int pc_offset) const;

  struct Equals {
    bool operator()(const std::unique_ptr<CodeEntry>& lhs,
                    const std::unique_ptr<CodeEntry>& rhs) const {
      return lhs.get()->IsSameFunctionAs(rhs.get());
    }
  };
  struct Hasher {
    std::size_t operator()(const std::unique_ptr<CodeEntry>& e) const {
      return e->GetHash();
    }
  };

  void SetInlineStacks(
      std::unordered_set<std::unique_ptr<CodeEntry>, Hasher, Equals>
          inline_entries,
      std::unordered_map<int, std::vector<CodeEntryAndLineNumber>>
          inline_stacks);
  const std::vector<CodeEntryAndLineNumber>* GetInlineStack(
      int pc_offset) const;

  void set_instruction_start(Address start) { instruction_start_ = start; }
  Address instruction_start() const { return instruction_start_; }

  CodeEventListener::LogEventsAndTags tag() const {
    return TagField::decode(bit_field_);
  }

  static const char* const kWasmResourceNamePrefix;
  V8_EXPORT_PRIVATE static const char* const kEmptyResourceName;
  static const char* const kEmptyBailoutReason;
  static const char* const kNoDeoptReason;

  V8_EXPORT_PRIVATE static const char* const kProgramEntryName;
  V8_EXPORT_PRIVATE static const char* const kIdleEntryName;
  static const char* const kGarbageCollectorEntryName;
  // Used to represent frames for which we have no reliable way to
  // detect the function.
  V8_EXPORT_PRIVATE static const char* const kUnresolvedFunctionName;
  V8_EXPORT_PRIVATE static const char* const kRootEntryName;

  V8_INLINE static CodeEntry* program_entry() {
    return kProgramEntry.Pointer();
  }
  V8_INLINE static CodeEntry* idle_entry() { return kIdleEntry.Pointer(); }
  V8_INLINE static CodeEntry* gc_entry() { return kGCEntry.Pointer(); }
  V8_INLINE static CodeEntry* unresolved_entry() {
    return kUnresolvedEntry.Pointer();
  }
  V8_INLINE static CodeEntry* root_entry() { return kRootEntry.Pointer(); }

  void print() const;

 private:
  struct RareData {
    const char* deopt_reason_ = kNoDeoptReason;
    const char* bailout_reason_ = kEmptyBailoutReason;
    int deopt_id_ = kNoDeoptimizationId;
    std::unordered_map<int, std::vector<CodeEntryAndLineNumber>>
        inline_stacks_;
    std::unordered_set<std::unique_ptr<CodeEntry>, Hasher, Equals>
        inline_entries_;
    std::vector<CpuProfileDeoptFrame> deopt_inlined_frames_;
  };

  RareData* EnsureRareData();

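  // Note: deopt, bailout, and inlining data are rarely needed, so they live
  // behind rare_data_ and are allocated on first use via EnsureRareData();
  // the common-case CodeEntry stays small, and the accessors above null-check
  // rare_data_ before touching it.
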
  struct V8_EXPORT_PRIVATE ProgramEntryCreateTrait {
    static CodeEntry* Create();
  };
  struct V8_EXPORT_PRIVATE IdleEntryCreateTrait {
    static CodeEntry* Create();
  };
  struct V8_EXPORT_PRIVATE GCEntryCreateTrait {
    static CodeEntry* Create();
  };
  struct V8_EXPORT_PRIVATE UnresolvedEntryCreateTrait {
    static CodeEntry* Create();
  };
  struct V8_EXPORT_PRIVATE RootEntryCreateTrait {
    static CodeEntry* Create();
  };

  V8_EXPORT_PRIVATE static base::LazyDynamicInstance<
      CodeEntry, ProgramEntryCreateTrait>::type kProgramEntry;
  V8_EXPORT_PRIVATE static base::LazyDynamicInstance<
      CodeEntry, IdleEntryCreateTrait>::type kIdleEntry;
  V8_EXPORT_PRIVATE static base::LazyDynamicInstance<
      CodeEntry, GCEntryCreateTrait>::type kGCEntry;
  V8_EXPORT_PRIVATE static base::LazyDynamicInstance<
      CodeEntry, UnresolvedEntryCreateTrait>::type kUnresolvedEntry;
  V8_EXPORT_PRIVATE static base::LazyDynamicInstance<
      CodeEntry, RootEntryCreateTrait>::type kRootEntry;

  using TagField = BitField<CodeEventListener::LogEventsAndTags, 0, 8>;
  using BuiltinIdField = BitField<Builtins::Name, 8, 22>;
  static_assert(Builtins::builtin_count <= BuiltinIdField::kNumValues,
                "builtin_count exceeds size of bitfield");
  using UsedField = BitField<bool, 30, 1>;
  using SharedCrossOriginField = BitField<bool, 31, 1>;
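  // The four BitFields above pack into the single 32-bit bit_field_ below:
  // bits 0-7 hold the log-event tag, bits 8-29 the builtin id, bit 30 the
  // used flag, and bit 31 the shared-cross-origin flag.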

  uint32_t bit_field_;
  const char* name_;
  const char* resource_name_;
  int line_number_;
  int column_number_;
  int script_id_;
  int position_;
  std::unique_ptr<SourcePositionTable> line_info_;
  Address instruction_start_;
  std::unique_ptr<RareData> rare_data_;

  DISALLOW_COPY_AND_ASSIGN(CodeEntry);
};
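
// Illustrative usage (hypothetical values, not an actual V8 call site): the
// profiler creates a CodeEntry per logged code object and registers it with
// a CodeMap keyed by instruction address:
//
//   auto* entry = new CodeEntry(CodeEventListener::FUNCTION_TAG, "foo",
//                               "script.js", /*line_number=*/1);
//   code_map->AddCode(instruction_start, entry, code_size);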

struct CodeEntryAndLineNumber {
  CodeEntry* code_entry;
  int line_number;
};

using ProfileStackTrace = std::vector<CodeEntryAndLineNumber>;

class ProfileTree;

class V8_EXPORT_PRIVATE ProfileNode {
 public:
  inline ProfileNode(ProfileTree* tree, CodeEntry* entry, ProfileNode* parent,
                     int line_number = 0);

  ProfileNode* FindChild(
      CodeEntry* entry,
      int line_number = v8::CpuProfileNode::kNoLineNumberInfo);
  ProfileNode* FindOrAddChild(CodeEntry* entry, int line_number = 0);
  void IncrementSelfTicks() { ++self_ticks_; }
  void IncreaseSelfTicks(unsigned amount) { self_ticks_ += amount; }
  void IncrementLineTicks(int src_line);

  CodeEntry* entry() const { return entry_; }
  unsigned self_ticks() const { return self_ticks_; }
  const std::vector<ProfileNode*>* children() const { return &children_list_; }
  unsigned id() const { return id_; }
  unsigned function_id() const;
  ProfileNode* parent() const { return parent_; }
  int line_number() const {
    return line_number_ != 0 ? line_number_ : entry_->line_number();
  }
  CpuProfileNode::SourceType source_type() const;

  unsigned int GetHitLineCount() const {
    return static_cast<unsigned int>(line_ticks_.size());
  }
  bool GetLineTicks(v8::CpuProfileNode::LineTick* entries,
                    unsigned int length) const;
  void CollectDeoptInfo(CodeEntry* entry);
  const std::vector<CpuProfileDeoptInfo>& deopt_infos() const {
    return deopt_infos_;
  }
  Isolate* isolate() const;

  void Print(int indent);

 private:
  struct Equals {
    bool operator()(CodeEntryAndLineNumber lhs,
                    CodeEntryAndLineNumber rhs) const {
      return lhs.code_entry->IsSameFunctionAs(rhs.code_entry) &&
             lhs.line_number == rhs.line_number;
    }
  };
  struct Hasher {
    std::size_t operator()(CodeEntryAndLineNumber pair) const {
      return pair.code_entry->GetHash() ^ ComputeUnseededHash(pair.line_number);
    }
  };

  ProfileTree* tree_;
  CodeEntry* entry_;
  unsigned self_ticks_;
  std::unordered_map<CodeEntryAndLineNumber, ProfileNode*, Hasher, Equals>
      children_;
  int line_number_;
  std::vector<ProfileNode*> children_list_;
  ProfileNode* parent_;
  unsigned id_;
  // maps line number --> number of ticks
  std::unordered_map<int, int> line_ticks_;

  std::vector<CpuProfileDeoptInfo> deopt_infos_;

  DISALLOW_COPY_AND_ASSIGN(ProfileNode);
};
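
// Note: children_ is keyed by (CodeEntry, line number) rather than by
// CodeEntry alone, which is what lets caller-line-numbers profiling split a
// callee into a distinct node per call-site line.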

class V8_EXPORT_PRIVATE ProfileTree {
 public:
  explicit ProfileTree(Isolate* isolate);
  ~ProfileTree();

  using ProfilingMode = v8::CpuProfilingMode;

  ProfileNode* AddPathFromEnd(
      const std::vector<CodeEntry*>& path,
      int src_line = v8::CpuProfileNode::kNoLineNumberInfo,
      bool update_stats = true);
  ProfileNode* AddPathFromEnd(
      const ProfileStackTrace& path,
      int src_line = v8::CpuProfileNode::kNoLineNumberInfo,
      bool update_stats = true,
      ProfilingMode mode = ProfilingMode::kLeafNodeLineNumbers);
  ProfileNode* root() const { return root_; }
  unsigned next_node_id() { return next_node_id_++; }
  unsigned GetFunctionId(const ProfileNode* node);

  void Print() { root_->Print(0); }

  Isolate* isolate() const { return isolate_; }

  void EnqueueNode(const ProfileNode* node) { pending_nodes_.push_back(node); }
  size_t pending_nodes_count() const { return pending_nodes_.size(); }
  std::vector<const ProfileNode*> TakePendingNodes() {
    return std::move(pending_nodes_);
  }

 private:
  template <typename Callback>
  void TraverseDepthFirst(Callback* callback);

  std::vector<const ProfileNode*> pending_nodes_;

  unsigned next_node_id_;
  ProfileNode* root_;
  Isolate* isolate_;

  unsigned next_function_id_;
  std::unordered_map<CodeEntry*, unsigned> function_ids_;

  DISALLOW_COPY_AND_ASSIGN(ProfileTree);
};
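
// Illustrative usage (hypothetical entries, not V8 code): sampled stacks are
// leaf-first (pc -> ... -> main), and AddPathFromEnd inserts them starting
// from the outermost frame, returning the leaf node that received the tick:
//
//   ProfileTree tree(isolate);
//   ProfileStackTrace path = {{leaf_entry, 3}, {caller_entry, 10}};
//   ProfileNode* leaf = tree.AddPathFromEnd(path);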

class CpuProfiler;

class CpuProfile {
 public:
  struct SampleInfo {
    ProfileNode* node;
    base::TimeTicks timestamp;
    int line;
  };

  V8_EXPORT_PRIVATE CpuProfile(CpuProfiler* profiler, const char* title,
                               CpuProfilingOptions options);

  // Checks whether or not the given TickSample should be (sub)sampled, given
  // the sampling interval of the profiler that recorded it (in microseconds).
  V8_EXPORT_PRIVATE bool CheckSubsample(base::TimeDelta sampling_interval);
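  // A minimal sketch (assumed, simplified; see the .cc for the real logic):
  // accumulate each tick's interval into next_sample_delta_ and record a
  // sample only once the profile's requested interval has elapsed.
  //
  //   next_sample_delta_ -= sampling_interval;
  //   if (next_sample_delta_ > base::TimeDelta()) return false;  // skip tick
  //   next_sample_delta_ += base::TimeDelta::FromMicroseconds(
  //       options_.sampling_interval_us());
  //   return true;  // record this sample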
  // Add pc -> ... -> main() call path to the profile.
  void AddPath(base::TimeTicks timestamp, const ProfileStackTrace& path,
               int src_line, bool update_stats,
               base::TimeDelta sampling_interval);
  void FinishProfile();

  const char* title() const { return title_; }
  const ProfileTree* top_down() const { return &top_down_; }

  int samples_count() const { return static_cast<int>(samples_.size()); }
  const SampleInfo& sample(int index) const { return samples_[index]; }

  int64_t sampling_interval_us() const {
    return options_.sampling_interval_us();
  }

  base::TimeTicks start_time() const { return start_time_; }
  base::TimeTicks end_time() const { return end_time_; }
  CpuProfiler* cpu_profiler() const { return profiler_; }

  void UpdateTicksScale();

  V8_EXPORT_PRIVATE void Print();

 private:
  void StreamPendingTraceEvents();

  const char* title_;
  const CpuProfilingOptions options_;
  base::TimeTicks start_time_;
  base::TimeTicks end_time_;
  std::deque<SampleInfo> samples_;
  ProfileTree top_down_;
  CpuProfiler* const profiler_;
  size_t streaming_next_sample_;
  uint32_t id_;
  // Number of microseconds worth of profiler ticks that should elapse before
  // the next sample is recorded.
  base::TimeDelta next_sample_delta_;

  static std::atomic<uint32_t> last_id_;

  DISALLOW_COPY_AND_ASSIGN(CpuProfile);
};

class V8_EXPORT_PRIVATE CodeMap {
 public:
  CodeMap();
  ~CodeMap();

  void AddCode(Address addr, CodeEntry* entry, unsigned size);
  void MoveCode(Address from, Address to);
  CodeEntry* FindEntry(Address addr);
  void Print();

 private:
  struct CodeEntryMapInfo {
    unsigned index;
    unsigned size;
  };

  union CodeEntrySlotInfo {
    CodeEntry* entry;
    unsigned next_free_slot;
  };

  static constexpr unsigned kNoFreeSlot = std::numeric_limits<unsigned>::max();
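  // Deleting an entry turns its slot in code_entries_ into a free-list link:
  // the union then stores the index of the next free slot, with
  // free_list_head_ pointing at the first one, so slots can be reused without
  // shifting the deque.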

  void ClearCodesInRange(Address start, Address end);
  unsigned AddCodeEntry(Address start, CodeEntry*);
  void DeleteCodeEntry(unsigned index);

  CodeEntry* entry(unsigned index) { return code_entries_[index].entry; }

  std::deque<CodeEntrySlotInfo> code_entries_;
  std::map<Address, CodeEntryMapInfo> code_map_;
  unsigned free_list_head_ = kNoFreeSlot;

  DISALLOW_COPY_AND_ASSIGN(CodeMap);
};
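
// Illustrative usage (hypothetical addresses, not V8 code):
//
//   CodeMap map;
//   map.AddCode(0x1000, entry, /*size=*/0x80);
//   map.MoveCode(0x1000, 0x2000);            // code object was relocated
//   CodeEntry* hit = map.FindEntry(0x2010);  // address inside the moved code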

class V8_EXPORT_PRIVATE CpuProfilesCollection {
 public:
  explicit CpuProfilesCollection(Isolate* isolate);

  void set_cpu_profiler(CpuProfiler* profiler) { profiler_ = profiler; }
  bool StartProfiling(const char* title, CpuProfilingOptions options = {});

  CpuProfile* StopProfiling(const char* title);
  std::vector<std::unique_ptr<CpuProfile>>* profiles() {
    return &finished_profiles_;
  }
  const char* GetName(Name name) { return resource_names_.GetName(name); }
  bool IsLastProfile(const char* title);
  void RemoveProfile(CpuProfile* profile);

  // Finds a common sampling interval dividing each CpuProfile's interval,
  // rounded up to the nearest multiple of the CpuProfiler's sampling interval.
  // Returns 0 if no profiles are attached.
  base::TimeDelta GetCommonSamplingInterval() const;

  // Called from profile generator thread.
  void AddPathToCurrentProfiles(base::TimeTicks timestamp,
                                const ProfileStackTrace& path, int src_line,
                                bool update_stats,
                                base::TimeDelta sampling_interval);

  // Limits the number of profiles that can be simultaneously collected.
  static const int kMaxSimultaneousProfiles = 100;

 private:
  StringsStorage resource_names_;
  std::vector<std::unique_ptr<CpuProfile>> finished_profiles_;
  CpuProfiler* profiler_;

  // Accessed by VM thread and profile generator thread.
  std::vector<std::unique_ptr<CpuProfile>> current_profiles_;
  base::Semaphore current_profiles_semaphore_;

  DISALLOW_COPY_AND_ASSIGN(CpuProfilesCollection);
};

class V8_EXPORT_PRIVATE ProfileGenerator {
 public:
  explicit ProfileGenerator(CpuProfilesCollection* profiles);

  void RecordTickSample(const TickSample& sample);

  CodeMap* code_map() { return &code_map_; }

 private:
  CodeEntry* FindEntry(Address address);
  CodeEntry* EntryForVMState(StateTag tag);

  CpuProfilesCollection* profiles_;
  CodeMap code_map_;

  DISALLOW_COPY_AND_ASSIGN(ProfileGenerator);
};
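
// Illustrative flow (simplified): the sampler produces TickSamples, and the
// generator resolves each frame address against its CodeMap before recording
// the path into every profile currently being collected:
//
//   ProfileGenerator generator(&profiles);
//   generator.RecordTickSample(sample);  // symbolize + add to live profiles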

}  // namespace internal
}  // namespace v8

#endif  // V8_PROFILER_PROFILE_GENERATOR_H_