// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OPTIMIZED_COMPILATION_INFO_H_
#define V8_OPTIMIZED_COMPILATION_INFO_H_

#include <memory>

#include "src/bailout-reason.h"
#include "src/code-reference.h"
#include "src/feedback-vector.h"
#include "src/frames.h"
#include "src/globals.h"
#include "src/handles.h"
#include "src/objects.h"
#include "src/source-position-table.h"
#include "src/utils.h"
#include "src/vector.h"

namespace v8 {
namespace internal {

24
class CoverageInfo;
25 26 27 28
class DeclarationScope;
class DeferredHandles;
class FunctionLiteral;
class Isolate;
29
class JavaScriptFrame;
30
class ParseInfo;
31
class SourceRangeMap;
32 33
class Zone;

34 35 36 37
// OptimizedCompilationInfo encapsulates the information needed to compile
// optimized code for a given function, and the results of the optimized
// compilation.
class V8_EXPORT_PRIVATE OptimizedCompilationInfo final {
38 39 40 41
 public:
  // Various configuration flags for a compilation, as well as some properties
  // of the compiled code produced by a compilation.
  enum Flag {
42 43 44
    kAccessorInliningEnabled = 1 << 0,
    kFunctionContextSpecializing = 1 << 1,
    kInliningEnabled = 1 << 2,
45 46 47 48 49 50 51 52 53 54 55
    kDisableFutureOptimization = 1 << 3,
    kSplittingEnabled = 1 << 4,
    kSourcePositionsEnabled = 1 << 5,
    kBailoutOnUninitialized = 1 << 6,
    kLoopPeelingEnabled = 1 << 7,
    kUntrustedCodeMitigations = 1 << 8,
    kSwitchJumpTableEnabled = 1 << 9,
    kCalledWithCodeStartRegister = 1 << 10,
    kPoisonRegisterArguments = 1 << 11,
    kAllocationFoldingEnabled = 1 << 12,
    kAnalyzeEnvironmentLiveness = 1 << 13,
56 57 58
    kTraceTurboJson = 1 << 14,
    kTraceTurboGraph = 1 << 15,
    kTraceTurboScheduled = 1 << 16,
59 60
  };

61
  // Construct a compilation info for optimized compilation.
62 63 64
  OptimizedCompilationInfo(Zone* zone, Isolate* isolate,
                           Handle<SharedFunctionInfo> shared,
                           Handle<JSFunction> closure);
65
  // Construct a compilation info for stub compilation, Wasm, and testing.
66 67
  OptimizedCompilationInfo(Vector<const char> debug_name, Zone* zone,
                           Code::Kind code_kind);
68

69
  ~OptimizedCompilationInfo();
70 71

  Zone* zone() { return zone_; }
72
  bool is_osr() const { return !osr_offset_.IsNone(); }
73 74
  Handle<SharedFunctionInfo> shared_info() const { return shared_info_; }
  bool has_shared_info() const { return !shared_info().is_null(); }
75
  Handle<JSFunction> closure() const { return closure_; }
76 77 78 79 80
  Handle<Code> code() const { return code_.as_js_code(); }

  wasm::WasmCode* wasm_code() const {
    return const_cast<wasm::WasmCode*>(code_.as_wasm_code());
  }
81 82 83 84 85
  AbstractCode::Kind abstract_code_kind() const { return code_kind_; }
  Code::Kind code_kind() const {
    DCHECK(code_kind_ < static_cast<AbstractCode::Kind>(Code::NUMBER_OF_KINDS));
    return static_cast<Code::Kind>(code_kind_);
  }
86 87
  uint32_t stub_key() const { return stub_key_; }
  void set_stub_key(uint32_t stub_key) { stub_key_ = stub_key; }
88 89
  int32_t builtin_index() const { return builtin_index_; }
  void set_builtin_index(int32_t index) { builtin_index_ = index; }
90
  BailoutId osr_offset() const { return osr_offset_; }
91
  JavaScriptFrame* osr_frame() const { return osr_frame_; }
92

93
  // Flags used by optimized compilation.
94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114

  void MarkAsFunctionContextSpecializing() {
    SetFlag(kFunctionContextSpecializing);
  }
  bool is_function_context_specializing() const {
    return GetFlag(kFunctionContextSpecializing);
  }

  void MarkAsAccessorInliningEnabled() { SetFlag(kAccessorInliningEnabled); }
  bool is_accessor_inlining_enabled() const {
    return GetFlag(kAccessorInliningEnabled);
  }

  void MarkAsSourcePositionsEnabled() { SetFlag(kSourcePositionsEnabled); }
  bool is_source_positions_enabled() const {
    return GetFlag(kSourcePositionsEnabled);
  }

  void MarkAsInliningEnabled() { SetFlag(kInliningEnabled); }
  bool is_inlining_enabled() const { return GetFlag(kInliningEnabled); }

115 116 117 118 119 120
  void SetPoisoningMitigationLevel(PoisoningMitigationLevel poisoning_level) {
    poisoning_level_ = poisoning_level;
  }
  PoisoningMitigationLevel GetPoisoningMitigationLevel() const {
    return poisoning_level_;
  }
121

122 123 124 125 126 127 128 129
  void MarkAsSplittingEnabled() { SetFlag(kSplittingEnabled); }
  bool is_splitting_enabled() const { return GetFlag(kSplittingEnabled); }

  void MarkAsBailoutOnUninitialized() { SetFlag(kBailoutOnUninitialized); }
  bool is_bailout_on_uninitialized() const {
    return GetFlag(kBailoutOnUninitialized);
  }

130 131 132
  void MarkAsLoopPeelingEnabled() { SetFlag(kLoopPeelingEnabled); }
  bool is_loop_peeling_enabled() const { return GetFlag(kLoopPeelingEnabled); }

133 134 135 136
  bool has_untrusted_code_mitigations() const {
    return GetFlag(kUntrustedCodeMitigations);
  }

137 138 139 140
  bool switch_jump_table_enabled() const {
    return GetFlag(kSwitchJumpTableEnabled);
  }

141 142
  bool called_with_code_start_register() const {
    bool enabled = GetFlag(kCalledWithCodeStartRegister);
143 144 145 146 147 148 149 150 151
    return enabled;
  }

  void MarkAsPoisoningRegisterArguments() {
    DCHECK(has_untrusted_code_mitigations());
    SetFlag(kPoisonRegisterArguments);
  }
  bool is_poisoning_register_arguments() const {
    bool enabled = GetFlag(kPoisonRegisterArguments);
152
    DCHECK_IMPLIES(enabled, has_untrusted_code_mitigations());
153
    DCHECK_IMPLIES(enabled, called_with_code_start_register());
154 155 156
    return enabled;
  }

157 158 159 160 161
  void MarkAsAllocationFoldingEnabled() { SetFlag(kAllocationFoldingEnabled); }
  bool is_allocation_folding_enabled() const {
    return GetFlag(kAllocationFoldingEnabled);
  }

162 163 164 165 166 167 168
  void MarkAsAnalyzeEnvironmentLiveness() {
    SetFlag(kAnalyzeEnvironmentLiveness);
  }
  bool is_analyze_environment_liveness() const {
    return GetFlag(kAnalyzeEnvironmentLiveness);
  }

169 170 171 172 173 174 175 176
  bool trace_turbo_json_enabled() const { return GetFlag(kTraceTurboJson); }

  bool trace_turbo_graph_enabled() const { return GetFlag(kTraceTurboGraph); }

  bool trace_turbo_scheduled_enabled() const {
    return GetFlag(kTraceTurboScheduled);
  }

177
  // Code getters and setters.
178

179 180 181 182
  template <typename T>
  void SetCode(T code) {
    code_ = CodeReference(code);
  }
183

184 185 186
  bool has_context() const;
  Context* context() const;

187 188 189 190 191 192 193
  bool has_native_context() const;
  Context* native_context() const;

  bool has_global_object() const;
  JSGlobalObject* global_object() const;

  // Accessors for the different compilation modes.
194 195 196 197 198 199 200 201
  bool IsOptimizing() const {
    return abstract_code_kind() == AbstractCode::OPTIMIZED_FUNCTION;
  }
  bool IsWasm() const {
    return abstract_code_kind() == AbstractCode::WASM_FUNCTION;
  }
  bool IsStub() const {
    return abstract_code_kind() != AbstractCode::OPTIMIZED_FUNCTION &&
202
           abstract_code_kind() != AbstractCode::WASM_FUNCTION;
203
  }
204
  void SetOptimizingForOsr(BailoutId osr_offset, JavaScriptFrame* osr_frame) {
205
    DCHECK(IsOptimizing());
206
    osr_offset_ = osr_offset;
207 208 209
    osr_frame_ = osr_frame;
  }

210 211 212 213
  void set_deferred_handles(std::shared_ptr<DeferredHandles> deferred_handles);
  void set_deferred_handles(DeferredHandles* deferred_handles);
  std::shared_ptr<DeferredHandles> deferred_handles() {
    return deferred_handles_;
214 215
  }

216
  void ReopenHandlesInNewHandleScope(Isolate* isolate);
217 218

  void AbortOptimization(BailoutReason reason) {
219 220
    DCHECK_NE(reason, BailoutReason::kNoReason);
    if (bailout_reason_ == BailoutReason::kNoReason) bailout_reason_ = reason;
221 222 223 224
    SetFlag(kDisableFutureOptimization);
  }

  void RetryOptimization(BailoutReason reason) {
225
    DCHECK_NE(reason, BailoutReason::kNoReason);
226 227 228 229 230 231
    if (GetFlag(kDisableFutureOptimization)) return;
    bailout_reason_ = reason;
  }

  BailoutReason bailout_reason() const { return bailout_reason_; }

232 233 234 235
  int optimization_id() const {
    DCHECK(IsOptimizing());
    return optimization_id_;
  }
236 237 238 239

  struct InlinedFunctionHolder {
    Handle<SharedFunctionInfo> shared_info;

240 241
    InliningPosition position;

242
    InlinedFunctionHolder(Handle<SharedFunctionInfo> inlined_shared_info,
243
                          SourcePosition pos)
244
        : shared_info(inlined_shared_info) {
245 246
      position.position = pos;
      // initialized when generating the deoptimization literals
247
      position.inlined_function_id = DeoptimizationData::kNotInlinedIndex;
248 249 250 251 252
    }

    void RegisterInlinedFunctionId(size_t inlined_function_id) {
      position.inlined_function_id = static_cast<int>(inlined_function_id);
    }
253 254 255
  };

  typedef std::vector<InlinedFunctionHolder> InlinedFunctionList;
256
  InlinedFunctionList& inlined_functions() { return inlined_functions_; }
257

258 259 260
  // Returns the inlining id for source position tracking.
  int AddInlinedFunction(Handle<SharedFunctionInfo> inlined_function,
                         SourcePosition pos);
261 262 263 264 265

  std::unique_ptr<char[]> GetDebugName() const;

  StackFrame::Type GetOutputStackFrameType() const;

266 267 268 269 270 271 272 273
  const char* trace_turbo_filename() const {
    return trace_turbo_filename_.get();
  }

  void set_trace_turbo_filename(std::unique_ptr<char[]> filename) {
    trace_turbo_filename_ = std::move(filename);
  }

274
 private:
275 276
  OptimizedCompilationInfo(Vector<const char> debug_name,
                           AbstractCode::Kind code_kind, Zone* zone);
277 278 279 280

  void SetFlag(Flag flag) { flags_ |= flag; }
  bool GetFlag(Flag flag) const { return (flags_ & flag) != 0; }

281 282
  void SetTracingFlags(bool passes_filter);

283
  // Compilation flags.
284
  unsigned flags_;
285 286
  PoisoningMitigationLevel poisoning_level_ =
      PoisoningMitigationLevel::kDontPoison;
287

288
  AbstractCode::Kind code_kind_;
289
  uint32_t stub_key_;
290
  int32_t builtin_index_;
291

292 293
  Handle<SharedFunctionInfo> shared_info_;

294 295 296
  Handle<JSFunction> closure_;

  // The compiled code.
297
  CodeReference code_;
298

299
  // Entry point when compiling for OSR, {BailoutId::None} otherwise.
300
  BailoutId osr_offset_;
301 302

  // The zone from which the compilation pipeline working on this
303
  // OptimizedCompilationInfo allocates.
304 305
  Zone* zone_;

306
  std::shared_ptr<DeferredHandles> deferred_handles_;
307 308 309 310 311 312 313 314 315 316 317

  BailoutReason bailout_reason_;

  InlinedFunctionList inlined_functions_;

  int optimization_id_;

  // The current OSR frame for specialization or {nullptr}.
  JavaScriptFrame* osr_frame_ = nullptr;

  Vector<const char> debug_name_;
318
  std::unique_ptr<char[]> trace_turbo_filename_;
319

320
  DISALLOW_COPY_AND_ASSIGN(OptimizedCompilationInfo);
321 322 323 324 325
};

}  // namespace internal
}  // namespace v8

#endif  // V8_OPTIMIZED_COMPILATION_INFO_H_