// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_H_
#define V8_COMPILER_H_

#include "src/allocation.h"
#include "src/ast.h"
#include "src/bailout-reason.h"
#include "src/compilation-dependencies.h"
#include "src/signature.h"
#include "src/zone.h"

namespace v8 {
namespace internal {

class AstValueFactory;
class HydrogenCodeStub;
class JavaScriptFrame;
class ParseInfo;
class ScriptData;
25 26 27 28 29 30 31 32 33
// This class encapsulates encoding and decoding of sources positions from
// which hydrogen values originated.
// When FLAG_track_hydrogen_positions is set this object encodes the
// identifier of the inlining and absolute offset from the start of the
// inlined function.
// When the flag is not set we simply track absolute offset from the
// script start.
class SourcePosition {
 public:
34 35 36
  static SourcePosition Unknown() {
    return SourcePosition::FromRaw(kNoPosition);
  }
37

38
  bool IsUnknown() const { return value_ == kNoPosition; }
39

40 41
  uint32_t position() const { return PositionField::decode(value_); }
  void set_position(uint32_t position) {
42
    if (FLAG_hydrogen_track_positions) {
43
      value_ = static_cast<uint32_t>(PositionField::update(value_, position));
44 45 46 47 48
    } else {
      value_ = position;
    }
  }

49 50
  uint32_t inlining_id() const { return InliningIdField::decode(value_); }
  void set_inlining_id(uint32_t inlining_id) {
51
    if (FLAG_hydrogen_track_positions) {
52 53
      value_ =
          static_cast<uint32_t>(InliningIdField::update(value_, inlining_id));
54 55 56
    }
  }

57
  uint32_t raw() const { return value_; }
58 59

 private:
60 61 62
  static const uint32_t kNoPosition =
      static_cast<uint32_t>(RelocInfo::kNoPosition);
  typedef BitField<uint32_t, 0, 9> InliningIdField;
63 64

  // Offset from the start of the inlined function.
65
  typedef BitField<uint32_t, 9, 23> PositionField;
66 67

  friend class HPositionInfo;
68 69 70 71 72 73 74
  friend class Deoptimizer;

  static SourcePosition FromRaw(uint32_t raw_position) {
    SourcePosition position;
    position.value_ = raw_position;
    return position;
  }
75 76 77 78

  // If FLAG_hydrogen_track_positions is set contains bitfields InliningIdField
  // and PositionField.
  // Otherwise contains absolute offset from the script start.
79
  uint32_t value_;
80 81 82 83 84 85
};


std::ostream& operator<<(std::ostream& os, const SourcePosition& p);


86 87 88 89 90 91 92 93 94 95 96
struct InlinedFunctionInfo {
  InlinedFunctionInfo(int parent_id, SourcePosition inline_position,
                      int script_id, int start_position)
      : parent_id(parent_id),
        inline_position(inline_position),
        script_id(script_id),
        start_position(start_position) {}
  int parent_id;
  SourcePosition inline_position;
  int script_id;
  int start_position;
97
  std::vector<size_t> deopt_pc_offsets;
98 99

  static const int kNoParentId = -1;
100 101 102
};


103 104
// CompilationInfo encapsulates some information known at compile time.  It
// is constructed based on the resources available at compile-time.
105
class CompilationInfo {
106
 public:
107 108 109
  // Various configuration flags for a compilation, as well as some properties
  // of the compiled code produced by a compilation.
  enum Flag {
110 111 112 113
    kDeferredCalling = 1 << 0,
    kNonDeferredCalling = 1 << 1,
    kSavesCallerDoubles = 1 << 2,
    kRequiresFrame = 1 << 3,
114 115 116
    kMustNotHaveEagerFrame = 1 << 4,
    kDeoptimizationSupport = 1 << 5,
    kDebug = 1 << 6,
117
    kSerializing = 1 << 7,
118
    kFunctionContextSpecializing = 1 << 8,
119
    kFrameSpecializing = 1 << 9,
120 121 122 123 124 125 126 127
    kNativeContextSpecializing = 1 << 10,
    kInliningEnabled = 1 << 11,
    kTypingEnabled = 1 << 12,
    kDisableFutureOptimization = 1 << 13,
    kSplittingEnabled = 1 << 14,
    kDeoptimizationEnabled = 1 << 16,
    kSourcePositionsEnabled = 1 << 17,
    kFirstCompile = 1 << 18,
128 129
  };

130
  explicit CompilationInfo(ParseInfo* parse_info);
131
  CompilationInfo(CodeStub* stub, Isolate* isolate, Zone* zone);
132
  CompilationInfo(const char* debug_name, Isolate* isolate, Zone* zone);
133
  virtual ~CompilationInfo();
134

135 136 137 138 139 140 141 142 143 144 145
  ParseInfo* parse_info() const { return parse_info_; }

  // -----------------------------------------------------------
  // TODO(titzer): inline and delete accessors of ParseInfo
  // -----------------------------------------------------------
  Handle<Script> script() const;
  bool is_eval() const;
  bool is_native() const;
  bool is_module() const;
  LanguageMode language_mode() const;
  Handle<JSFunction> closure() const;
146
  FunctionLiteral* literal() const;
147 148 149 150
  Scope* scope() const;
  Handle<Context> context() const;
  Handle<SharedFunctionInfo> shared_info() const;
  bool has_shared_info() const;
151
  bool has_context() const;
152 153
  bool has_literal() const;
  bool has_scope() const;
154 155
  // -----------------------------------------------------------

156
  Isolate* isolate() const {
157 158
    return isolate_;
  }
159 160
  Zone* zone() { return zone_; }
  bool is_osr() const { return !osr_ast_id_.IsNone(); }
161
  Handle<Code> code() const { return code_; }
162
  CodeStub* code_stub() const { return code_stub_; }
163
  BailoutId osr_ast_id() const { return osr_ast_id_; }
164
  Handle<Code> unoptimized_code() const { return unoptimized_code_; }
165
  int opt_count() const { return opt_count_; }
166
  int num_parameters() const;
167 168
  int num_parameters_including_this() const;
  bool is_this_defined() const;
169
  int num_heap_slots() const;
170

171
  void set_parameter_count(int parameter_count) {
172
    DCHECK(IsStub());
173 174
    parameter_count_ = parameter_count;
  }
175

176 177 178
  bool has_bytecode_array() const { return !bytecode_array_.is_null(); }
  Handle<BytecodeArray> bytecode_array() const { return bytecode_array_; }

179 180
  bool is_tracking_positions() const { return track_positions_; }

181
  bool is_calling() const {
182
    return GetFlag(kDeferredCalling) || GetFlag(kNonDeferredCalling);
183 184
  }

185
  void MarkAsDeferredCalling() { SetFlag(kDeferredCalling); }
186

187
  bool is_deferred_calling() const { return GetFlag(kDeferredCalling); }
188

189
  void MarkAsNonDeferredCalling() { SetFlag(kNonDeferredCalling); }
190

191
  bool is_non_deferred_calling() const { return GetFlag(kNonDeferredCalling); }
192

193
  void MarkAsSavesCallerDoubles() { SetFlag(kSavesCallerDoubles); }
194

195
  bool saves_caller_doubles() const { return GetFlag(kSavesCallerDoubles); }
196

197
  void MarkAsRequiresFrame() { SetFlag(kRequiresFrame); }
198

199
  bool requires_frame() const { return GetFlag(kRequiresFrame); }
200

201
  void MarkMustNotHaveEagerFrame() { SetFlag(kMustNotHaveEagerFrame); }
202 203

  bool GetMustNotHaveEagerFrame() const {
204
    return GetFlag(kMustNotHaveEagerFrame);
205 206
  }

207 208
  // Compiles marked as debug produce unoptimized code with debug break slots.
  // Inner functions that cannot be compiled w/o context are compiled eagerly.
209 210 211 212 213
  // Always include deoptimization support to avoid having to recompile again.
  void MarkAsDebug() {
    SetFlag(kDebug);
    SetFlag(kDeoptimizationSupport);
  }
214

215
  bool is_debug() const { return GetFlag(kDebug); }
216

217
  void PrepareForSerializing() { SetFlag(kSerializing); }
218

219
  bool will_serialize() const { return GetFlag(kSerializing); }
220

221 222 223
  void MarkAsFunctionContextSpecializing() {
    SetFlag(kFunctionContextSpecializing);
  }
224

225 226 227
  bool is_function_context_specializing() const {
    return GetFlag(kFunctionContextSpecializing);
  }
228

229 230 231 232
  void MarkAsFrameSpecializing() { SetFlag(kFrameSpecializing); }

  bool is_frame_specializing() const { return GetFlag(kFrameSpecializing); }

233 234 235 236 237 238 239 240
  void MarkAsNativeContextSpecializing() {
    SetFlag(kNativeContextSpecializing);
  }

  bool is_native_context_specializing() const {
    return GetFlag(kNativeContextSpecializing);
  }

241 242 243 244 245 246
  void MarkAsDeoptimizationEnabled() { SetFlag(kDeoptimizationEnabled); }

  bool is_deoptimization_enabled() const {
    return GetFlag(kDeoptimizationEnabled);
  }

247 248 249 250 251 252
  void MarkAsSourcePositionsEnabled() { SetFlag(kSourcePositionsEnabled); }

  bool is_source_positions_enabled() const {
    return GetFlag(kSourcePositionsEnabled);
  }

253 254 255 256
  void MarkAsInliningEnabled() { SetFlag(kInliningEnabled); }

  bool is_inlining_enabled() const { return GetFlag(kInliningEnabled); }

257 258 259 260
  void MarkAsTypingEnabled() { SetFlag(kTypingEnabled); }

  bool is_typing_enabled() const { return GetFlag(kTypingEnabled); }

261 262 263 264
  void MarkAsSplittingEnabled() { SetFlag(kSplittingEnabled); }

  bool is_splitting_enabled() const { return GetFlag(kSplittingEnabled); }

265 266
  void MarkAsFirstCompile() { SetFlag(kFirstCompile); }

267 268
  void MarkAsCompiled() { SetFlag(kFirstCompile, false); }

269 270
  bool is_first_compile() const { return GetFlag(kFirstCompile); }

271
  bool IsCodePreAgingActive() const {
272 273
    return FLAG_optimize_for_size && FLAG_age_code && !will_serialize() &&
           !is_debug();
274 275
  }

276
  void EnsureFeedbackVector();
277
  Handle<TypeFeedbackVector> feedback_vector() const {
278 279
    return feedback_vector_;
  }
280
  void SetCode(Handle<Code> code) { code_ = code; }
281

282 283 284 285
  void SetBytecodeArray(Handle<BytecodeArray> bytecode_array) {
    bytecode_array_ = bytecode_array;
  }

286 287 288 289 290
  bool ShouldTrapOnDeopt() const {
    return (FLAG_trap_on_deopt && IsOptimizing()) ||
        (FLAG_trap_on_stub_deopt && IsStub());
  }

291
  bool has_global_object() const {
292 293
    return !closure().is_null() &&
        (closure()->context()->global_object() != NULL);
294 295
  }

296
  JSGlobalObject* global_object() const {
297
    return has_global_object() ? closure()->context()->global_object() : NULL;
298 299 300 301
  }

  // Accessors for the different compilation modes.
  bool IsOptimizing() const { return mode_ == OPTIMIZE; }
302
  bool IsStub() const { return mode_ == STUB; }
303
  void SetOptimizing(BailoutId osr_ast_id, Handle<Code> unoptimized) {
304
    DCHECK(has_shared_info());
305 306
    SetMode(OPTIMIZE);
    osr_ast_id_ = osr_ast_id;
307
    unoptimized_code_ = unoptimized;
308
    optimization_id_ = isolate()->NextOptimizationId();
309
    set_output_code_kind(Code::OPTIMIZED_FUNCTION);
310 311
  }

312 313 314 315 316
  void SetFunctionType(Type::FunctionType* function_type) {
    function_type_ = function_type;
  }
  Type::FunctionType* function_type() const { return function_type_; }

317
  void SetStub(CodeStub* code_stub);
318

319
  // Deoptimization support.
320
  bool HasDeoptimizationSupport() const {
321
    return GetFlag(kDeoptimizationSupport);
322
  }
323
  void EnableDeoptimizationSupport() {
324
    DCHECK_EQ(BASE, mode_);
325
    SetFlag(kDeoptimizationSupport);
326
  }
327
  bool ShouldEnsureSpaceForLazyDeopt() { return !IsStub(); }
328

329
  bool ExpectsJSReceiverAsReceiver();
330

331 332 333
  // Determines whether or not to insert a self-optimization header.
  bool ShouldSelfOptimize();

334
  void set_deferred_handles(DeferredHandles* deferred_handles) {
335
    DCHECK(deferred_handles_ == NULL);
336 337 338
    deferred_handles_ = deferred_handles;
  }

339 340
  void ReopenHandlesInNewHandleScope() {
    unoptimized_code_ = Handle<Code>(*unoptimized_code_);
341 342
  }

343
  void AbortOptimization(BailoutReason reason) {
344 345
    DCHECK(reason != kNoReason);
    if (bailout_reason_ == kNoReason) bailout_reason_ = reason;
346 347 348 349
    SetFlag(kDisableFutureOptimization);
  }

  void RetryOptimization(BailoutReason reason) {
350 351 352
    DCHECK(reason != kNoReason);
    if (GetFlag(kDisableFutureOptimization)) return;
    bailout_reason_ = reason;
353 354
  }

355
  BailoutReason bailout_reason() const { return bailout_reason_; }
356

357
  int prologue_offset() const {
358
    DCHECK_NE(Code::kPrologueOffsetNotSet, prologue_offset_);
359 360 361 362
    return prologue_offset_;
  }

  void set_prologue_offset(int prologue_offset) {
363
    DCHECK_EQ(Code::kPrologueOffsetNotSet, prologue_offset_);
364 365 366
    prologue_offset_ = prologue_offset;
  }

367 368
  int start_position_for(uint32_t inlining_id) {
    return inlined_function_infos_.at(inlining_id).start_position;
369
  }
370 371 372
  const std::vector<InlinedFunctionInfo>& inlined_function_infos() {
    return inlined_function_infos_;
  }
373 374

  void LogDeoptCallPosition(int pc_offset, int inlining_id);
375
  int TraceInlinedFunction(Handle<SharedFunctionInfo> shared,
376
                           SourcePosition position, int pareint_id);
377

378
  CompilationDependencies* dependencies() { return &dependencies_; }
379

380
  bool HasSameOsrEntry(Handle<JSFunction> function, BailoutId osr_ast_id) {
381
    return osr_ast_id_ == osr_ast_id && function.is_identical_to(closure());
382 383
  }

384 385
  int optimization_id() const { return optimization_id_; }

386 387 388 389 390
  int osr_expr_stack_height() { return osr_expr_stack_height_; }
  void set_osr_expr_stack_height(int height) {
    DCHECK(height >= 0);
    osr_expr_stack_height_ = height;
  }
391 392
  JavaScriptFrame* osr_frame() const { return osr_frame_; }
  void set_osr_frame(JavaScriptFrame* osr_frame) { osr_frame_ = osr_frame; }
393

394 395 396 397
#if DEBUG
  void PrintAstForTesting();
#endif

398
  bool has_simple_parameters();
399

400 401 402 403 404 405 406 407 408 409 410 411 412 413 414
  struct InlinedFunctionHolder {
    Handle<SharedFunctionInfo> shared_info;

    // Root that holds the unoptimized code of the inlined function alive
    // (and out of reach of code flushing) until we finish compilation.
    // Do not remove.
    Handle<Code> inlined_code_object_root;

    explicit InlinedFunctionHolder(
        Handle<SharedFunctionInfo> inlined_shared_info)
        : shared_info(inlined_shared_info),
          inlined_code_object_root(inlined_shared_info->code()) {}
  };

  typedef std::vector<InlinedFunctionHolder> InlinedFunctionList;
415 416 417
  InlinedFunctionList const& inlined_functions() const {
    return inlined_functions_;
  }
418

419
  void AddInlinedFunction(Handle<SharedFunctionInfo> inlined_function) {
420
    inlined_functions_.push_back(InlinedFunctionHolder(inlined_function));
421 422
  }

423 424
  base::SmartArrayPointer<char> GetDebugName() const;

425 426 427 428
  Code::Kind output_code_kind() const { return output_code_kind_; }

  void set_output_code_kind(Code::Kind kind) { output_code_kind_ = kind; }

429
 protected:
430
  ParseInfo* parse_info_;
431

432 433 434 435 436
  void DisableFutureOptimization() {
    if (GetFlag(kDisableFutureOptimization) && has_shared_info()) {
      shared_info()->DisableOptimization(bailout_reason());
    }
  }
437

438
 private:
439 440 441 442 443 444
  // Compilation mode.
  // BASE is generated by the full codegen, optionally prepared for bailouts.
  // OPTIMIZE is optimized code generated by the Hydrogen-based backend.
  enum Mode {
    BASE,
    OPTIMIZE,
445
    STUB
446 447
  };

448
  CompilationInfo(ParseInfo* parse_info, CodeStub* code_stub,
449
                  const char* debug_name, Mode mode, Isolate* isolate,
450
                  Zone* zone);
451 452

  Isolate* isolate_;
453 454 455 456 457

  void SetMode(Mode mode) {
    mode_ = mode;
  }

458 459 460 461 462 463 464
  void SetFlag(Flag flag) { flags_ |= flag; }

  void SetFlag(Flag flag, bool value) {
    flags_ = value ? flags_ | flag : flags_ & ~flag;
  }

  bool GetFlag(Flag flag) const { return (flags_ & flag) != 0; }
465 466 467

  unsigned flags_;

468 469
  Code::Kind output_code_kind_;

470
  // For compiled stubs, the stub object
471
  CodeStub* code_stub_;
472 473
  // The compiled code.
  Handle<Code> code_;
474

475
  // Used by codegen, ultimately kept rooted by the SharedFunctionInfo.
476
  Handle<TypeFeedbackVector> feedback_vector_;
477

478 479
  // Compilation mode flag and whether deoptimization is allowed.
  Mode mode_;
480
  BailoutId osr_ast_id_;
481 482 483
  // The unoptimized code we patched for OSR may not be the shared code
  // afterwards, since we may need to compile it again to include deoptimization
  // data.  Keep track which code we patched.
484
  Handle<Code> unoptimized_code_;
485

486 487 488 489 490
  // Holds the bytecode array generated by the interpreter.
  // TODO(rmcilroy/mstarzinger): Temporary work-around until compiler.cc is
  // refactored to avoid us needing to carry the BytcodeArray around.
  Handle<BytecodeArray> bytecode_array_;

491 492 493 494
  // The zone from which the compilation pipeline working on this
  // CompilationInfo allocates.
  Zone* zone_;

495 496
  DeferredHandles* deferred_handles_;

497 498
  // Dependencies for this compilation, e.g. stable maps.
  CompilationDependencies dependencies_;
499

500
  BailoutReason bailout_reason_;
501

502 503
  int prologue_offset_;

504
  std::vector<InlinedFunctionInfo> inlined_function_infos_;
505
  bool track_positions_;
506

507 508
  InlinedFunctionList inlined_functions_;

509 510 511 512
  // A copy of shared_info()->opt_count() to avoid handle deref
  // during graph optimization.
  int opt_count_;

513 514 515
  // Number of parameters used for compilation of stubs that require arguments.
  int parameter_count_;

516 517
  int optimization_id_;

518 519
  int osr_expr_stack_height_;

520 521 522
  // The current OSR frame for specialization or {nullptr}.
  JavaScriptFrame* osr_frame_ = nullptr;

523 524
  Type::FunctionType* function_type_;

525
  const char* debug_name_;
526

527
  DISALLOW_COPY_AND_ASSIGN(CompilationInfo);
528 529 530
};


531 532 533 534 535 536 537 538 539 540 541 542 543 544 545 546 547
// A wrapper around a CompilationInfo that detaches the Handles from
// the underlying DeferredHandleScope and stores them in info_ on
// destruction.
class CompilationHandleScope BASE_EMBEDDED {
 public:
  explicit CompilationHandleScope(CompilationInfo* info)
      : deferred_(info->isolate()), info_(info) {}
  ~CompilationHandleScope() {
    info_->set_deferred_handles(deferred_.Detach());
  }

 private:
  DeferredHandleScope deferred_;
  CompilationInfo* info_;
};


548
class HGraph;
549
class HOptimizedGraphBuilder;
550 551 552 553 554 555 556 557
class LChunk;

// A helper class that calls the three compilation phases in
// Crankshaft and keeps track of its state.  The three phases
// CreateGraph, OptimizeGraph and GenerateAndInstallCode can either
// fail, bail-out to the full code generator or succeed.  Apart from
// their return value, the status of the phase last run can be checked
// using last_status().
558
class OptimizedCompileJob: public ZoneObject {
559
 public:
560
  explicit OptimizedCompileJob(CompilationInfo* info)
561 562 563 564
      : info_(info),
        graph_builder_(NULL),
        graph_(NULL),
        chunk_(NULL),
565 566
        last_status_(FAILED),
        awaiting_install_(false) { }
567 568 569 570 571 572 573

  enum Status {
    FAILED, BAILED_OUT, SUCCEEDED
  };

  MUST_USE_RESULT Status CreateGraph();
  MUST_USE_RESULT Status OptimizeGraph();
574
  MUST_USE_RESULT Status GenerateCode();
575 576 577

  Status last_status() const { return last_status_; }
  CompilationInfo* info() const { return info_; }
578
  Isolate* isolate() const { return info()->isolate(); }
579

580 581
  Status RetryOptimization(BailoutReason reason) {
    info_->RetryOptimization(reason);
582 583 584
    return SetLastStatus(BAILED_OUT);
  }

585 586
  Status AbortOptimization(BailoutReason reason) {
    info_->AbortOptimization(reason);
587 588 589
    return SetLastStatus(BAILED_OUT);
  }

590
  void WaitForInstall() {
591
    DCHECK(info_->is_osr());
592 593 594 595 596
    awaiting_install_ = true;
  }

  bool IsWaitingForInstall() { return awaiting_install_; }

597 598
 private:
  CompilationInfo* info_;
599
  HOptimizedGraphBuilder* graph_builder_;
600 601
  HGraph* graph_;
  LChunk* chunk_;
602 603 604
  base::TimeDelta time_taken_to_create_graph_;
  base::TimeDelta time_taken_to_optimize_;
  base::TimeDelta time_taken_to_codegen_;
605
  Status last_status_;
606
  bool awaiting_install_;
607 608 609 610 611 612 613 614

  MUST_USE_RESULT Status SetLastStatus(Status status) {
    last_status_ = status;
    return last_status_;
  }
  void RecordOptimizationStats();

  struct Timer {
615
    Timer(OptimizedCompileJob* job, base::TimeDelta* location)
616
        : job_(job), location_(location) {
617
      DCHECK(location_ != NULL);
618 619
      timer_.Start();
    }
620 621

    ~Timer() {
622
      *location_ += timer_.Elapsed();
623 624
    }

625
    OptimizedCompileJob* job_;
626 627
    base::ElapsedTimer timer_;
    base::TimeDelta* location_;
628 629 630 631
  };
};


632 633 634 635 636 637 638
// The V8 compiler
//
// General strategy: Source code is translated into an anonymous function w/o
// parameters which then can be executed. If the source code contains other
// functions, they will be compiled and allocated as part of the compilation
// of the source code.

639 640 641
// Please note this interface returns shared function infos.  This means you
// need to call Factory::NewFunctionFromSharedFunctionInfo before you have a
// real function with a context.
642 643 644

class Compiler : public AllStatic {
 public:
645 646
  MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCode(
      Handle<JSFunction> function);
647 648
  MUST_USE_RESULT static MaybeHandle<Code> GetLazyCode(
      Handle<JSFunction> function);
649 650
  MUST_USE_RESULT static MaybeHandle<Code> GetStubCode(
      Handle<JSFunction> function, CodeStub* stub);
651

652
  static bool Compile(Handle<JSFunction> function, ClearExceptionFlag flag);
653 654
  static bool CompileDebugCode(Handle<JSFunction> function);
  static bool CompileDebugCode(Handle<SharedFunctionInfo> shared);
655
  static void CompileForLiveEdit(Handle<Script> script);
656

657
  // Parser::Parse, then Compiler::Analyze.
658
  static bool ParseAndAnalyze(ParseInfo* info);
659
  // Rewrite, analyze scopes, and renumber.
660
  static bool Analyze(ParseInfo* info);
661 662 663
  // Adds deoptimization support, requires ParseAndAnalyze.
  static bool EnsureDeoptimizationSupport(CompilationInfo* info);

664
  // Compile a String source within a context for eval.
665
  MUST_USE_RESULT static MaybeHandle<JSFunction> GetFunctionFromEval(
666
      Handle<String> source, Handle<SharedFunctionInfo> outer_info,
667
      Handle<Context> context, LanguageMode language_mode,
668 669 670
      ParseRestriction restriction, int line_offset, int column_offset = 0,
      Handle<Object> script_name = Handle<Object>(),
      ScriptOriginOptions options = ScriptOriginOptions());
671

672
  // Compile a String source within a context.
673
  static Handle<SharedFunctionInfo> CompileScript(
674
      Handle<String> source, Handle<Object> script_name, int line_offset,
675
      int column_offset, ScriptOriginOptions resource_options,
676 677 678
      Handle<Object> source_map_url, Handle<Context> context,
      v8::Extension* extension, ScriptData** cached_data,
      ScriptCompiler::CompileOptions compile_options,
679
      NativesFlag is_natives_code, bool is_module);
680

681 682
  static Handle<SharedFunctionInfo> CompileStreamedScript(Handle<Script> script,
                                                          ParseInfo* info,
683 684
                                                          int source_length);

685
  // Create a shared function info object (the code may be lazily compiled).
686 687
  static Handle<SharedFunctionInfo> GetSharedFunctionInfo(
      FunctionLiteral* node, Handle<Script> script, CompilationInfo* outer);
688

689
  enum ConcurrencyMode { NOT_CONCURRENT, CONCURRENT };
690

691 692 693
  // Generate and return optimized code or start a concurrent optimization job.
  // In the latter case, return the InOptimizationQueue builtin.  On failure,
  // return the empty handle.
694
  MUST_USE_RESULT static MaybeHandle<Code> GetOptimizedCode(
695 696 697
      Handle<JSFunction> function, Handle<Code> current_code,
      ConcurrencyMode mode, BailoutId osr_ast_id = BailoutId::None(),
      JavaScriptFrame* osr_frame = nullptr);
698

699 700 701
  // Generate and return code from previously queued optimization job.
  // On failure, return the empty handle.
  static Handle<Code> GetConcurrentlyOptimizedCode(OptimizedCompileJob* job);
702 703
};

704

705 706
class CompilationPhase BASE_EMBEDDED {
 public:
707
  CompilationPhase(const char* name, CompilationInfo* info);
708 709 710 711 712 713
  ~CompilationPhase();

 protected:
  bool ShouldProduceTraceOutput() const;

  const char* name() const { return name_; }
714 715
  CompilationInfo* info() const { return info_; }
  Isolate* isolate() const { return info()->isolate(); }
716
  Zone* zone() { return &zone_; }
717 718 719

 private:
  const char* name_;
720 721
  CompilationInfo* info_;
  Zone zone_;
722
  size_t info_zone_start_allocation_size_;
723
  base::ElapsedTimer timer_;
724 725 726 727

  DISALLOW_COPY_AND_ASSIGN(CompilationPhase);
};

}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_H_