// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/pipeline.h"

#include <fstream>  // NOLINT(readability/streams)
#include <memory>
#include <sstream>

#include "src/base/adapters.h"
#include "src/base/optional.h"
#include "src/base/platform/elapsed-timer.h"
#include "src/compilation-info.h"
#include "src/compiler.h"
#include "src/compiler/ast-graph-builder.h"
#include "src/compiler/ast-loop-assignment-analyzer.h"
#include "src/compiler/basic-block-instrumentor.h"
#include "src/compiler/branch-elimination.h"
#include "src/compiler/bytecode-graph-builder.h"
#include "src/compiler/checkpoint-elimination.h"
#include "src/compiler/code-generator.h"
#include "src/compiler/common-operator-reducer.h"
#include "src/compiler/control-flow-optimizer.h"
#include "src/compiler/dead-code-elimination.h"
#include "src/compiler/effect-control-linearizer.h"
#include "src/compiler/escape-analysis-reducer.h"
#include "src/compiler/escape-analysis.h"
#include "src/compiler/frame-elider.h"
#include "src/compiler/graph-trimmer.h"
#include "src/compiler/graph-visualizer.h"
#include "src/compiler/instruction-selector.h"
#include "src/compiler/instruction.h"
#include "src/compiler/js-builtin-reducer.h"
#include "src/compiler/js-call-reducer.h"
#include "src/compiler/js-context-specialization.h"
#include "src/compiler/js-create-lowering.h"
#include "src/compiler/js-frame-specialization.h"
#include "src/compiler/js-generic-lowering.h"
#include "src/compiler/js-inlining-heuristic.h"
#include "src/compiler/js-intrinsic-lowering.h"
#include "src/compiler/js-native-context-specialization.h"
#include "src/compiler/js-typed-lowering.h"
#include "src/compiler/jump-threading.h"
#include "src/compiler/live-range-separator.h"
#include "src/compiler/load-elimination.h"
#include "src/compiler/loop-analysis.h"
#include "src/compiler/loop-peeling.h"
#include "src/compiler/loop-variable-optimizer.h"
#include "src/compiler/machine-graph-verifier.h"
#include "src/compiler/machine-operator-reducer.h"
#include "src/compiler/memory-optimizer.h"
#include "src/compiler/move-optimizer.h"
#include "src/compiler/osr.h"
#include "src/compiler/pipeline-statistics.h"
#include "src/compiler/redundancy-elimination.h"
#include "src/compiler/register-allocator-verifier.h"
#include "src/compiler/register-allocator.h"
#include "src/compiler/schedule.h"
#include "src/compiler/scheduler.h"
#include "src/compiler/select-lowering.h"
#include "src/compiler/simplified-lowering.h"
#include "src/compiler/simplified-operator-reducer.h"
#include "src/compiler/simplified-operator.h"
#include "src/compiler/store-store-elimination.h"
#include "src/compiler/tail-call-optimization.h"
#include "src/compiler/typed-optimization.h"
#include "src/compiler/typer.h"
#include "src/compiler/value-numbering-reducer.h"
#include "src/compiler/verifier.h"
#include "src/compiler/zone-stats.h"
#include "src/isolate-inl.h"
#include "src/ostreams.h"
#include "src/parsing/parse-info.h"
#include "src/register-configuration.h"
#include "src/trap-handler/trap-handler.h"
#include "src/utils.h"
#include "src/wasm/wasm-module.h"

namespace v8 {
namespace internal {
namespace compiler {

class PipelineData {
 public:
  // For main entry point.
  PipelineData(ZoneStats* zone_stats, CompilationInfo* info,
               PipelineStatistics* pipeline_statistics)
      : isolate_(info->isolate()),
        info_(info),
        debug_name_(info_->GetDebugName()),
        outer_zone_(info_->zone()),
        zone_stats_(zone_stats),
        pipeline_statistics_(pipeline_statistics),
        graph_zone_scope_(zone_stats_, ZONE_NAME),
        graph_zone_(graph_zone_scope_.zone()),
        instruction_zone_scope_(zone_stats_, ZONE_NAME),
        instruction_zone_(instruction_zone_scope_.zone()),
        register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
        register_allocation_zone_(register_allocation_zone_scope_.zone()) {
    PhaseScope scope(pipeline_statistics, "init pipeline data");
    graph_ = new (graph_zone_) Graph(graph_zone_);
    source_positions_ = new (graph_zone_) SourcePositionTable(graph_);
    simplified_ = new (graph_zone_) SimplifiedOperatorBuilder(graph_zone_);
    machine_ = new (graph_zone_) MachineOperatorBuilder(
        graph_zone_, MachineType::PointerRepresentation(),
        InstructionSelector::SupportedMachineOperatorFlags(),
        InstructionSelector::AlignmentRequirements());
    common_ = new (graph_zone_) CommonOperatorBuilder(graph_zone_);
    javascript_ = new (graph_zone_) JSOperatorBuilder(graph_zone_);
    jsgraph_ = new (graph_zone_)
        JSGraph(isolate_, graph_, common_, javascript_, simplified_, machine_);
    is_asm_ = info->shared_info()->asm_function();
  }

  // For WebAssembly compile entry point.
  PipelineData(ZoneStats* zone_stats, CompilationInfo* info, JSGraph* jsgraph,
               PipelineStatistics* pipeline_statistics,
               SourcePositionTable* source_positions,
               ZoneVector<trap_handler::ProtectedInstructionData>*
                   protected_instructions)
      : isolate_(info->isolate()),
        info_(info),
        debug_name_(info_->GetDebugName()),
        zone_stats_(zone_stats),
        pipeline_statistics_(pipeline_statistics),
        graph_zone_scope_(zone_stats_, ZONE_NAME),
        graph_(jsgraph->graph()),
        source_positions_(source_positions),
        machine_(jsgraph->machine()),
        common_(jsgraph->common()),
        javascript_(jsgraph->javascript()),
        jsgraph_(jsgraph),
        instruction_zone_scope_(zone_stats_, ZONE_NAME),
        instruction_zone_(instruction_zone_scope_.zone()),
        register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
        register_allocation_zone_(register_allocation_zone_scope_.zone()),
        protected_instructions_(protected_instructions) {
    is_asm_ =
        info->has_shared_info() ? info->shared_info()->asm_function() : false;
  }

  // For machine graph testing entry point.
  PipelineData(ZoneStats* zone_stats, CompilationInfo* info, Graph* graph,
               Schedule* schedule, SourcePositionTable* source_positions)
      : isolate_(info->isolate()),
        info_(info),
        debug_name_(info_->GetDebugName()),
        zone_stats_(zone_stats),
        graph_zone_scope_(zone_stats_, ZONE_NAME),
        graph_(graph),
        source_positions_(source_positions),
        schedule_(schedule),
        instruction_zone_scope_(zone_stats_, ZONE_NAME),
        instruction_zone_(instruction_zone_scope_.zone()),
        register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
        register_allocation_zone_(register_allocation_zone_scope_.zone()) {
    is_asm_ = false;
  }

  // For register allocation testing entry point.
  PipelineData(ZoneStats* zone_stats, CompilationInfo* info,
               InstructionSequence* sequence)
      : isolate_(info->isolate()),
        info_(info),
        debug_name_(info_->GetDebugName()),
        zone_stats_(zone_stats),
        graph_zone_scope_(zone_stats_, ZONE_NAME),
        instruction_zone_scope_(zone_stats_, ZONE_NAME),
        instruction_zone_(sequence->zone()),
        sequence_(sequence),
        register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
        register_allocation_zone_(register_allocation_zone_scope_.zone()) {
    is_asm_ =
        info->has_shared_info() ? info->shared_info()->asm_function() : false;
  }

  ~PipelineData() {
    delete code_generator_;  // Must happen before zones are destroyed.
    code_generator_ = nullptr;
    DeleteRegisterAllocationZone();
    DeleteInstructionZone();
    DeleteGraphZone();
  }

  Isolate* isolate() const { return isolate_; }
  CompilationInfo* info() const { return info_; }
  ZoneStats* zone_stats() const { return zone_stats_; }
  PipelineStatistics* pipeline_statistics() { return pipeline_statistics_; }
  OsrHelper* osr_helper() { return &(*osr_helper_); }
  bool compilation_failed() const { return compilation_failed_; }
  void set_compilation_failed() { compilation_failed_ = true; }

  bool is_asm() const { return is_asm_; }
  bool verify_graph() const { return verify_graph_; }
  void set_verify_graph(bool value) { verify_graph_ = value; }

  Handle<Code> code() { return code_; }
  void set_code(Handle<Code> code) {
    DCHECK(code_.is_null());
    code_ = code;
  }

  CodeGenerator* code_generator() const { return code_generator_; }

  // RawMachineAssembler generally produces graphs which cannot be verified.
  bool MayHaveUnverifiableGraph() const { return outer_zone_ == nullptr; }

  Zone* graph_zone() const { return graph_zone_; }
  Graph* graph() const { return graph_; }
  SourcePositionTable* source_positions() const { return source_positions_; }
  MachineOperatorBuilder* machine() const { return machine_; }
  CommonOperatorBuilder* common() const { return common_; }
  JSOperatorBuilder* javascript() const { return javascript_; }
  JSGraph* jsgraph() const { return jsgraph_; }
  Handle<Context> native_context() const {
    return handle(info()->native_context(), isolate());
  }
  Handle<JSGlobalObject> global_object() const {
    return handle(info()->global_object(), isolate());
  }

  LoopAssignmentAnalysis* loop_assignment() const { return loop_assignment_; }
  void set_loop_assignment(LoopAssignmentAnalysis* loop_assignment) {
    DCHECK(!loop_assignment_);
    loop_assignment_ = loop_assignment;
  }

  Schedule* schedule() const { return schedule_; }
  void set_schedule(Schedule* schedule) {
    DCHECK(!schedule_);
    schedule_ = schedule;
  }
  void reset_schedule() { schedule_ = nullptr; }

  Zone* instruction_zone() const { return instruction_zone_; }
  InstructionSequence* sequence() const { return sequence_; }
  Frame* frame() const { return frame_; }

  Zone* register_allocation_zone() const { return register_allocation_zone_; }
  RegisterAllocationData* register_allocation_data() const {
    return register_allocation_data_;
  }

  BasicBlockProfiler::Data* profiler_data() const { return profiler_data_; }
  void set_profiler_data(BasicBlockProfiler::Data* profiler_data) {
    profiler_data_ = profiler_data;
  }

  std::string const& source_position_output() const {
    return source_position_output_;
  }
  void set_source_position_output(std::string const& source_position_output) {
    source_position_output_ = source_position_output;
  }

  ZoneVector<trap_handler::ProtectedInstructionData>* protected_instructions()
      const {
    return protected_instructions_;
  }

  void DeleteGraphZone() {
    if (graph_zone_ == nullptr) return;
    graph_zone_scope_.Destroy();
    graph_zone_ = nullptr;
    graph_ = nullptr;
    source_positions_ = nullptr;
    loop_assignment_ = nullptr;
    simplified_ = nullptr;
    machine_ = nullptr;
    common_ = nullptr;
    javascript_ = nullptr;
    jsgraph_ = nullptr;
    schedule_ = nullptr;
  }

  void DeleteInstructionZone() {
    if (instruction_zone_ == nullptr) return;
    instruction_zone_scope_.Destroy();
    instruction_zone_ = nullptr;
    sequence_ = nullptr;
    frame_ = nullptr;
  }

  void DeleteRegisterAllocationZone() {
    if (register_allocation_zone_ == nullptr) return;
    register_allocation_zone_scope_.Destroy();
    register_allocation_zone_ = nullptr;
    register_allocation_data_ = nullptr;
  }
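
  // Note: the destructor runs the three Delete*Zone() helpers above in
  // reverse order of the zones' declaration, and each helper clears every
  // pointer into its zone, so a stale access hits a null pointer rather than
  // dangling into freed zone memory.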

  void InitializeInstructionSequence(const CallDescriptor* descriptor) {
    DCHECK(sequence_ == nullptr);
    InstructionBlocks* instruction_blocks =
        InstructionSequence::InstructionBlocksFor(instruction_zone(),
                                                  schedule());
    sequence_ = new (instruction_zone()) InstructionSequence(
        info()->isolate(), instruction_zone(), instruction_blocks);
    if (descriptor && descriptor->RequiresFrameAsIncoming()) {
      sequence_->instruction_blocks()[0]->mark_needs_frame();
    } else {
      DCHECK_EQ(0u, descriptor->CalleeSavedFPRegisters());
      DCHECK_EQ(0u, descriptor->CalleeSavedRegisters());
    }
  }

  void InitializeFrameData(CallDescriptor* descriptor) {
    DCHECK(frame_ == nullptr);
    int fixed_frame_size = 0;
    if (descriptor != nullptr) {
      fixed_frame_size = descriptor->CalculateFixedFrameSize();
    }
    frame_ = new (instruction_zone()) Frame(fixed_frame_size);
  }

  void InitializeRegisterAllocationData(const RegisterConfiguration* config,
                                        CallDescriptor* descriptor) {
    DCHECK(register_allocation_data_ == nullptr);
    register_allocation_data_ = new (register_allocation_zone())
        RegisterAllocationData(config, register_allocation_zone(), frame(),
                               sequence(), debug_name());
  }

  void InitializeOsrHelper() {
    DCHECK(!osr_helper_.has_value());
    osr_helper_.emplace(info());
  }

  void InitializeCodeGenerator(Linkage* linkage) {
    DCHECK_NULL(code_generator_);
    code_generator_ =
        new CodeGenerator(frame(), linkage, sequence(), info(), osr_helper_);
  }

  void BeginPhaseKind(const char* phase_kind_name) {
    if (pipeline_statistics() != nullptr) {
      pipeline_statistics()->BeginPhaseKind(phase_kind_name);
    }
  }

  void EndPhaseKind() {
    if (pipeline_statistics() != nullptr) {
      pipeline_statistics()->EndPhaseKind();
    }
  }

  const char* debug_name() const { return debug_name_.get(); }

 private:
  Isolate* const isolate_;
  CompilationInfo* const info_;
  std::unique_ptr<char[]> debug_name_;
  Zone* outer_zone_ = nullptr;
  ZoneStats* const zone_stats_;
  PipelineStatistics* pipeline_statistics_ = nullptr;
  bool compilation_failed_ = false;
  bool verify_graph_ = false;
  bool is_asm_ = false;
  base::Optional<OsrHelper> osr_helper_;
  Handle<Code> code_ = Handle<Code>::null();
  CodeGenerator* code_generator_ = nullptr;

  // All objects in the following group of fields are allocated in graph_zone_.
  // They are all set to nullptr when the graph_zone_ is destroyed.
  ZoneStats::Scope graph_zone_scope_;
  Zone* graph_zone_ = nullptr;
  Graph* graph_ = nullptr;
  SourcePositionTable* source_positions_ = nullptr;
  LoopAssignmentAnalysis* loop_assignment_ = nullptr;
  SimplifiedOperatorBuilder* simplified_ = nullptr;
  MachineOperatorBuilder* machine_ = nullptr;
  CommonOperatorBuilder* common_ = nullptr;
  JSOperatorBuilder* javascript_ = nullptr;
  JSGraph* jsgraph_ = nullptr;
  Schedule* schedule_ = nullptr;

  // All objects in the following group of fields are allocated in
  // instruction_zone_.  They are all set to nullptr when the instruction_zone_
  // is destroyed.
  ZoneStats::Scope instruction_zone_scope_;
  Zone* instruction_zone_;
  InstructionSequence* sequence_ = nullptr;
  Frame* frame_ = nullptr;

  // All objects in the following group of fields are allocated in
  // register_allocation_zone_.  They are all set to nullptr when the zone is
  // destroyed.
  ZoneStats::Scope register_allocation_zone_scope_;
  Zone* register_allocation_zone_;
  RegisterAllocationData* register_allocation_data_ = nullptr;

  // Basic block profiling support.
  BasicBlockProfiler::Data* profiler_data_ = nullptr;

  // Source position output for --trace-turbo.
  std::string source_position_output_;

  ZoneVector<trap_handler::ProtectedInstructionData>* protected_instructions_ =
      nullptr;

  DISALLOW_COPY_AND_ASSIGN(PipelineData);
};

class PipelineImpl final {
 public:
  explicit PipelineImpl(PipelineData* data) : data_(data) {}

  // Helpers for executing pipeline phases.
  template <typename Phase>
  void Run();
  template <typename Phase, typename Arg0>
  void Run(Arg0 arg_0);
  template <typename Phase, typename Arg0, typename Arg1>
  void Run(Arg0 arg_0, Arg1 arg_1);

  // Run the graph creation and initial optimization passes.
  bool CreateGraph();

  // Run the concurrent optimization passes.
  bool OptimizeGraph(Linkage* linkage);

  // Run the code assembly pass.
  void AssembleCode(Linkage* linkage);

  // Run the code finalization pass.
  Handle<Code> FinalizeCode();

  bool ScheduleAndSelectInstructions(Linkage* linkage, bool trim_graph);
  void RunPrintAndVerify(const char* phase, bool untyped = false);
  Handle<Code> ScheduleAndGenerateCode(CallDescriptor* call_descriptor);
  void AllocateRegisters(const RegisterConfiguration* config,
                         CallDescriptor* descriptor, bool run_verifier);

  CompilationInfo* info() const;
  Isolate* isolate() const;

  PipelineData* const data_;
};
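
// A compilation normally flows through CreateGraph() on the main thread,
// OptimizeGraph() for the concurrent passes, and then AssembleCode() and
// FinalizeCode() back on the main thread, as driven by the
// PipelineCompilationJob below.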

namespace {

struct TurboCfgFile : public std::ofstream {
  explicit TurboCfgFile(Isolate* isolate)
      : std::ofstream(isolate->GetTurboCfgFileName().c_str(),
                      std::ios_base::app) {}
};

struct TurboJsonFile : public std::ofstream {
  TurboJsonFile(CompilationInfo* info, std::ios_base::openmode mode)
      : std::ofstream(GetVisualizerLogFileName(info, nullptr, "json").get(),
                      mode) {}
};
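
// Both helper streams above write tracing files: TurboCfgFile appends to the
// per-isolate .cfg log, while TurboJsonFile writes the per-function JSON log
// (named by GetVisualizerLogFileName) consumed by tooling such as Turbolizer.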

void TraceSchedule(CompilationInfo* info, Schedule* schedule) {
  if (FLAG_trace_turbo) {
    AllowHandleDereference allow_deref;
    TurboJsonFile json_of(info, std::ios_base::app);
    json_of << "{\"name\":\"Schedule\",\"type\":\"schedule\",\"data\":\"";
    std::stringstream schedule_stream;
    schedule_stream << *schedule;
    std::string schedule_string(schedule_stream.str());
    for (const auto& c : schedule_string) {
      json_of << AsEscapedUC16ForJSON(c);
    }
    json_of << "\"},\n";
  }
  if (FLAG_trace_turbo_graph || FLAG_trace_turbo_scheduler) {
    AllowHandleDereference allow_deref;
    CodeTracer::Scope tracing_scope(info->isolate()->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "-- Schedule --------------------------------------\n" << *schedule;
  }
}


class SourcePositionWrapper final : public Reducer {
 public:
  SourcePositionWrapper(Reducer* reducer, SourcePositionTable* table)
      : reducer_(reducer), table_(table) {}
  ~SourcePositionWrapper() final {}

  Reduction Reduce(Node* node) final {
    SourcePosition const pos = table_->GetSourcePosition(node);
    SourcePositionTable::Scope position(table_, pos);
    return reducer_->Reduce(node);
  }

  void Finalize() final { reducer_->Finalize(); }

 private:
  Reducer* const reducer_;
  SourcePositionTable* const table_;

  DISALLOW_COPY_AND_ASSIGN(SourcePositionWrapper);
};


class JSGraphReducer final : public GraphReducer {
 public:
  JSGraphReducer(JSGraph* jsgraph, Zone* zone)
      : GraphReducer(zone, jsgraph->graph(), jsgraph->Dead()) {}
  ~JSGraphReducer() final {}
};


void AddReducer(PipelineData* data, GraphReducer* graph_reducer,
                Reducer* reducer) {
  if (data->info()->is_source_positions_enabled()) {
    void* const buffer = data->graph_zone()->New(sizeof(SourcePositionWrapper));
    SourcePositionWrapper* const wrapper =
        new (buffer) SourcePositionWrapper(reducer, data->source_positions());
    graph_reducer->AddReducer(wrapper);
  } else {
    graph_reducer->AddReducer(reducer);
  }
}
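
// When source positions are tracked, AddReducer() wraps each reducer in a
// SourcePositionWrapper so that any nodes a reducer creates inherit the
// source position of the node currently being reduced.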

class PipelineRunScope {
 public:
  PipelineRunScope(PipelineData* data, const char* phase_name)
      : phase_scope_(
            phase_name == nullptr ? nullptr : data->pipeline_statistics(),
            phase_name),
        zone_scope_(data->zone_stats(), ZONE_NAME) {}

  Zone* zone() { return zone_scope_.zone(); }

 private:
  PhaseScope phase_scope_;
  ZoneStats::Scope zone_scope_;
};
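
// Each phase thus executes under two RAII scopes: PhaseScope records per-phase
// statistics when --turbo-stats is active, and ZoneStats::Scope hands the
// phase a fresh temporary zone that is torn down as soon as the phase returns.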

PipelineStatistics* CreatePipelineStatistics(CompilationInfo* info,
                                             ZoneStats* zone_stats) {
  PipelineStatistics* pipeline_statistics = nullptr;

  if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
    pipeline_statistics = new PipelineStatistics(info, zone_stats);
    pipeline_statistics->BeginPhaseKind("initializing");
  }

  if (FLAG_trace_turbo) {
    TurboJsonFile json_of(info, std::ios_base::trunc);
    std::unique_ptr<char[]> function_name = info->GetDebugName();
    int pos = info->parse_info() ? info->shared_info()->start_position() : 0;
    json_of << "{\"function\":\"" << function_name.get()
            << "\", \"sourcePosition\":" << pos << ", \"source\":\"";
    Isolate* isolate = info->isolate();
    Handle<Script> script =
        info->parse_info() ? info->script() : Handle<Script>::null();
    if (!script.is_null() && !script->source()->IsUndefined(isolate)) {
      DisallowHeapAllocation no_allocation;
      int start = info->shared_info()->start_position();
      int len = info->shared_info()->end_position() - start;
      String::SubStringRange source(String::cast(script->source()), start, len);
      for (const auto& c : source) {
        json_of << AsEscapedUC16ForJSON(c);
      }
    }
    json_of << "\",\n\"phases\":[";
  }

  return pipeline_statistics;
}
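
// Besides allocating the statistics object (only under --turbo-stats or
// --turbo-stats-nvp), this helper also emits the opening JSON record for
// --trace-turbo, embedding the function's escaped source text.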

}  // namespace

class PipelineCompilationJob final : public CompilationJob {
 public:
  PipelineCompilationJob(ParseInfo* parse_info, Handle<JSFunction> function)
      // Note that the CompilationInfo is not initialized at the time we pass it
      // to the CompilationJob constructor, but it is not dereferenced there.
      : CompilationJob(function->GetIsolate(), &info_, "TurboFan"),
        parse_info_(parse_info),
        zone_stats_(function->GetIsolate()->allocator()),
        info_(parse_info_.get()->zone(), parse_info_.get(),
              function->GetIsolate(), function),
        pipeline_statistics_(CreatePipelineStatistics(info(), &zone_stats_)),
        data_(&zone_stats_, info(), pipeline_statistics_.get()),
        pipeline_(&data_),
        linkage_(nullptr) {}

 protected:
  Status PrepareJobImpl() final;
  Status ExecuteJobImpl() final;
  Status FinalizeJobImpl() final;

  // Registers weak object to optimized code dependencies.
  void RegisterWeakObjectsInOptimizedCode(Handle<Code> code);

 private:
  std::unique_ptr<ParseInfo> parse_info_;
  ZoneStats zone_stats_;
  CompilationInfo info_;
  std::unique_ptr<PipelineStatistics> pipeline_statistics_;
  PipelineData data_;
  PipelineImpl pipeline_;
  Linkage* linkage_;

  DISALLOW_COPY_AND_ASSIGN(PipelineCompilationJob);
};

PipelineCompilationJob::Status PipelineCompilationJob::PrepareJobImpl() {
  if (info()->shared_info()->asm_function()) {
    if (info()->osr_frame() && !info()->is_optimizing_from_bytecode()) {
      info()->MarkAsFrameSpecializing();
    }
    info()->MarkAsFunctionContextSpecializing();
  } else {
    if (!FLAG_always_opt) {
      info()->MarkAsBailoutOnUninitialized();
    }
    if (FLAG_turbo_loop_peeling) {
      info()->MarkAsLoopPeelingEnabled();
    }
  }
  if (info()->is_optimizing_from_bytecode()) {
    info()->MarkAsDeoptimizationEnabled();
    if (FLAG_inline_accessors) {
      info()->MarkAsAccessorInliningEnabled();
    }
    if (info()->closure()->feedback_vector_cell()->map() ==
        isolate()->heap()->one_closure_cell_map()) {
      info()->MarkAsFunctionContextSpecializing();
    }
  }
  if (!info()->is_optimizing_from_bytecode()) {
    if (!Compiler::EnsureBaselineCode(info())) return FAILED;
  } else if (FLAG_turbo_inlining) {
    info()->MarkAsInliningEnabled();
  }

  linkage_ = new (info()->zone())
      Linkage(Linkage::ComputeIncoming(info()->zone(), info()));

  if (!pipeline_.CreateGraph()) {
    if (isolate()->has_pending_exception()) return FAILED;  // Stack overflowed.
    return AbortOptimization(kGraphBuildingFailed);
  }

  if (info()->is_osr()) data_.InitializeOsrHelper();

  // Make sure that we have generated the maximal number of deopt entries.
  // This is in order to avoid triggering the generation of deopt entries later
  // during code assembly.
  Deoptimizer::EnsureCodeForMaxDeoptimizationEntries(isolate());

  return SUCCEEDED;
}

PipelineCompilationJob::Status PipelineCompilationJob::ExecuteJobImpl() {
  if (!pipeline_.OptimizeGraph(linkage_)) return FAILED;
  return SUCCEEDED;
}

PipelineCompilationJob::Status PipelineCompilationJob::FinalizeJobImpl() {
  pipeline_.AssembleCode(linkage_);
  Handle<Code> code = pipeline_.FinalizeCode();
  if (code.is_null()) {
    if (info()->bailout_reason() == kNoReason) {
      return AbortOptimization(kCodeGenerationFailed);
    }
    return FAILED;
  }
  info()->dependencies()->Commit(code);
  info()->SetCode(code);
  if (info()->is_deoptimization_enabled()) {
    info()->context()->native_context()->AddOptimizedCode(*code);
    RegisterWeakObjectsInOptimizedCode(code);
  }
  return SUCCEEDED;
}

namespace {

void AddWeakObjectToCodeDependency(Isolate* isolate, Handle<HeapObject> object,
                                   Handle<Code> code) {
  Handle<WeakCell> cell = Code::WeakCellFor(code);
  Heap* heap = isolate->heap();
  if (heap->InNewSpace(*object)) {
    heap->AddWeakNewSpaceObjectToCodeDependency(object, cell);
  } else {
    Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(object));
    dep =
        DependentCode::InsertWeakCode(dep, DependentCode::kWeakCodeGroup, cell);
    heap->AddWeakObjectToCodeDependency(object, dep);
  }
}

}  // namespace

void PipelineCompilationJob::RegisterWeakObjectsInOptimizedCode(
    Handle<Code> code) {
  DCHECK(code->is_optimized_code());
  std::vector<Handle<Map>> maps;
  std::vector<Handle<HeapObject>> objects;
  {
    DisallowHeapAllocation no_gc;
    int const mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                          RelocInfo::ModeMask(RelocInfo::CELL);
    for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
      RelocInfo::Mode mode = it.rinfo()->rmode();
      if (mode == RelocInfo::CELL &&
          code->IsWeakObjectInOptimizedCode(it.rinfo()->target_cell())) {
        objects.push_back(handle(it.rinfo()->target_cell(), isolate()));
      } else if (mode == RelocInfo::EMBEDDED_OBJECT &&
                 code->IsWeakObjectInOptimizedCode(
                     it.rinfo()->target_object())) {
        Handle<HeapObject> object(HeapObject::cast(it.rinfo()->target_object()),
                                  isolate());
        if (object->IsMap()) {
          maps.push_back(Handle<Map>::cast(object));
        } else {
          objects.push_back(object);
        }
      }
    }
  }
  for (Handle<Map> map : maps) {
    if (map->dependent_code()->IsEmpty(DependentCode::kWeakCodeGroup)) {
      isolate()->heap()->AddRetainedMap(map);
    }
    Map::AddDependentCode(map, DependentCode::kWeakCodeGroup, code);
  }
  for (Handle<HeapObject> object : objects) {
    AddWeakObjectToCodeDependency(isolate(), object, code);
  }
  code->set_can_have_weak_objects(true);
}

class PipelineWasmCompilationJob final : public CompilationJob {
 public:
  explicit PipelineWasmCompilationJob(
      CompilationInfo* info, JSGraph* jsgraph, CallDescriptor* descriptor,
      SourcePositionTable* source_positions,
      ZoneVector<trap_handler::ProtectedInstructionData>* protected_insts,
      wasm::ModuleOrigin wasm_origin)
      : CompilationJob(info->isolate(), info, "TurboFan",
                       State::kReadyToExecute),
        zone_stats_(info->isolate()->allocator()),
        pipeline_statistics_(CreatePipelineStatistics(info, &zone_stats_)),
        data_(&zone_stats_, info, jsgraph, pipeline_statistics_.get(),
              source_positions, protected_insts),
        pipeline_(&data_),
        linkage_(descriptor),
        wasm_origin_(wasm_origin) {}

 protected:
  Status PrepareJobImpl() final;
  Status ExecuteJobImpl() final;
  Status FinalizeJobImpl() final;

 private:
  size_t AllocatedMemory() const override;

  ZoneStats zone_stats_;
  std::unique_ptr<PipelineStatistics> pipeline_statistics_;
  PipelineData data_;
  PipelineImpl pipeline_;
  Linkage linkage_;
  wasm::ModuleOrigin wasm_origin_;
};

PipelineWasmCompilationJob::Status
PipelineWasmCompilationJob::PrepareJobImpl() {
  UNREACHABLE();  // Prepare should always be skipped for WasmCompilationJob.
  return SUCCEEDED;
}

PipelineWasmCompilationJob::Status
PipelineWasmCompilationJob::ExecuteJobImpl() {
  if (FLAG_trace_turbo) {
    TurboJsonFile json_of(info(), std::ios_base::trunc);
    json_of << "{\"function\":\"" << info()->GetDebugName().get()
            << "\", \"source\":\"\",\n\"phases\":[";
  }

  pipeline_.RunPrintAndVerify("Machine", true);
  if (FLAG_wasm_opt || wasm_origin_ == wasm::ModuleOrigin::kAsmJsOrigin) {
    PipelineData* data = &data_;
    PipelineRunScope scope(data, "Wasm optimization");
    JSGraphReducer graph_reducer(data->jsgraph(), scope.zone());
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common());
    ValueNumberingReducer value_numbering(scope.zone(), data->graph()->zone());
    MachineOperatorReducer machine_reducer(
        data->jsgraph(), wasm_origin_ == wasm::ModuleOrigin::kAsmJsOrigin);
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->common(), data->machine());
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &machine_reducer);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
    pipeline_.RunPrintAndVerify("Optimized Machine", true);
  }

  if (!pipeline_.ScheduleAndSelectInstructions(&linkage_, true)) return FAILED;
  return SUCCEEDED;
}

size_t PipelineWasmCompilationJob::AllocatedMemory() const {
  return pipeline_.data_->zone_stats()->GetCurrentAllocatedBytes();
}

PipelineWasmCompilationJob::Status
PipelineWasmCompilationJob::FinalizeJobImpl() {
  pipeline_.AssembleCode(&linkage_);
  pipeline_.FinalizeCode();
  return SUCCEEDED;
}

template <typename Phase>
void PipelineImpl::Run() {
  PipelineRunScope scope(this->data_, Phase::phase_name());
  Phase phase;
  phase.Run(this->data_, scope.zone());
}

template <typename Phase, typename Arg0>
void PipelineImpl::Run(Arg0 arg_0) {
  PipelineRunScope scope(this->data_, Phase::phase_name());
  Phase phase;
  phase.Run(this->data_, scope.zone(), arg_0);
}

template <typename Phase, typename Arg0, typename Arg1>
void PipelineImpl::Run(Arg0 arg_0, Arg1 arg_1) {
  PipelineRunScope scope(this->data_, Phase::phase_name());
  Phase phase;
  phase.Run(this->data_, scope.zone(), arg_0, arg_1);
}
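
// Every phase below follows the implicit protocol expected by the Run<Phase>()
// helpers: a struct with a static phase_name() and a Run(PipelineData*, Zone*,
// ...) method. A minimal, purely hypothetical phase would look like:
//
//   struct ExamplePhase {
//     static const char* phase_name() { return "example"; }
//     void Run(PipelineData* data, Zone* temp_zone) {
//       // Inspect or rewrite data->graph(), using temp_zone for scratch
//       // allocations that die when the phase's PipelineRunScope ends.
//     }
//   };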

struct LoopAssignmentAnalysisPhase {
  static const char* phase_name() { return "loop assignment analysis"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    if (!data->info()->is_optimizing_from_bytecode()) {
      AstLoopAssignmentAnalyzer analyzer(data->graph_zone(), data->info());
      LoopAssignmentAnalysis* loop_assignment = analyzer.Analyze();
      data->set_loop_assignment(loop_assignment);
    }
  }
};


struct GraphBuilderPhase {
  static const char* phase_name() { return "graph builder"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    if (data->info()->is_optimizing_from_bytecode()) {
      // Bytecode graph builder assumes deoptimization is enabled.
      DCHECK(data->info()->is_deoptimization_enabled());
      JSTypeHintLowering::Flags flags = JSTypeHintLowering::kNoFlags;
      if (data->info()->is_bailout_on_uninitialized()) {
        flags |= JSTypeHintLowering::kBailoutOnUninitialized;
      }
      BytecodeGraphBuilder graph_builder(
          temp_zone, data->info()->shared_info(),
          handle(data->info()->closure()->feedback_vector()),
          data->info()->osr_ast_id(), data->jsgraph(), CallFrequency(1.0f),
          data->source_positions(), SourcePosition::kNotInlined, flags);
      graph_builder.CreateGraph();
    } else {
      // AST-based graph builder assumes deoptimization is disabled.
      DCHECK(!data->info()->is_deoptimization_enabled());
      AstGraphBuilderWithPositions graph_builder(
          temp_zone, data->info(), data->jsgraph(), CallFrequency(1.0f),
          data->loop_assignment(), data->source_positions());
      if (!graph_builder.CreateGraph()) {
        data->set_compilation_failed();
      }
    }
  }
};
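
// The pipeline thus has two front ends: the BytecodeGraphBuilder (used when
// optimizing from bytecode, with deoptimization support required) and the
// AST-based builder (deoptimization disabled), selected per CompilationInfo.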

namespace {

Maybe<OuterContext> GetModuleContext(Handle<JSFunction> closure) {
  Context* current = closure->context();
  size_t distance = 0;
  while (!current->IsNativeContext()) {
    if (current->IsModuleContext()) {
      return Just(OuterContext(handle(current), distance));
    }
    current = current->previous();
    distance++;
  }
  return Nothing<OuterContext>();
}

Maybe<OuterContext> ChooseSpecializationContext(CompilationInfo* info) {
  if (info->is_function_context_specializing()) {
    DCHECK(info->has_context());
    return Just(OuterContext(handle(info->context()), 0));
  }
  return GetModuleContext(info->closure());
}

}  // anonymous namespace

struct InliningPhase {
  static const char* phase_name() { return "inlining"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common());
    CheckpointElimination checkpoint_elimination(&graph_reducer);
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->common(), data->machine());
    JSCallReducer call_reducer(&graph_reducer, data->jsgraph(),
                               data->info()->is_bailout_on_uninitialized()
                                   ? JSCallReducer::kBailoutOnUninitialized
                                   : JSCallReducer::kNoFlags,
                               data->native_context(),
                               data->info()->dependencies());
    JSContextSpecialization context_specialization(
        &graph_reducer, data->jsgraph(),
        ChooseSpecializationContext(data->info()),
        data->info()->is_function_context_specializing()
            ? data->info()->closure()
            : MaybeHandle<JSFunction>());
    JSFrameSpecialization frame_specialization(
        &graph_reducer, data->info()->osr_frame(), data->jsgraph());
    JSNativeContextSpecialization::Flags flags =
        JSNativeContextSpecialization::kNoFlags;
    if (data->info()->is_accessor_inlining_enabled()) {
      flags |= JSNativeContextSpecialization::kAccessorInliningEnabled;
    }
    if (data->info()->is_bailout_on_uninitialized()) {
      flags |= JSNativeContextSpecialization::kBailoutOnUninitialized;
    }
    JSNativeContextSpecialization native_context_specialization(
        &graph_reducer, data->jsgraph(), flags, data->native_context(),
        data->info()->dependencies(), temp_zone);
    JSInliningHeuristic inlining(
        &graph_reducer, data->info()->is_inlining_enabled()
                            ? JSInliningHeuristic::kGeneralInlining
                            : JSInliningHeuristic::kRestrictedInlining,
        temp_zone, data->info(), data->jsgraph(), data->source_positions());
    JSIntrinsicLowering intrinsic_lowering(
        &graph_reducer, data->jsgraph(),
        data->info()->is_deoptimization_enabled()
            ? JSIntrinsicLowering::kDeoptimizationEnabled
            : JSIntrinsicLowering::kDeoptimizationDisabled);
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &checkpoint_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    if (data->info()->is_frame_specializing()) {
      AddReducer(data, &graph_reducer, &frame_specialization);
    }
    if (data->info()->is_deoptimization_enabled()) {
      AddReducer(data, &graph_reducer, &native_context_specialization);
    }
    AddReducer(data, &graph_reducer, &context_specialization);
    AddReducer(data, &graph_reducer, &intrinsic_lowering);
    if (data->info()->is_deoptimization_enabled()) {
      AddReducer(data, &graph_reducer, &call_reducer);
    }
    AddReducer(data, &graph_reducer, &inlining);
    graph_reducer.ReduceGraph();
  }
};


struct TyperPhase {
  static const char* phase_name() { return "typer"; }

  void Run(PipelineData* data, Zone* temp_zone, Typer* typer) {
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    LoopVariableOptimizer induction_vars(data->jsgraph()->graph(),
                                         data->common(), temp_zone);
    if (FLAG_turbo_loop_variable) induction_vars.Run();
    typer->Run(roots, &induction_vars);
  }
};

struct UntyperPhase {
  static const char* phase_name() { return "untyper"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    class RemoveTypeReducer final : public Reducer {
     public:
      Reduction Reduce(Node* node) final {
        if (NodeProperties::IsTyped(node)) {
          NodeProperties::RemoveType(node);
          return Changed(node);
        }
        return NoChange();
      }
    };

    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    for (Node* node : roots) {
      NodeProperties::RemoveType(node);
    }

    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    RemoveTypeReducer remove_type_reducer;
    AddReducer(data, &graph_reducer, &remove_type_reducer);
    graph_reducer.ReduceGraph();
  }
};

struct OsrDeconstructionPhase {
  static const char* phase_name() { return "OSR deconstruction"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    trimmer.TrimGraph(roots.begin(), roots.end());

    // TODO(neis): Use data->osr_helper() here once AST graph builder is gone.
    OsrHelper osr_helper(data->info());
    osr_helper.Deconstruct(data->jsgraph(), data->common(), temp_zone);
  }
};


struct TypedLoweringPhase {
  static const char* phase_name() { return "typed lowering"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common());
    JSBuiltinReducer builtin_reducer(
        &graph_reducer, data->jsgraph(),
        data->info()->is_deoptimization_enabled()
            ? JSBuiltinReducer::kDeoptimizationEnabled
            : JSBuiltinReducer::kNoFlags,
        data->info()->dependencies(), data->native_context());
    Handle<FeedbackVector> feedback_vector(
        data->info()->closure()->feedback_vector());
    JSCreateLowering create_lowering(
        &graph_reducer, data->info()->dependencies(), data->jsgraph(),
        feedback_vector, data->native_context(), temp_zone);
    JSTypedLowering::Flags typed_lowering_flags = JSTypedLowering::kNoFlags;
    if (data->info()->is_deoptimization_enabled()) {
      typed_lowering_flags |= JSTypedLowering::kDeoptimizationEnabled;
    }
    JSTypedLowering typed_lowering(&graph_reducer, data->info()->dependencies(),
                                   typed_lowering_flags, data->jsgraph(),
                                   temp_zone);
    TypedOptimization typed_optimization(
        &graph_reducer, data->info()->dependencies(),
        data->info()->is_deoptimization_enabled()
            ? TypedOptimization::kDeoptimizationEnabled
            : TypedOptimization::kNoFlags,
        data->jsgraph());
    SimplifiedOperatorReducer simple_reducer(&graph_reducer, data->jsgraph());
    CheckpointElimination checkpoint_elimination(&graph_reducer);
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->common(), data->machine());
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &builtin_reducer);
    if (data->info()->is_deoptimization_enabled()) {
      AddReducer(data, &graph_reducer, &create_lowering);
    }
    AddReducer(data, &graph_reducer, &typed_optimization);
    AddReducer(data, &graph_reducer, &typed_lowering);
    AddReducer(data, &graph_reducer, &simple_reducer);
    AddReducer(data, &graph_reducer, &checkpoint_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    graph_reducer.ReduceGraph();
  }
};


struct EscapeAnalysisPhase {
  static const char* phase_name() { return "escape analysis"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    EscapeAnalysis escape_analysis(data->graph(), data->jsgraph()->common(),
                                   temp_zone);
    if (!escape_analysis.Run()) return;
    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    EscapeAnalysisReducer escape_reducer(&graph_reducer, data->jsgraph(),
                                         &escape_analysis, temp_zone);
    AddReducer(data, &graph_reducer, &escape_reducer);
    graph_reducer.ReduceGraph();
    if (escape_reducer.compilation_failed()) {
      data->set_compilation_failed();
      return;
    }
    escape_reducer.VerifyReplacement();
  }
};
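
// Note the asymmetry above: if the analysis itself bails out, the phase is
// simply a no-op, whereas a failure while applying replacements aborts the
// whole compilation via set_compilation_failed().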

struct SimplifiedLoweringPhase {
  static const char* phase_name() { return "simplified lowering"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    SimplifiedLowering lowering(data->jsgraph(), temp_zone,
                                data->source_positions());
    lowering.LowerAllNodes();
  }
};

struct LoopPeelingPhase {
  static const char* phase_name() { return "loop peeling"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    trimmer.TrimGraph(roots.begin(), roots.end());

    LoopTree* loop_tree =
        LoopFinder::BuildLoopTree(data->jsgraph()->graph(), temp_zone);
    LoopPeeler::PeelInnerLoopsOfTree(data->graph(), data->common(), loop_tree,
                                     temp_zone);
  }
};

struct LoopExitEliminationPhase {
  static const char* phase_name() { return "loop exit elimination"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    LoopPeeler::EliminateLoopExits(data->graph(), temp_zone);
  }
};

struct ConcurrentOptimizationPrepPhase {
  static const char* phase_name() { return "concurrency preparation"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    // Make sure we cache these code stubs.
    data->jsgraph()->CEntryStubConstant(1);
    data->jsgraph()->CEntryStubConstant(2);
    data->jsgraph()->CEntryStubConstant(3);

    // This is needed for escape analysis.
    NodeProperties::SetType(data->jsgraph()->FalseConstant(), Type::Boolean());
    NodeProperties::SetType(data->jsgraph()->TrueConstant(), Type::Boolean());
  }
};

struct GenericLoweringPhase {
  static const char* phase_name() { return "generic lowering"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    JSGenericLowering generic_lowering(data->jsgraph());
    AddReducer(data, &graph_reducer, &generic_lowering);
    graph_reducer.ReduceGraph();
  }
};

struct EarlyOptimizationPhase {
  static const char* phase_name() { return "early optimization"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common());
    SimplifiedOperatorReducer simple_reducer(&graph_reducer, data->jsgraph());
    RedundancyElimination redundancy_elimination(&graph_reducer, temp_zone);
    ValueNumberingReducer value_numbering(temp_zone, data->graph()->zone());
    MachineOperatorReducer machine_reducer(data->jsgraph());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->common(), data->machine());
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &simple_reducer);
    AddReducer(data, &graph_reducer, &redundancy_elimination);
    AddReducer(data, &graph_reducer, &machine_reducer);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  }
};

struct ControlFlowOptimizationPhase {
  static const char* phase_name() { return "control flow optimization"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    ControlFlowOptimizer optimizer(data->graph(), data->common(),
                                   data->machine(), temp_zone);
    optimizer.Optimize();
  }
};

struct EffectControlLinearizationPhase {
  static const char* phase_name() { return "effect linearization"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    // The scheduler requires the graphs to be trimmed, so trim now.
    // TODO(jarin) Remove the trimming once the scheduler can handle untrimmed
    // graphs.
    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    trimmer.TrimGraph(roots.begin(), roots.end());

    // Schedule the graph without node splitting so that we can
    // fix the effect and control flow for nodes with low-level side
    // effects (such as changing representation to tagged or
    // 'floating' allocation regions.)
    Schedule* schedule = Scheduler::ComputeSchedule(temp_zone, data->graph(),
                                                    Scheduler::kTempSchedule);
    if (FLAG_turbo_verify) ScheduleVerifier::Run(schedule);
    TraceSchedule(data->info(), schedule);

    // Post-pass for wiring the control/effects
    // - connect allocating representation changes into the control&effect
    //   chains and lower them,
    // - get rid of the region markers,
    // - introduce effect phis and rewire effects to get SSA again.
    EffectControlLinearizer linearizer(data->jsgraph(), schedule, temp_zone,
                                       data->source_positions());
    linearizer.Run();
  }
};

// The store-store elimination greatly benefits from doing a common operator
// reducer and dead code elimination just before it, to eliminate conditional
// deopts with a constant condition.

struct DeadCodeEliminationPhase {
  static const char* phase_name() { return "dead code elimination"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->common(), data->machine());
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    graph_reducer.ReduceGraph();
  }
};

struct StoreStoreEliminationPhase {
  static const char* phase_name() { return "store-store elimination"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    trimmer.TrimGraph(roots.begin(), roots.end());

    StoreStoreElimination::Run(data->jsgraph(), temp_zone);
  }
};

struct LoadEliminationPhase {
  static const char* phase_name() { return "load elimination"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    BranchElimination branch_condition_elimination(&graph_reducer,
                                                   data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common());
    RedundancyElimination redundancy_elimination(&graph_reducer, temp_zone);
    LoadElimination load_elimination(&graph_reducer, data->jsgraph(),
                                     temp_zone);
    CheckpointElimination checkpoint_elimination(&graph_reducer);
    ValueNumberingReducer value_numbering(temp_zone, data->graph()->zone());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->common(), data->machine());
    AddReducer(data, &graph_reducer, &branch_condition_elimination);
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &redundancy_elimination);
    AddReducer(data, &graph_reducer, &load_elimination);
    AddReducer(data, &graph_reducer, &checkpoint_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  }
};

struct MemoryOptimizationPhase {
  static const char* phase_name() { return "memory optimization"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    // The memory optimizer requires the graphs to be trimmed, so trim now.
    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    trimmer.TrimGraph(roots.begin(), roots.end());

    // Optimize allocations and load/store operations.
    MemoryOptimizer optimizer(data->jsgraph(), temp_zone);
    optimizer.Optimize();
  }
};

struct LateOptimizationPhase {
  static const char* phase_name() { return "late optimization"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    BranchElimination branch_condition_elimination(&graph_reducer,
                                                   data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common());
    ValueNumberingReducer value_numbering(temp_zone, data->graph()->zone());
    MachineOperatorReducer machine_reducer(data->jsgraph());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->common(), data->machine());
    SelectLowering select_lowering(data->jsgraph()->graph(),
                                   data->jsgraph()->common());
    TailCallOptimization tco(data->common(), data->graph());
    AddReducer(data, &graph_reducer, &branch_condition_elimination);
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &machine_reducer);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &select_lowering);
    AddReducer(data, &graph_reducer, &tco);
    AddReducer(data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  }
};

struct EarlyGraphTrimmingPhase {
  static const char* phase_name() { return "early graph trimming"; }
  void Run(PipelineData* data, Zone* temp_zone) {
    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    trimmer.TrimGraph(roots.begin(), roots.end());
  }
};


struct LateGraphTrimmingPhase {
  static const char* phase_name() { return "late graph trimming"; }
  void Run(PipelineData* data, Zone* temp_zone) {
    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    if (data->jsgraph()) {
      data->jsgraph()->GetCachedNodes(&roots);
    }
    trimmer.TrimGraph(roots.begin(), roots.end());
  }
};
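
// Unlike the early variant, the late trimmer must tolerate pipelines that
// carry no JSGraph at all (e.g. the machine graph testing entry point), hence
// the null check before collecting cached nodes.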


struct ComputeSchedulePhase {
  static const char* phase_name() { return "scheduling"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    Schedule* schedule = Scheduler::ComputeSchedule(
        temp_zone, data->graph(), data->info()->is_splitting_enabled()
                                      ? Scheduler::kSplitNodes
                                      : Scheduler::kNoFlags);
    if (FLAG_turbo_verify) ScheduleVerifier::Run(schedule);
    data->set_schedule(schedule);
  }
};


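// Covers the scheduled graph with target-specific instructions, producing
// the InstructionSequence consumed by register allocation. Source position
// granularity, instruction scheduling and serialization support are
// selected via the flags passed to the selector below.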
struct InstructionSelectionPhase {
  static const char* phase_name() { return "select instructions"; }

  void Run(PipelineData* data, Zone* temp_zone, Linkage* linkage) {
    InstructionSelector selector(
        temp_zone, data->graph()->NodeCount(), linkage, data->sequence(),
        data->schedule(), data->source_positions(), data->frame(),
        data->info()->is_source_positions_enabled()
            ? InstructionSelector::kAllSourcePositions
            : InstructionSelector::kCallSourcePositions,
        InstructionSelector::SupportedFeatures(),
        FLAG_turbo_instruction_scheduling
            ? InstructionSelector::kEnableScheduling
            : InstructionSelector::kDisableScheduling,
        data->info()->will_serialize()
            ? InstructionSelector::kEnableSerialization
            : InstructionSelector::kDisableSerialization);
    if (!selector.SelectInstructions()) {
      data->set_compilation_failed();
    }
  }
};


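// The small phase structs below wrap the individual steps of the register
// allocator; PipelineImpl::AllocateRegisters further down runs them in
// order. They all follow the uniform phase shape used throughout this
// file, roughly (illustrative sketch only; MyPhase is hypothetical):
//
//   struct MyPhase {
//     static const char* phase_name() { return "my phase"; }
//     void Run(PipelineData* data, Zone* temp_zone) {
//       // Mutate *data, using temp_zone for phase-local allocations.
//     }
//   };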
struct MeetRegisterConstraintsPhase {
  static const char* phase_name() { return "meet register constraints"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    ConstraintBuilder builder(data->register_allocation_data());
    builder.MeetRegisterConstraints();
  }
};


struct ResolvePhisPhase {
  static const char* phase_name() { return "resolve phis"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    ConstraintBuilder builder(data->register_allocation_data());
    builder.ResolvePhis();
  }
};


struct BuildLiveRangesPhase {
  static const char* phase_name() { return "build live ranges"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    LiveRangeBuilder builder(data->register_allocation_data(), temp_zone);
    builder.BuildLiveRanges();
  }
};


struct SplinterLiveRangesPhase {
  static const char* phase_name() { return "splinter live ranges"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    LiveRangeSeparator live_range_splinterer(data->register_allocation_data(),
                                             temp_zone);
    live_range_splinterer.Splinter();
  }
};


template <typename RegAllocator>
struct AllocateGeneralRegistersPhase {
  static const char* phase_name() { return "allocate general registers"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    RegAllocator allocator(data->register_allocation_data(), GENERAL_REGISTERS,
                           temp_zone);
    allocator.AllocateRegisters();
  }
};

template <typename RegAllocator>
struct AllocateFPRegistersPhase {
  static const char* phase_name() { return "allocate f.p. registers"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    RegAllocator allocator(data->register_allocation_data(), FP_REGISTERS,
                           temp_zone);
    allocator.AllocateRegisters();
  }
};


struct MergeSplintersPhase {
  static const char* phase_name() { return "merge splintered ranges"; }
  void Run(PipelineData* pipeline_data, Zone* temp_zone) {
    RegisterAllocationData* data = pipeline_data->register_allocation_data();
    LiveRangeMerger live_range_merger(data, temp_zone);
    live_range_merger.Merge();
  }
};


struct LocateSpillSlotsPhase {
  static const char* phase_name() { return "locate spill slots"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    SpillSlotLocator locator(data->register_allocation_data());
    locator.LocateSpillSlots();
  }
};


struct AssignSpillSlotsPhase {
  static const char* phase_name() { return "assign spill slots"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    OperandAssigner assigner(data->register_allocation_data());
    assigner.AssignSpillSlots();
  }
};


struct CommitAssignmentPhase {
  static const char* phase_name() { return "commit assignment"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    OperandAssigner assigner(data->register_allocation_data());
    assigner.CommitAssignment();
  }
};


struct PopulateReferenceMapsPhase {
  static const char* phase_name() { return "populate pointer maps"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    ReferenceMapPopulator populator(data->register_allocation_data());
    populator.PopulateReferenceMaps();
  }
};


struct ConnectRangesPhase {
  static const char* phase_name() { return "connect ranges"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    LiveRangeConnector connector(data->register_allocation_data());
    connector.ConnectRanges(temp_zone);
  }
};


struct ResolveControlFlowPhase {
  static const char* phase_name() { return "resolve control flow"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    LiveRangeConnector connector(data->register_allocation_data());
    connector.ResolveControlFlow(temp_zone);
  }
};


struct OptimizeMovesPhase {
  static const char* phase_name() { return "optimize moves"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    MoveOptimizer move_optimizer(temp_zone, data->sequence());
    move_optimizer.Run();
  }
};


struct FrameElisionPhase {
  static const char* phase_name() { return "frame elision"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    FrameElider(data->sequence()).Run();
  }
};


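// Computes which jumps merely forward to other blocks and rewrites jump
// targets to skip such blocks; guarded by --turbo-jt at the call site.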
struct JumpThreadingPhase {
  static const char* phase_name() { return "jump threading"; }

  void Run(PipelineData* data, Zone* temp_zone, bool frame_at_start) {
    ZoneVector<RpoNumber> result(temp_zone);
    if (JumpThreading::ComputeForwarding(temp_zone, result, data->sequence(),
                                         frame_at_start)) {
      JumpThreading::ApplyForwarding(result, data->sequence());
    }
  }
};

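// Code generation is split in two: AssembleCodePhase emits instructions
// into the code generator's buffer, and FinalizeCodePhase turns that
// buffer into a heap-allocated Code object.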
struct AssembleCodePhase {
  static const char* phase_name() { return "assemble code"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    data->code_generator()->AssembleCode();
  }
};

struct FinalizeCodePhase {
  static const char* phase_name() { return "finalize code"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    data->set_code(data->code_generator()->FinalizeCode());
  }
};


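// Emits --trace-turbo output: a JSON dump of the graph after the given
// phase (suitable for graph visualizers such as Turbolizer) and, under
// --trace-turbo-graph, a plain-text RPO listing.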
struct PrintGraphPhase {
  static const char* phase_name() { return nullptr; }

  void Run(PipelineData* data, Zone* temp_zone, const char* phase) {
    CompilationInfo* info = data->info();
    Graph* graph = data->graph();

    {  // Print JSON.
      AllowHandleDereference allow_deref;
      TurboJsonFile json_of(info, std::ios_base::app);
      json_of << "{\"name\":\"" << phase << "\",\"type\":\"graph\",\"data\":"
              << AsJSON(*graph, data->source_positions()) << "},\n";
    }

    if (FLAG_trace_turbo_graph) {  // Simple textual RPO.
      AllowHandleDereference allow_deref;
      CodeTracer::Scope tracing_scope(info->isolate()->GetCodeTracer());
      OFStream os(tracing_scope.file());
      os << "-- Graph after " << phase << " -- " << std::endl;
      os << AsRPO(*graph);
    }
  }
};


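// Runs the graph verifier; `untyped` controls whether type annotations are
// checked, and `values_only` restricts checking to value inputs.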
struct VerifyGraphPhase {
  static const char* phase_name() { return nullptr; }

  void Run(PipelineData* data, Zone* temp_zone, const bool untyped,
           bool values_only = false) {
    Verifier::Run(data->graph(), !untyped ? Verifier::TYPED : Verifier::UNTYPED,
                  values_only ? Verifier::kValuesOnly : Verifier::kAll);
  }
};

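// Convenience wrapper that traces and/or verifies the graph after a phase,
// depending on flags. Typical call sites look like this (taken from
// OptimizeGraph below):
//
//   Run<MemoryOptimizationPhase>();
//   RunPrintAndVerify("Memory optimized", true);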
void PipelineImpl::RunPrintAndVerify(const char* phase, bool untyped) {
  if (FLAG_trace_turbo) {
    Run<PrintGraphPhase>(phase);
  }
  if (FLAG_turbo_verify) {
    Run<VerifyGraphPhase>(untyped);
  }
}

bool PipelineImpl::CreateGraph() {
  PipelineData* data = this->data_;

  data->BeginPhaseKind("graph creation");

  if (FLAG_trace_turbo) {
    CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Begin compiling method " << info()->GetDebugName().get()
       << " using Turbofan" << std::endl;
    TurboCfgFile tcf(isolate());
    tcf << AsC1VCompilation(info());
  }

  data->source_positions()->AddDecorator();

  if (FLAG_loop_assignment_analysis) {
    Run<LoopAssignmentAnalysisPhase>();
  }

  Run<GraphBuilderPhase>();
  if (data->compilation_failed()) {
    data->EndPhaseKind();
    return false;
  }
  RunPrintAndVerify("Initial untyped", true);

  // Perform OSR deconstruction.
  if (info()->is_osr()) {
    Run<OsrDeconstructionPhase>();
    RunPrintAndVerify("OSR deconstruction", true);
  }

  // Perform function context specialization and inlining (if enabled).
  Run<InliningPhase>();
  RunPrintAndVerify("Inlined", true);

  // Remove dead->live edges from the graph.
  Run<EarlyGraphTrimmingPhase>();
  RunPrintAndVerify("Early trimmed", true);

  // Run the type-sensitive lowerings and optimizations on the graph.
  {
    // Determine the Typer operation flags.
    Typer::Flags flags = Typer::kNoFlags;
    if (is_sloppy(info()->shared_info()->language_mode()) &&
        info()->shared_info()->IsUserJavaScript()) {
      // Sloppy mode functions always have an Object for this.
      flags |= Typer::kThisIsReceiver;
    }
    if (IsClassConstructor(info()->shared_info()->kind())) {
      // Class constructors cannot be [[Call]]ed.
      flags |= Typer::kNewTargetIsReceiver;
    }

    // Type the graph and keep the Typer running on newly created nodes within
    // this scope; the Typer is automatically unlinked from the Graph once we
    // leave this scope below.
    Typer typer(isolate(), flags, data->graph());
    Run<TyperPhase>(&typer);
    RunPrintAndVerify("Typed");

    // Lower JSOperators where we can determine types.
    Run<TypedLoweringPhase>();
    RunPrintAndVerify("Lowered typed");
  }

  // Do some hacky things to prepare for the optimization phase.
  // (caching handles, etc.).
  Run<ConcurrentOptimizationPrepPhase>();

  data->EndPhaseKind();

  return true;
}

bool PipelineImpl::OptimizeGraph(Linkage* linkage) {
  PipelineData* data = this->data_;

  data->BeginPhaseKind("lowering");

  if (data->info()->is_loop_peeling_enabled()) {
    Run<LoopPeelingPhase>();
    RunPrintAndVerify("Loops peeled", true);
  } else {
    Run<LoopExitEliminationPhase>();
    RunPrintAndVerify("Loop exits eliminated", true);
  }

  if (!data->is_asm()) {
    if (FLAG_turbo_load_elimination) {
      Run<LoadEliminationPhase>();
      RunPrintAndVerify("Load eliminated");
    }

    if (FLAG_turbo_escape) {
      Run<EscapeAnalysisPhase>();
      if (data->compilation_failed()) {
        info()->AbortOptimization(kCyclicObjectStateDetectedInEscapeAnalysis);
        data->EndPhaseKind();
        return false;
      }
      RunPrintAndVerify("Escape Analysed");
    }
  }

  // Perform simplified lowering. This has to run w/o the Typer decorator,
  // because we cannot compute meaningful types anyway, and the computed
  // types might even conflict with the representation/truncation logic.
  Run<SimplifiedLoweringPhase>();
  RunPrintAndVerify("Simplified lowering", true);

  // From now on it is invalid to look at types on the nodes, because the
  // types on the nodes might not make sense after representation selection
  // due to the way we handle truncations; if we wanted to look at types
  // afterwards we'd essentially need to re-type (large portions of) the
  // graph.

  // In order to catch bugs related to type access after this point, we now
  // remove the types from the nodes (currently only in Debug builds).
#ifdef DEBUG
  Run<UntyperPhase>();
  RunPrintAndVerify("Untyped", true);
#endif

  // Run generic lowering pass.
  Run<GenericLoweringPhase>();
  RunPrintAndVerify("Generic lowering", true);

  data->BeginPhaseKind("block building");

  // Run early optimization pass.
  Run<EarlyOptimizationPhase>();
  RunPrintAndVerify("Early optimized", true);

  Run<EffectControlLinearizationPhase>();
  RunPrintAndVerify("Effect and control linearized", true);

  Run<DeadCodeEliminationPhase>();
  RunPrintAndVerify("Dead code elimination", true);

  if (FLAG_turbo_store_elimination) {
    Run<StoreStoreEliminationPhase>();
    RunPrintAndVerify("Store-store elimination", true);
  }

  // Optimize control flow.
  if (FLAG_turbo_cf_optimization) {
    Run<ControlFlowOptimizationPhase>();
    RunPrintAndVerify("Control flow optimized", true);
  }

  // Optimize memory access and allocation operations.
  Run<MemoryOptimizationPhase>();
  // TODO(jarin, rossberg): Remove UNTYPED once machine typing works.
  RunPrintAndVerify("Memory optimized", true);

  // Lower changes that have been inserted before.
  Run<LateOptimizationPhase>();
  // TODO(jarin, rossberg): Remove UNTYPED once machine typing works.
  RunPrintAndVerify("Late optimized", true);

  data->source_positions()->RemoveDecorator();

  return ScheduleAndSelectInstructions(linkage, true);
}

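// Entry point for code stubs: the caller supplies a finished graph and
// schedule, so only the back end of the pipeline runs here (verification,
// instruction selection, register allocation and code generation).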
Handle<Code> Pipeline::GenerateCodeForCodeStub(Isolate* isolate,
                                               CallDescriptor* call_descriptor,
                                               Graph* graph, Schedule* schedule,
                                               Code::Flags flags,
                                               const char* debug_name) {
  CompilationInfo info(CStrVector(debug_name), isolate, graph->zone(), flags);
  if (isolate->serializer_enabled()) info.PrepareForSerializing();

  // Construct a pipeline for scheduling and code generation.
  ZoneStats zone_stats(isolate->allocator());
  SourcePositionTable source_positions(graph);
  PipelineData data(&zone_stats, &info, graph, schedule, &source_positions);
  data.set_verify_graph(FLAG_verify_csa);
  std::unique_ptr<PipelineStatistics> pipeline_statistics;
  if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
    pipeline_statistics.reset(new PipelineStatistics(&info, &zone_stats));
    pipeline_statistics->BeginPhaseKind("stub codegen");
  }

  PipelineImpl pipeline(&data);
  DCHECK_NOT_NULL(data.schedule());

  if (FLAG_trace_turbo) {
    {
      CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
      OFStream os(tracing_scope.file());
      os << "---------------------------------------------------\n"
         << "Begin compiling " << debug_name << " using Turbofan" << std::endl;
    }
    {
      TurboJsonFile json_of(&info, std::ios_base::trunc);
      json_of << "{\"function\":\"" << info.GetDebugName().get()
              << "\", \"source\":\"\",\n\"phases\":[";
    }
    pipeline.Run<PrintGraphPhase>("Machine");
  }

  pipeline.Run<VerifyGraphPhase>(false, true);
  return pipeline.ScheduleAndGenerateCode(call_descriptor);
}

// static
Handle<Code> Pipeline::GenerateCodeForTesting(CompilationInfo* info) {
  ZoneStats zone_stats(info->isolate()->allocator());
  std::unique_ptr<PipelineStatistics> pipeline_statistics(
      CreatePipelineStatistics(info, &zone_stats));
  PipelineData data(&zone_stats, info, pipeline_statistics.get());
  PipelineImpl pipeline(&data);

  Linkage linkage(Linkage::ComputeIncoming(data.instruction_zone(), info));

  if (!pipeline.CreateGraph()) return Handle<Code>::null();
  if (!pipeline.OptimizeGraph(&linkage)) return Handle<Code>::null();
  pipeline.AssembleCode(&linkage);
  return pipeline.FinalizeCode();
}

// static
Handle<Code> Pipeline::GenerateCodeForTesting(CompilationInfo* info,
                                              Graph* graph,
                                              Schedule* schedule) {
  CallDescriptor* call_descriptor =
      Linkage::ComputeIncoming(info->zone(), info);
  return GenerateCodeForTesting(info, call_descriptor, graph, schedule);
}

// static
Handle<Code> Pipeline::GenerateCodeForTesting(
    CompilationInfo* info, CallDescriptor* call_descriptor, Graph* graph,
    Schedule* schedule, SourcePositionTable* source_positions) {
  // Construct a pipeline for scheduling and code generation.
  ZoneStats zone_stats(info->isolate()->allocator());
  // TODO(wasm): Refactor code generation to check for non-existing source
  // table, then remove this conditional allocation.
  if (!source_positions)
    source_positions = new (info->zone()) SourcePositionTable(graph);
  PipelineData data(&zone_stats, info, graph, schedule, source_positions);
  std::unique_ptr<PipelineStatistics> pipeline_statistics;
  if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
    pipeline_statistics.reset(new PipelineStatistics(info, &zone_stats));
    pipeline_statistics->BeginPhaseKind("test codegen");
  }

  PipelineImpl pipeline(&data);

  if (FLAG_trace_turbo) {
    TurboJsonFile json_of(info, std::ios_base::trunc);
    json_of << "{\"function\":\"" << info->GetDebugName().get()
            << "\", \"source\":\"\",\n\"phases\":[";
  }
  // TODO(rossberg): Should this really be untyped?
  pipeline.RunPrintAndVerify("Machine", true);

  return pipeline.ScheduleAndGenerateCode(call_descriptor);
}

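// Creates an optimizing compilation job for the given function; actual
// compilation is driven later through the job's prepare/execute/finalize
// steps.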
// static
CompilationJob* Pipeline::NewCompilationJob(Handle<JSFunction> function,
                                            bool has_script) {
  Handle<SharedFunctionInfo> shared = handle(function->shared());
  ParseInfo* parse_info;
  if (!has_script) {
    parse_info = ParseInfo::AllocateWithoutScript(shared);
  } else {
    parse_info = new ParseInfo(shared);
  }
  return new PipelineCompilationJob(parse_info, function);
}

// static
CompilationJob* Pipeline::NewWasmCompilationJob(
    CompilationInfo* info, JSGraph* jsgraph, CallDescriptor* descriptor,
    SourcePositionTable* source_positions,
    ZoneVector<trap_handler::ProtectedInstructionData>* protected_instructions,
    wasm::ModuleOrigin wasm_origin) {
  return new PipelineWasmCompilationJob(info, jsgraph, descriptor,
                                        source_positions,
                                        protected_instructions, wasm_origin);
}

bool Pipeline::AllocateRegistersForTesting(const RegisterConfiguration* config,
                                           InstructionSequence* sequence,
                                           bool run_verifier) {
  CompilationInfo info(ArrayVector("testing"), sequence->isolate(),
                       sequence->zone(), Code::ComputeFlags(Code::STUB));
  ZoneStats zone_stats(sequence->isolate()->allocator());
  PipelineData data(&zone_stats, &info, sequence);
  PipelineImpl pipeline(&data);
  pipeline.data_->InitializeFrameData(nullptr);
  pipeline.AllocateRegisters(config, nullptr, run_verifier);
  return !data.compilation_failed();
}

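// Runs the back end up to (but not including) code generation: scheduling,
// optional machine graph verification, instruction selection, register
// allocation, frame elision and jump threading.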
bool PipelineImpl::ScheduleAndSelectInstructions(Linkage* linkage,
                                                 bool trim_graph) {
  CallDescriptor* call_descriptor = linkage->GetIncomingDescriptor();
  PipelineData* data = this->data_;

  DCHECK_NOT_NULL(data->graph());

  if (trim_graph) {
    Run<LateGraphTrimmingPhase>();
    RunPrintAndVerify("Late trimmed", true);
  }
  if (data->schedule() == nullptr) Run<ComputeSchedulePhase>();
  TraceSchedule(data->info(), data->schedule());

  if (FLAG_turbo_profiling) {
    data->set_profiler_data(BasicBlockInstrumentor::Instrument(
        info(), data->graph(), data->schedule()));
  }

  bool verify_stub_graph = data->verify_graph();
  if (verify_stub_graph ||
      (FLAG_turbo_verify_machine_graph != nullptr &&
       (!strcmp(FLAG_turbo_verify_machine_graph, "*") ||
        !strcmp(FLAG_turbo_verify_machine_graph, data->debug_name())))) {
    if (FLAG_trace_verify_csa) {
      AllowHandleDereference allow_deref;
      CompilationInfo* info = data->info();
      CodeTracer::Scope tracing_scope(info->isolate()->GetCodeTracer());
      OFStream os(tracing_scope.file());
      os << "--------------------------------------------------\n"
         << "--- Verifying " << data->debug_name() << " generated by TurboFan\n"
         << "--------------------------------------------------\n"
         << *data->schedule()
         << "--------------------------------------------------\n"
         << "--- End of " << data->debug_name() << " generated by TurboFan\n"
         << "--------------------------------------------------\n";
    }
    Zone temp_zone(data->isolate()->allocator(), ZONE_NAME);
    MachineGraphVerifier::Run(data->graph(), data->schedule(), linkage,
                              data->info()->IsStub(), data->debug_name(),
                              &temp_zone);
  }

  data->InitializeInstructionSequence(call_descriptor);

  data->InitializeFrameData(call_descriptor);
  // Select and schedule instructions covering the scheduled graph.
  Run<InstructionSelectionPhase>(linkage);
  if (data->compilation_failed()) {
    info()->AbortOptimization(kCodeGenerationFailed);
    data->EndPhaseKind();
    return false;
  }

  if (FLAG_trace_turbo && !data->MayHaveUnverifiableGraph()) {
    AllowHandleDereference allow_deref;
    TurboCfgFile tcf(isolate());
    tcf << AsC1V("CodeGen", data->schedule(), data->source_positions(),
                 data->sequence());
  }

  if (FLAG_trace_turbo) {
    std::ostringstream source_position_output;
    // Output source position information before the graph is deleted.
    data_->source_positions()->Print(source_position_output);
    data_->set_source_position_output(source_position_output.str());
  }

  data->DeleteGraphZone();

  data->BeginPhaseKind("register allocation");

  bool run_verifier = FLAG_turbo_verify_allocation;

  // Allocate registers.
  AllocateRegisters(RegisterConfiguration::Turbofan(), call_descriptor,
                    run_verifier);
  Run<FrameElisionPhase>();
  if (data->compilation_failed()) {
    info()->AbortOptimization(kNotEnoughVirtualRegistersRegalloc);
    data->EndPhaseKind();
    return false;
  }

  // TODO(mtrofin): move this off to the register allocator.
  bool generate_frame_at_start =
      data_->sequence()->instruction_blocks().front()->must_construct_frame();
  // Optimize jumps.
  if (FLAG_turbo_jt) {
    Run<JumpThreadingPhase>(generate_frame_at_start);
  }

  data->EndPhaseKind();

  return true;
}

void PipelineImpl::AssembleCode(Linkage* linkage) {
  PipelineData* data = this->data_;
  data->BeginPhaseKind("code generation");
  data->InitializeCodeGenerator(linkage);
  Run<AssembleCodePhase>();
}

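// Produces the final Code object and, under --trace-turbo, appends the
// disassembly and source position data to the JSON trace.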
Handle<Code> PipelineImpl::FinalizeCode() {
  PipelineData* data = this->data_;
  Run<FinalizeCodePhase>();

  Handle<Code> code = data->code();
  if (data->profiler_data()) {
#if ENABLE_DISASSEMBLER
    std::ostringstream os;
    code->Disassemble(nullptr, os);
    data->profiler_data()->SetCode(&os);
#endif
  }

  info()->SetCode(code);
  v8::internal::CodeGenerator::PrintCode(code, info());

  if (FLAG_trace_turbo) {
    TurboJsonFile json_of(info(), std::ios_base::app);
    json_of << "{\"name\":\"disassembly\",\"type\":\"disassembly\",\"data\":\"";
#if ENABLE_DISASSEMBLER
    std::stringstream disassembly_stream;
    code->Disassemble(nullptr, disassembly_stream);
    std::string disassembly_string(disassembly_stream.str());
    for (const auto& c : disassembly_string) {
      json_of << AsEscapedUC16ForJSON(c);
    }
#endif  // ENABLE_DISASSEMBLER
    json_of << "\"}\n],\n";
    json_of << "\"nodePositions\":";
    json_of << data->source_position_output();
    json_of << "}";

    CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Finished compiling method " << info()->GetDebugName().get()
       << " using Turbofan" << std::endl;
  }

  return code;
}

Handle<Code> PipelineImpl::ScheduleAndGenerateCode(
    CallDescriptor* call_descriptor) {
  Linkage linkage(call_descriptor);

  // Schedule the graph, perform instruction selection and register allocation.
  if (!ScheduleAndSelectInstructions(&linkage, false)) return Handle<Code>();

  // Generate the final machine code.
  AssembleCode(&linkage);
  return FinalizeCode();
}

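// Drives the register allocation phases defined above, in order: register
// constraints and phi resolution, live range construction (optionally
// splintering ranges around deferred code), linear-scan allocation for
// general purpose and floating point registers, splinter merging, spill
// slot assignment, assignment commitment, reference map population, live
// range connection, control flow resolution, move optimization and spill
// slot placement.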
void PipelineImpl::AllocateRegisters(const RegisterConfiguration* config,
                                     CallDescriptor* descriptor,
                                     bool run_verifier) {
  PipelineData* data = this->data_;
  // Don't track usage for this zone in compiler stats.
  std::unique_ptr<Zone> verifier_zone;
  RegisterAllocatorVerifier* verifier = nullptr;
  if (run_verifier) {
    verifier_zone.reset(new Zone(isolate()->allocator(), ZONE_NAME));
    verifier = new (verifier_zone.get()) RegisterAllocatorVerifier(
        verifier_zone.get(), config, data->sequence());
  }

#ifdef DEBUG
  data_->sequence()->ValidateEdgeSplitForm();
  data_->sequence()->ValidateDeferredBlockEntryPaths();
  data_->sequence()->ValidateDeferredBlockExitPaths();
#endif

  data->InitializeRegisterAllocationData(config, descriptor);
  if (info()->is_osr()) data->osr_helper()->SetupFrame(data->frame());

  Run<MeetRegisterConstraintsPhase>();
  Run<ResolvePhisPhase>();
  Run<BuildLiveRangesPhase>();
  if (FLAG_trace_turbo_graph) {
    AllowHandleDereference allow_deref;
    CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "----- Instruction sequence before register allocation -----\n"
       << PrintableInstructionSequence({config, data->sequence()});
  }
  if (verifier != nullptr) {
    CHECK(!data->register_allocation_data()->ExistsUseWithoutDefinition());
    CHECK(data->register_allocation_data()
              ->RangesDefinedInDeferredStayInDeferred());
  }

  if (FLAG_turbo_preprocess_ranges) {
    Run<SplinterLiveRangesPhase>();
  }

  Run<AllocateGeneralRegistersPhase<LinearScanAllocator>>();
  Run<AllocateFPRegistersPhase<LinearScanAllocator>>();

  if (FLAG_turbo_preprocess_ranges) {
    Run<MergeSplintersPhase>();
  }

  Run<AssignSpillSlotsPhase>();

  Run<CommitAssignmentPhase>();

  // TODO(chromium:725559): remove this check once
  // we understand the cause of the bug. We keep just the
  // check at the end of the allocation.
  if (verifier != nullptr) {
    verifier->VerifyAssignment("Immediately after CommitAssignmentPhase.");
  }

  Run<PopulateReferenceMapsPhase>();
  Run<ConnectRangesPhase>();
  Run<ResolveControlFlowPhase>();
  if (FLAG_turbo_move_optimization) {
    Run<OptimizeMovesPhase>();
  }

  Run<LocateSpillSlotsPhase>();

  if (FLAG_trace_turbo_graph) {
    AllowHandleDereference allow_deref;
    CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "----- Instruction sequence after register allocation -----\n"
       << PrintableInstructionSequence({config, data->sequence()});
  }

  if (verifier != nullptr) {
    verifier->VerifyAssignment("End of regalloc pipeline.");
    verifier->VerifyGapMoves();
  }

  if (FLAG_trace_turbo && !data->MayHaveUnverifiableGraph()) {
    TurboCfgFile tcf(data->isolate());
    tcf << AsC1VRegisterAllocationData("CodeGen",
                                       data->register_allocation_data());
  }

  data->DeleteRegisterAllocationZone();
}

CompilationInfo* PipelineImpl::info() const { return data_->info(); }

Isolate* PipelineImpl::isolate() const { return info()->isolate(); }

}  // namespace compiler
}  // namespace internal
}  // namespace v8