// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/interpreter/bytecode-generator.h"

#include "src/api-inl.h"
#include "src/ast/ast-source-ranges.h"
#include "src/ast/scopes.h"
#include "src/builtins/builtins-constructor.h"
#include "src/code-stubs.h"
#include "src/compiler.h"
#include "src/interpreter/bytecode-flags.h"
#include "src/interpreter/bytecode-jump-table.h"
#include "src/interpreter/bytecode-label.h"
#include "src/interpreter/bytecode-register-allocator.h"
#include "src/interpreter/control-flow-builders.h"
#include "src/objects-inl.h"
#include "src/objects/debug-objects.h"
#include "src/objects/literal-objects-inl.h"
#include "src/objects/smi.h"
#include "src/parsing/parse-info.h"
#include "src/parsing/token.h"
#include "src/unoptimized-compilation-info.h"

namespace v8 {
namespace internal {
namespace interpreter {

// Scoped class tracking context objects created by the visitor. Represents
// mutations of the context chain within the function body, allowing pushing and
// popping of the current {context_register} during visitation.
class BytecodeGenerator::ContextScope {
 public:
  ContextScope(BytecodeGenerator* generator, Scope* scope)
      : generator_(generator),
        scope_(scope),
        outer_(generator_->execution_context()),
        register_(Register::current_context()),
        depth_(0) {
    DCHECK(scope->NeedsContext() || outer_ == nullptr);
    if (outer_) {
      depth_ = outer_->depth_ + 1;

      // Push the outer context into a new context register.
      Register outer_context_reg =
          generator_->register_allocator()->NewRegister();
      outer_->set_register(outer_context_reg);
      generator_->builder()->PushContext(outer_context_reg);
    }
    generator_->set_execution_context(this);
  }

  ~ContextScope() {
    if (outer_) {
      DCHECK_EQ(register_.index(), Register::current_context().index());
      generator_->builder()->PopContext(outer_->reg());
      outer_->set_register(register_);
    }
    generator_->set_execution_context(outer_);
  }

  // Returns the depth of the given |scope| for the current execution context.
  int ContextChainDepth(Scope* scope) {
    return scope_->ContextChainLength(scope);
  }

  // Returns the execution context at |depth| in the current context chain if it
  // is a function local execution context, otherwise returns nullptr.
  ContextScope* Previous(int depth) {
    if (depth > depth_) {
      return nullptr;
    }

    ContextScope* previous = this;
    for (int i = depth; i > 0; --i) {
      previous = previous->outer_;
    }
    return previous;
  }

  Register reg() const { return register_; }

 private:
  const BytecodeArrayBuilder* builder() const { return generator_->builder(); }

  void set_register(Register reg) { register_ = reg; }

  BytecodeGenerator* generator_;
  Scope* scope_;
  ContextScope* outer_;
  Register register_;
  int depth_;
};
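
// Note on ContextScope (illustrative): a nested ContextScope is created
// whenever visitation enters a scope that needs its own context object, e.g. a
// block whose lexical bindings are captured by a closure. Entering such a
// scope saves the previously current context in a freshly allocated register
// and makes the new context current via PushContext; the destructor restores
// the saved context with PopContext, so the chain of live ContextScopes always
// mirrors the runtime context chain.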

// Scoped class for tracking control statements entered by the
// visitor. The pattern derives from AstGraphBuilder::ControlScope.
class BytecodeGenerator::ControlScope {
 public:
  explicit ControlScope(BytecodeGenerator* generator)
      : generator_(generator), outer_(generator->execution_control()),
        context_(generator->execution_context()) {
    generator_->set_execution_control(this);
  }
  virtual ~ControlScope() { generator_->set_execution_control(outer()); }

  void Break(Statement* stmt) {
    PerformCommand(CMD_BREAK, stmt, kNoSourcePosition);
  }
  void Continue(Statement* stmt) {
    PerformCommand(CMD_CONTINUE, stmt, kNoSourcePosition);
  }
  void ReturnAccumulator(int source_position = kNoSourcePosition) {
    PerformCommand(CMD_RETURN, nullptr, source_position);
  }
  void AsyncReturnAccumulator(int source_position = kNoSourcePosition) {
    PerformCommand(CMD_ASYNC_RETURN, nullptr, source_position);
  }

  class DeferredCommands;

 protected:
  enum Command {
    CMD_BREAK,
    CMD_CONTINUE,
    CMD_RETURN,
    CMD_ASYNC_RETURN,
    CMD_RETHROW
  };
  static constexpr bool CommandUsesAccumulator(Command command) {
    return command != CMD_BREAK && command != CMD_CONTINUE;
  }

  void PerformCommand(Command command, Statement* statement,
                      int source_position);
  virtual bool Execute(Command command, Statement* statement,
                       int source_position) = 0;

  // Helper to pop the context chain to a depth expected by this control scope.
  // Note that it is the responsibility of each individual {Execute} method to
  // trigger this when commands are handled and control-flow continues locally.
  void PopContextToExpectedDepth();

  BytecodeGenerator* generator() const { return generator_; }
  ControlScope* outer() const { return outer_; }
  ContextScope* context() const { return context_; }

 private:
  BytecodeGenerator* generator_;
  ControlScope* outer_;
  ContextScope* context_;

  DISALLOW_COPY_AND_ASSIGN(ControlScope);
};

// Helper class for a try-finally control scope. It can record intercepted
// control-flow commands that cause entry into a finally-block, and re-apply
// them after again leaving that block. Special tokens are used to identify
// paths going through the finally-block to dispatch after leaving the block.
class BytecodeGenerator::ControlScope::DeferredCommands final {
 public:
  DeferredCommands(BytecodeGenerator* generator, Register token_register,
                   Register result_register)
      : generator_(generator),
        deferred_(generator->zone()),
        token_register_(token_register),
        result_register_(result_register),
        return_token_(-1),
        async_return_token_(-1),
        rethrow_token_(-1) {}

  // One recorded control-flow command.
  struct Entry {
    Command command;       // The command type being applied on this path.
    Statement* statement;  // The target statement for the command or {nullptr}.
    int token;             // A token identifying this particular path.
  };

  // Records a control-flow command while entering the finally-block. This also
  // generates a new dispatch token that identifies one particular path. This
  // expects the result to be in the accumulator.
  void RecordCommand(Command command, Statement* statement) {
    int token = GetTokenForCommand(command, statement);

    DCHECK_LT(token, deferred_.size());
    DCHECK_EQ(deferred_[token].command, command);
    DCHECK_EQ(deferred_[token].statement, statement);
    DCHECK_EQ(deferred_[token].token, token);

    if (CommandUsesAccumulator(command)) {
      builder()->StoreAccumulatorInRegister(result_register_);
    }
    builder()->LoadLiteral(Smi::FromInt(token));
    builder()->StoreAccumulatorInRegister(token_register_);
    if (!CommandUsesAccumulator(command)) {
      // If we're not saving the accumulator in the result register, shove a
      // harmless value there instead so that it is still considered "killed" in
      // the liveness analysis. Normally we would LdaUndefined first, but the
      // Smi token value is just as good, and by reusing it we save a bytecode.
      builder()->StoreAccumulatorInRegister(result_register_);
    }
  }

  // Records the dispatch token to be used to identify the re-throw path when
  // the finally-block has been entered through the exception handler. This
  // expects the exception to be in the accumulator.
  void RecordHandlerReThrowPath() {
    // The accumulator contains the exception object.
    RecordCommand(CMD_RETHROW, nullptr);
  }

  // Records the dispatch token to be used to identify the implicit fall-through
  // path at the end of a try-block into the corresponding finally-block.
  void RecordFallThroughPath() {
    builder()->LoadLiteral(Smi::FromInt(-1));
    builder()->StoreAccumulatorInRegister(token_register_);
    // Since we're not saving the accumulator in the result register, shove a
    // harmless value there instead so that it is still considered "killed" in
    // the liveness analysis. Normally we would LdaUndefined first, but the Smi
    // token value is just as good, and by reusing it we save a bytecode.
    builder()->StoreAccumulatorInRegister(result_register_);
  }

  // Applies all recorded control-flow commands after the finally-block again.
  // This generates a dynamic dispatch on the token from the entry point.
  void ApplyDeferredCommands() {
    if (deferred_.size() == 0) return;

    BytecodeLabel fall_through;

    if (deferred_.size() == 1) {
      // For a single entry, just jump to the fallthrough if we don't match the
      // entry token.
      const Entry& entry = deferred_[0];

      builder()
          ->LoadLiteral(Smi::FromInt(entry.token))
          .CompareReference(token_register_)
          .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &fall_through);

      if (CommandUsesAccumulator(entry.command)) {
        builder()->LoadAccumulatorWithRegister(result_register_);
      }
      execution_control()->PerformCommand(entry.command, entry.statement,
                                          kNoSourcePosition);
    } else {
      // For multiple entries, build a jump table and switch on the token,
      // jumping to the fallthrough if none of them match.

      BytecodeJumpTable* jump_table =
          builder()->AllocateJumpTable(static_cast<int>(deferred_.size()), 0);
      builder()
          ->LoadAccumulatorWithRegister(token_register_)
          .SwitchOnSmiNoFeedback(jump_table)
          .Jump(&fall_through);
      for (const Entry& entry : deferred_) {
        builder()->Bind(jump_table, entry.token);

        if (CommandUsesAccumulator(entry.command)) {
          builder()->LoadAccumulatorWithRegister(result_register_);
        }
        execution_control()->PerformCommand(entry.command, entry.statement,
                                            kNoSourcePosition);
      }
    }

    builder()->Bind(&fall_through);
  }

  BytecodeArrayBuilder* builder() { return generator_->builder(); }
  ControlScope* execution_control() { return generator_->execution_control(); }

 private:
  int GetTokenForCommand(Command command, Statement* statement) {
    switch (command) {
      case CMD_RETURN:
        return GetReturnToken();
      case CMD_ASYNC_RETURN:
        return GetAsyncReturnToken();
      case CMD_RETHROW:
        return GetRethrowToken();
      default:
        // TODO(leszeks): We could also search for entries with the same
        // command and statement.
        return GetNewTokenForCommand(command, statement);
    }
  }

  int GetReturnToken() {
    if (return_token_ == -1) {
      return_token_ = GetNewTokenForCommand(CMD_RETURN, nullptr);
    }
    return return_token_;
  }

  int GetAsyncReturnToken() {
    if (async_return_token_ == -1) {
      async_return_token_ = GetNewTokenForCommand(CMD_ASYNC_RETURN, nullptr);
    }
    return async_return_token_;
  }

  int GetRethrowToken() {
    if (rethrow_token_ == -1) {
      rethrow_token_ = GetNewTokenForCommand(CMD_RETHROW, nullptr);
    }
    return rethrow_token_;
  }

  int GetNewTokenForCommand(Command command, Statement* statement) {
    int token = static_cast<int>(deferred_.size());
    deferred_.push_back({command, statement, token});
    return token;
  }

  BytecodeGenerator* generator_;
  ZoneVector<Entry> deferred_;
  Register token_register_;
  Register result_register_;

  // Tokens for commands that don't need a statement.
  int return_token_;
  int async_return_token_;
  int rethrow_token_;
};
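
// Illustrative sketch of how DeferredCommands is used (not literal output):
// for JavaScript like
//
//   try { if (c) return x; } finally { f(); }
//
// leaving the try-block via the return is intercepted; the return value is
// saved in {result_register_} and a Smi token identifying the return path is
// stored in {token_register_} before the finally-block runs. The implicit
// fall-through path stores the token -1 instead. After the finally-block,
// ApplyDeferredCommands() dispatches on the token and either re-performs the
// recorded command (here, the return) or simply falls through.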

// Scoped class for dealing with control flow reaching the function level.
class BytecodeGenerator::ControlScopeForTopLevel final
    : public BytecodeGenerator::ControlScope {
 public:
  explicit ControlScopeForTopLevel(BytecodeGenerator* generator)
      : ControlScope(generator) {}

 protected:
  bool Execute(Command command, Statement* statement,
               int source_position) override {
    switch (command) {
      case CMD_BREAK:  // We should never see break/continue in top-level.
      case CMD_CONTINUE:
        UNREACHABLE();
      case CMD_RETURN:
        // No need to pop contexts, execution leaves the method body.
        generator()->BuildReturn(source_position);
        return true;
      case CMD_ASYNC_RETURN:
        // No need to pop contexts, execution leaves the method body.
        generator()->BuildAsyncReturn(source_position);
        return true;
      case CMD_RETHROW:
        // No need to pop contexts, execution leaves the method body.
        generator()->BuildReThrow();
        return true;
    }
    return false;
  }
};

// Scoped class for enabling break inside blocks and switch blocks.
class BytecodeGenerator::ControlScopeForBreakable final
    : public BytecodeGenerator::ControlScope {
 public:
  ControlScopeForBreakable(BytecodeGenerator* generator,
                           BreakableStatement* statement,
                           BreakableControlFlowBuilder* control_builder)
      : ControlScope(generator),
        statement_(statement),
        control_builder_(control_builder) {}

 protected:
  bool Execute(Command command, Statement* statement,
               int source_position) override {
    control_builder_->set_needs_continuation_counter();
    if (statement != statement_) return false;
    switch (command) {
      case CMD_BREAK:
        PopContextToExpectedDepth();
        control_builder_->Break();
        return true;
      case CMD_CONTINUE:
      case CMD_RETURN:
      case CMD_ASYNC_RETURN:
      case CMD_RETHROW:
        break;
    }
    return false;
  }

 private:
  Statement* statement_;
  BreakableControlFlowBuilder* control_builder_;
};

// Scoped class for enabling 'break' and 'continue' in iteration
// constructs, e.g. do...while, while..., for...
class BytecodeGenerator::ControlScopeForIteration final
    : public BytecodeGenerator::ControlScope {
 public:
  ControlScopeForIteration(BytecodeGenerator* generator,
                           IterationStatement* statement,
                           LoopBuilder* loop_builder)
      : ControlScope(generator),
        statement_(statement),
        loop_builder_(loop_builder) {
    generator->loop_depth_++;
  }
  ~ControlScopeForIteration() override { generator()->loop_depth_--; }

 protected:
  bool Execute(Command command, Statement* statement,
               int source_position) override {
    if (statement != statement_) return false;
    switch (command) {
      case CMD_BREAK:
        PopContextToExpectedDepth();
        loop_builder_->Break();
        return true;
      case CMD_CONTINUE:
        PopContextToExpectedDepth();
        loop_builder_->Continue();
        return true;
      case CMD_RETURN:
      case CMD_ASYNC_RETURN:
      case CMD_RETHROW:
        break;
    }
    return false;
  }

 private:
  Statement* statement_;
  LoopBuilder* loop_builder_;
};

// Scoped class for enabling 'throw' in try-catch constructs.
class BytecodeGenerator::ControlScopeForTryCatch final
    : public BytecodeGenerator::ControlScope {
 public:
  ControlScopeForTryCatch(BytecodeGenerator* generator,
                          TryCatchBuilder* try_catch_builder)
      : ControlScope(generator) {}

 protected:
  bool Execute(Command command, Statement* statement,
               int source_position) override {
    switch (command) {
      case CMD_BREAK:
      case CMD_CONTINUE:
      case CMD_RETURN:
      case CMD_ASYNC_RETURN:
        break;
      case CMD_RETHROW:
        // No need to pop contexts, execution re-enters the method body via the
        // stack unwinding mechanism which itself restores contexts correctly.
        generator()->BuildReThrow();
        return true;
    }
    return false;
  }
};

// Scoped class for enabling control flow through try-finally constructs.
class BytecodeGenerator::ControlScopeForTryFinally final
    : public BytecodeGenerator::ControlScope {
 public:
  ControlScopeForTryFinally(BytecodeGenerator* generator,
                            TryFinallyBuilder* try_finally_builder,
                            DeferredCommands* commands)
      : ControlScope(generator),
        try_finally_builder_(try_finally_builder),
        commands_(commands) {}

 protected:
  bool Execute(Command command, Statement* statement,
               int source_position) override {
    switch (command) {
      case CMD_BREAK:
      case CMD_CONTINUE:
      case CMD_RETURN:
      case CMD_ASYNC_RETURN:
      case CMD_RETHROW:
        PopContextToExpectedDepth();
        // We don't record the source position here since we don't generate the
        // return bytecode right here; it will be generated later as part of the
        // finally block. Each return bytecode generated in the finally block
        // will get its own source position from the corresponding return
        // statement, or the end of the function if no return statement is
        // present.
        commands_->RecordCommand(command, statement);
        try_finally_builder_->LeaveTry();
        return true;
    }
    return false;
  }

 private:
  TryFinallyBuilder* try_finally_builder_;
  DeferredCommands* commands_;
};

// Allocate and fetch the coverage indices tracking NaryLogical Expressions.
class BytecodeGenerator::NaryCodeCoverageSlots {
 public:
  NaryCodeCoverageSlots(BytecodeGenerator* generator, NaryOperation* expr)
      : generator_(generator) {
    if (generator_->block_coverage_builder_ == nullptr) return;
    for (size_t i = 0; i < expr->subsequent_length(); i++) {
      coverage_slots_.push_back(
          generator_->AllocateNaryBlockCoverageSlotIfEnabled(expr, i));
    }
  }

  int GetSlotFor(size_t subsequent_expr_index) const {
    if (generator_->block_coverage_builder_ == nullptr) {
      return BlockCoverageBuilder::kNoCoverageArraySlot;
    }
    DCHECK(coverage_slots_.size() > subsequent_expr_index);
    return coverage_slots_[subsequent_expr_index];
  }

 private:
  BytecodeGenerator* generator_;
  std::vector<int> coverage_slots_;
};

void BytecodeGenerator::ControlScope::PerformCommand(Command command,
                                                     Statement* statement,
                                                     int source_position) {
  ControlScope* current = this;
  do {
    if (current->Execute(command, statement, source_position)) {
      return;
    }
    current = current->outer();
  } while (current != nullptr);
  UNREACHABLE();
}
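
// Note: PerformCommand walks the control scopes from the innermost scope
// outwards until one of them handles the command. For example, a `break`
// targeting a loop that encloses a try-finally block is first intercepted by
// the inner ControlScopeForTryFinally, which records it as a deferred command
// and leaves the try-block; the break is then re-performed after the
// finally-block by ApplyDeferredCommands().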

void BytecodeGenerator::ControlScope::PopContextToExpectedDepth() {
  // Pop context to the expected depth. Note that this can in fact pop multiple
  // contexts at once because the {PopContext} bytecode takes a saved register.
  if (generator()->execution_context() != context()) {
    generator()->builder()->PopContext(context()->reg());
  }
}

class BytecodeGenerator::RegisterAllocationScope final {
 public:
  explicit RegisterAllocationScope(BytecodeGenerator* generator)
      : generator_(generator),
        outer_next_register_index_(
            generator->register_allocator()->next_register_index()) {}

  ~RegisterAllocationScope() {
    generator_->register_allocator()->ReleaseRegisters(
        outer_next_register_index_);
  }

 private:
  BytecodeGenerator* generator_;
  int outer_next_register_index_;

  DISALLOW_COPY_AND_ASSIGN(RegisterAllocationScope);
};

// Scoped base class for determining how the result of an expression will be
// used.
class BytecodeGenerator::ExpressionResultScope {
 public:
  ExpressionResultScope(BytecodeGenerator* generator, Expression::Context kind)
      : generator_(generator),
        outer_(generator->execution_result()),
        allocator_(generator),
        kind_(kind),
        type_hint_(TypeHint::kAny) {
    generator_->set_execution_result(this);
  }

  virtual ~ExpressionResultScope() {
    generator_->set_execution_result(outer_);
  }

  bool IsEffect() const { return kind_ == Expression::kEffect; }
  bool IsValue() const { return kind_ == Expression::kValue; }
  bool IsTest() const { return kind_ == Expression::kTest; }

  TestResultScope* AsTest() {
    DCHECK(IsTest());
    return reinterpret_cast<TestResultScope*>(this);
  }

  // Specify expression always returns a Boolean result value.
  void SetResultIsBoolean() {
    DCHECK_EQ(type_hint_, TypeHint::kAny);
    type_hint_ = TypeHint::kBoolean;
  }

  void SetResultIsString() {
    DCHECK_EQ(type_hint_, TypeHint::kAny);
    type_hint_ = TypeHint::kString;
  }

  TypeHint type_hint() const { return type_hint_; }

 private:
  BytecodeGenerator* generator_;
  ExpressionResultScope* outer_;
  RegisterAllocationScope allocator_;
  Expression::Context kind_;
  TypeHint type_hint_;

  DISALLOW_COPY_AND_ASSIGN(ExpressionResultScope);
};

// Scoped class used when the result of the current expression is not
// expected to produce a result.
class BytecodeGenerator::EffectResultScope final
    : public ExpressionResultScope {
 public:
  explicit EffectResultScope(BytecodeGenerator* generator)
      : ExpressionResultScope(generator, Expression::kEffect) {}
};

// Scoped class used when the result of the current expression to be
// evaluated should go into the interpreter's accumulator.
class BytecodeGenerator::ValueResultScope final : public ExpressionResultScope {
 public:
  explicit ValueResultScope(BytecodeGenerator* generator)
      : ExpressionResultScope(generator, Expression::kValue) {}
};

// Scoped class used when the result of the current expression to be
// evaluated is only tested with jumps to two branches.
class BytecodeGenerator::TestResultScope final : public ExpressionResultScope {
 public:
  TestResultScope(BytecodeGenerator* generator, BytecodeLabels* then_labels,
                  BytecodeLabels* else_labels, TestFallthrough fallthrough)
      : ExpressionResultScope(generator, Expression::kTest),
        result_consumed_by_test_(false),
        fallthrough_(fallthrough),
        then_labels_(then_labels),
        else_labels_(else_labels) {}

  // Used when code special cases for TestResultScope and consumes any
  // possible value by testing and jumping to a then/else label.
  void SetResultConsumedByTest() {
    result_consumed_by_test_ = true;
  }
  bool result_consumed_by_test() { return result_consumed_by_test_; }

  // Inverts the control flow of the operation, swapping the then and else
  // labels and the fallthrough.
  void InvertControlFlow() {
    std::swap(then_labels_, else_labels_);
    fallthrough_ = inverted_fallthrough();
  }

  BytecodeLabel* NewThenLabel() { return then_labels_->New(); }
  BytecodeLabel* NewElseLabel() { return else_labels_->New(); }

  BytecodeLabels* then_labels() const { return then_labels_; }
  BytecodeLabels* else_labels() const { return else_labels_; }

  void set_then_labels(BytecodeLabels* then_labels) {
    then_labels_ = then_labels;
  }
  void set_else_labels(BytecodeLabels* else_labels) {
    else_labels_ = else_labels;
  }

  TestFallthrough fallthrough() const { return fallthrough_; }
  TestFallthrough inverted_fallthrough() const {
    switch (fallthrough_) {
      case TestFallthrough::kThen:
        return TestFallthrough::kElse;
      case TestFallthrough::kElse:
        return TestFallthrough::kThen;
      default:
        return TestFallthrough::kNone;
    }
  }
  void set_fallthrough(TestFallthrough fallthrough) {
    fallthrough_ = fallthrough;
  }

 private:
  bool result_consumed_by_test_;
  TestFallthrough fallthrough_;
  BytecodeLabels* then_labels_;
  BytecodeLabels* else_labels_;

  DISALLOW_COPY_AND_ASSIGN(TestResultScope);
};

// Used to build a list of global declaration initial value pairs.
class BytecodeGenerator::GlobalDeclarationsBuilder final : public ZoneObject {
 public:
  explicit GlobalDeclarationsBuilder(Zone* zone)
      : declarations_(0, zone),
        constant_pool_entry_(0),
        has_constant_pool_entry_(false) {}

  void AddFunctionDeclaration(const AstRawString* name, FeedbackSlot slot,
                              FeedbackSlot literal_slot,
                              FunctionLiteral* func) {
    DCHECK(!slot.IsInvalid());
    declarations_.push_back(Declaration(name, slot, literal_slot, func));
  }

  void AddUndefinedDeclaration(const AstRawString* name, FeedbackSlot slot) {
    DCHECK(!slot.IsInvalid());
    declarations_.push_back(Declaration(name, slot, nullptr));
  }

  Handle<FixedArray> AllocateDeclarations(UnoptimizedCompilationInfo* info,
                                          Handle<Script> script,
                                          Isolate* isolate) {
    DCHECK(has_constant_pool_entry_);
    int array_index = 0;
    Handle<FixedArray> data = isolate->factory()->NewFixedArray(
        static_cast<int>(declarations_.size() * 4), TENURED);
    for (const Declaration& declaration : declarations_) {
      FunctionLiteral* func = declaration.func;
      Handle<Object> initial_value;
      if (func == nullptr) {
        initial_value = isolate->factory()->undefined_value();
      } else {
        initial_value = Compiler::GetSharedFunctionInfo(func, script, isolate);
      }

      // Return a null handle if any initial values can't be created. Caller
      // will set stack overflow.
      if (initial_value.is_null()) return Handle<FixedArray>();

      data->set(array_index++, *declaration.name->string());
      data->set(array_index++, Smi::FromInt(declaration.slot.ToInt()));
      Object* undefined_or_literal_slot;
      if (declaration.literal_slot.IsInvalid()) {
        undefined_or_literal_slot = ReadOnlyRoots(isolate).undefined_value();
      } else {
        undefined_or_literal_slot =
            Smi::FromInt(declaration.literal_slot.ToInt());
      }
      data->set(array_index++, undefined_or_literal_slot);
      data->set(array_index++, *initial_value);
    }
    return data;
  }

  size_t constant_pool_entry() {
    DCHECK(has_constant_pool_entry_);
    return constant_pool_entry_;
  }

  void set_constant_pool_entry(size_t constant_pool_entry) {
    DCHECK(!empty());
    DCHECK(!has_constant_pool_entry_);
    constant_pool_entry_ = constant_pool_entry;
    has_constant_pool_entry_ = true;
  }

  bool empty() { return declarations_.empty(); }

 private:
  struct Declaration {
    Declaration() : slot(FeedbackSlot::Invalid()), func(nullptr) {}
    Declaration(const AstRawString* name, FeedbackSlot slot,
                FeedbackSlot literal_slot, FunctionLiteral* func)
        : name(name), slot(slot), literal_slot(literal_slot), func(func) {}
    Declaration(const AstRawString* name, FeedbackSlot slot,
                FunctionLiteral* func)
        : name(name),
          slot(slot),
          literal_slot(FeedbackSlot::Invalid()),
          func(func) {}

    const AstRawString* name;
    FeedbackSlot slot;
    FeedbackSlot literal_slot;
    FunctionLiteral* func;
  };
  ZoneVector<Declaration> declarations_;
  size_t constant_pool_entry_;
  bool has_constant_pool_entry_;
};
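
// Layout note: AllocateDeclarations() above packs each declaration into four
// consecutive FixedArray slots,
//   [name, feedback slot, literal feedback slot (or undefined), initial value],
// where the initial value is undefined for plain variable declarations and the
// SharedFunctionInfo for function declarations. The resulting array is handed
// to Runtime::kDeclareGlobals by VisitDeclarations().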

class BytecodeGenerator::CurrentScope final {
 public:
  CurrentScope(BytecodeGenerator* generator, Scope* scope)
      : generator_(generator), outer_scope_(generator->current_scope()) {
    if (scope != nullptr) {
      DCHECK_EQ(outer_scope_, scope->outer_scope());
      generator_->set_current_scope(scope);
    }
  }
  ~CurrentScope() {
    if (outer_scope_ != generator_->current_scope()) {
      generator_->set_current_scope(outer_scope_);
    }
  }

 private:
  BytecodeGenerator* generator_;
  Scope* outer_scope_;
};

class BytecodeGenerator::FeedbackSlotCache : public ZoneObject {
 public:
  explicit FeedbackSlotCache(Zone* zone) : map_(zone) {}

  void Put(FeedbackSlotKind slot_kind, Variable* variable, FeedbackSlot slot) {
    PutImpl(slot_kind, 0, variable, slot);
  }
  void Put(FeedbackSlotKind slot_kind, AstNode* node, FeedbackSlot slot) {
    PutImpl(slot_kind, 0, node, slot);
  }
  void Put(FeedbackSlotKind slot_kind, int variable_index,
           const AstRawString* name, FeedbackSlot slot) {
    PutImpl(slot_kind, variable_index, name, slot);
  }

  FeedbackSlot Get(FeedbackSlotKind slot_kind, Variable* variable) const {
    return GetImpl(slot_kind, 0, variable);
  }
  FeedbackSlot Get(FeedbackSlotKind slot_kind, AstNode* node) const {
    return GetImpl(slot_kind, 0, node);
  }
  FeedbackSlot Get(FeedbackSlotKind slot_kind, int variable_index,
                   const AstRawString* name) const {
    return GetImpl(slot_kind, variable_index, name);
  }

 private:
  typedef std::tuple<FeedbackSlotKind, int, const void*> Key;

  void PutImpl(FeedbackSlotKind slot_kind, int index, const void* node,
               FeedbackSlot slot) {
    Key key = std::make_tuple(slot_kind, index, node);
    auto entry = std::make_pair(key, slot);
    map_.insert(entry);
  }

  FeedbackSlot GetImpl(FeedbackSlotKind slot_kind, int index,
                       const void* node) const {
    Key key = std::make_tuple(slot_kind, index, node);
    auto iter = map_.find(key);
    if (iter != map_.end()) {
      return iter->second;
    }
    return FeedbackSlot();
  }

  ZoneMap<Key, FeedbackSlot> map_;
};

class BytecodeGenerator::IteratorRecord final {
 public:
  IteratorRecord(Register object_register, Register next_register,
                 IteratorType type = IteratorType::kNormal)
      : type_(type), object_(object_register), next_(next_register) {
    DCHECK(object_.is_valid() && next_.is_valid());
  }

  inline IteratorType type() const { return type_; }
  inline Register object() const { return object_; }
  inline Register next() const { return next_; }

 private:
  IteratorType type_;
  Register object_;
  Register next_;
};

#ifdef DEBUG

static bool IsInEagerLiterals(
    FunctionLiteral* literal,
    const ZoneVector<FunctionLiteral*>& eager_literals) {
  for (FunctionLiteral* eager_literal : eager_literals) {
    if (literal == eager_literal) return true;
  }
  return false;
}

#endif  // DEBUG

BytecodeGenerator::BytecodeGenerator(
    UnoptimizedCompilationInfo* info,
    const AstStringConstants* ast_string_constants,
    ZoneVector<FunctionLiteral*>* eager_inner_literals)
    : zone_(info->zone()),
      builder_(zone(), info->num_parameters_including_this(),
               info->scope()->num_stack_slots(), info->feedback_vector_spec(),
               info->SourcePositionRecordingMode()),
      info_(info),
      ast_string_constants_(ast_string_constants),
      closure_scope_(info->scope()),
      current_scope_(info->scope()),
      eager_inner_literals_(eager_inner_literals),
      feedback_slot_cache_(new (zone()) FeedbackSlotCache(zone())),
      globals_builder_(new (zone()) GlobalDeclarationsBuilder(zone())),
      block_coverage_builder_(nullptr),
      global_declarations_(0, zone()),
      function_literals_(0, zone()),
      native_function_literals_(0, zone()),
      object_literals_(0, zone()),
      array_literals_(0, zone()),
      class_literals_(0, zone()),
      template_objects_(0, zone()),
      execution_control_(nullptr),
      execution_context_(nullptr),
      execution_result_(nullptr),
      incoming_new_target_or_generator_(),
      dummy_feedback_slot_(feedback_spec(), FeedbackSlotKind::kCompareOp),
      generator_jump_table_(nullptr),
      suspend_count_(0),
      loop_depth_(0),
      catch_prediction_(HandlerTable::UNCAUGHT) {
  DCHECK_EQ(closure_scope(), closure_scope()->GetClosureScope());
  if (info->has_source_range_map()) {
    block_coverage_builder_ = new (zone())
        BlockCoverageBuilder(zone(), builder(), info->source_range_map());
  }
}

Handle<BytecodeArray> BytecodeGenerator::FinalizeBytecode(
    Isolate* isolate, Handle<Script> script) {
  DCHECK(ThreadId::Current().Equals(isolate->thread_id()));
#ifdef DEBUG
  // Unoptimized compilation should be context-independent. Verify that we don't
  // access the native context by nulling it out during finalization.
  SaveContext save(isolate);
  isolate->set_context(nullptr);
#endif

  AllocateDeferredConstants(isolate, script);

  if (block_coverage_builder_) {
    info()->set_coverage_info(
        isolate->factory()->NewCoverageInfo(block_coverage_builder_->slots()));
    if (FLAG_trace_block_coverage) {
      info()->coverage_info()->Print(info()->literal()->GetDebugName());
    }
  }

  if (HasStackOverflow()) return Handle<BytecodeArray>();
  Handle<BytecodeArray> bytecode_array = builder()->ToBytecodeArray(isolate);

  if (incoming_new_target_or_generator_.is_valid()) {
    bytecode_array->set_incoming_new_target_or_generator_register(
        incoming_new_target_or_generator_);
  }

  return bytecode_array;
}

void BytecodeGenerator::AllocateDeferredConstants(Isolate* isolate,
                                                  Handle<Script> script) {
  // Build global declaration pair arrays.
  for (GlobalDeclarationsBuilder* globals_builder : global_declarations_) {
    Handle<FixedArray> declarations =
        globals_builder->AllocateDeclarations(info(), script, isolate);
    if (declarations.is_null()) return SetStackOverflow();
    builder()->SetDeferredConstantPoolEntry(
        globals_builder->constant_pool_entry(), declarations);
  }

  // Find or build shared function infos.
  for (std::pair<FunctionLiteral*, size_t> literal : function_literals_) {
    FunctionLiteral* expr = literal.first;
    Handle<SharedFunctionInfo> shared_info =
        Compiler::GetSharedFunctionInfo(expr, script, isolate);
    if (shared_info.is_null()) return SetStackOverflow();
    builder()->SetDeferredConstantPoolEntry(literal.second, shared_info);
  }

  // Find or build shared function infos for the native function templates.
  for (std::pair<NativeFunctionLiteral*, size_t> literal :
       native_function_literals_) {
    NativeFunctionLiteral* expr = literal.first;
    v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);

    // Compute the function template for the native function.
    v8::Local<v8::FunctionTemplate> info =
        expr->extension()->GetNativeFunctionTemplate(
            v8_isolate, Utils::ToLocal(expr->name()));
    DCHECK(!info.IsEmpty());

    Handle<SharedFunctionInfo> shared_info =
        FunctionTemplateInfo::GetOrCreateSharedFunctionInfo(
            isolate, Utils::OpenHandle(*info), expr->name());
    DCHECK(!shared_info.is_null());
    builder()->SetDeferredConstantPoolEntry(literal.second, shared_info);
  }

  // Build object literal constant properties
  for (std::pair<ObjectLiteral*, size_t> literal : object_literals_) {
    ObjectLiteral* object_literal = literal.first;
    if (object_literal->properties_count() > 0) {
      // If constant properties is an empty fixed array, we've already added it
      // to the constant pool when visiting the object literal.
      Handle<ObjectBoilerplateDescription> constant_properties =
          object_literal->GetOrBuildBoilerplateDescription(isolate);

      builder()->SetDeferredConstantPoolEntry(literal.second,
                                              constant_properties);
    }
  }

  // Build array literal constant elements
  for (std::pair<ArrayLiteral*, size_t> literal : array_literals_) {
    ArrayLiteral* array_literal = literal.first;
    Handle<ArrayBoilerplateDescription> constant_elements =
        array_literal->GetOrBuildBoilerplateDescription(isolate);
    builder()->SetDeferredConstantPoolEntry(literal.second, constant_elements);
  }

  // Build class literal boilerplates.
  for (std::pair<ClassLiteral*, size_t> literal : class_literals_) {
    ClassLiteral* class_literal = literal.first;
    Handle<ClassBoilerplate> class_boilerplate =
        ClassBoilerplate::BuildClassBoilerplate(isolate, class_literal);
    builder()->SetDeferredConstantPoolEntry(literal.second, class_boilerplate);
  }

  // Build template literals.
  for (std::pair<GetTemplateObject*, size_t> literal : template_objects_) {
    GetTemplateObject* get_template_object = literal.first;
    Handle<TemplateObjectDescription> description =
        get_template_object->GetOrBuildDescription(isolate);
    builder()->SetDeferredConstantPoolEntry(literal.second, description);
  }
}

void BytecodeGenerator::GenerateBytecode(uintptr_t stack_limit) {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;

  InitializeAstVisitor(stack_limit);

  // Initialize the incoming context.
  ContextScope incoming_context(this, closure_scope());

  // Initialize control scope.
  ControlScopeForTopLevel control(this);

  RegisterAllocationScope register_scope(this);

  AllocateTopLevelRegisters();

  if (info()->literal()->CanSuspend()) {
    BuildGeneratorPrologue();
  }

  if (closure_scope()->NeedsContext()) {
    // Push a new inner context scope for the function.
    BuildNewLocalActivationContext();
    ContextScope local_function_context(this, closure_scope());
    BuildLocalActivationContextInitialization();
    GenerateBytecodeBody();
  } else {
    GenerateBytecodeBody();
  }

  // Check that we are not falling off the end.
  DCHECK(!builder()->RequiresImplicitReturn());
}

void BytecodeGenerator::GenerateBytecodeBody() {
  // Build the arguments object if it is used.
  VisitArgumentsObject(closure_scope()->arguments());

  // Build rest arguments array if it is used.
  Variable* rest_parameter = closure_scope()->rest_parameter();
  VisitRestArgumentsArray(rest_parameter);

  // Build assignment to the function name or {.this_function}
  // variables if used.
  VisitThisFunctionVariable(closure_scope()->function_var());
  VisitThisFunctionVariable(closure_scope()->this_function_var());

  // Build assignment to {new.target} variable if it is used.
  VisitNewTargetVariable(closure_scope()->new_target_var());

  // Create a generator object if necessary and initialize the
  // {.generator_object} variable.
  if (IsResumableFunction(info()->literal()->kind())) {
    BuildGeneratorObjectVariableInitialization();
  }

  // Emit tracing call if requested to do so.
  if (FLAG_trace) builder()->CallRuntime(Runtime::kTraceEnter);

  // Emit type profile call.
  if (info()->collect_type_profile()) {
    feedback_spec()->AddTypeProfileSlot();
    int num_parameters = closure_scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Register parameter(builder()->Parameter(i));
      builder()->LoadAccumulatorWithRegister(parameter).CollectTypeProfile(
          closure_scope()->parameter(i)->initializer_position());
    }
  }

  // Visit declarations within the function scope.
  VisitDeclarations(closure_scope()->declarations());

  // Emit initializing assignments for module namespace imports (if any).
  VisitModuleNamespaceImports();

  // Perform a stack-check before the body.
  builder()->StackCheck(info()->literal()->start_position());

  // The derived constructor case is handled in VisitCallSuper.
  if (IsBaseConstructor(function_kind()) &&
      info()->literal()->requires_instance_members_initializer()) {
    BuildInstanceMemberInitialization(Register::function_closure(),
                                      builder()->Receiver());
  }

  // Visit statements in the function body.
  VisitStatements(info()->literal()->body());

  // Emit an implicit return instruction in case control flow can fall off the
  // end of the function without an explicit return being present on all paths.
  if (builder()->RequiresImplicitReturn()) {
    builder()->LoadUndefined();
    BuildReturn();
  }
}

void BytecodeGenerator::AllocateTopLevelRegisters() {
  if (IsResumableFunction(info()->literal()->kind())) {
    // Either directly use generator_object_var or allocate a new register for
    // the incoming generator object.
    Variable* generator_object_var = closure_scope()->generator_object_var();
    if (generator_object_var->location() == VariableLocation::LOCAL) {
      incoming_new_target_or_generator_ =
          GetRegisterForLocalVariable(generator_object_var);
    } else {
      incoming_new_target_or_generator_ = register_allocator()->NewRegister();
    }
  } else if (closure_scope()->new_target_var()) {
    // Either directly use new_target_var or allocate a new register for
    // the incoming new target object.
    Variable* new_target_var = closure_scope()->new_target_var();
    if (new_target_var->location() == VariableLocation::LOCAL) {
      incoming_new_target_or_generator_ =
          GetRegisterForLocalVariable(new_target_var);
    } else {
      incoming_new_target_or_generator_ = register_allocator()->NewRegister();
    }
  }
}

void BytecodeGenerator::BuildGeneratorPrologue() {
  DCHECK_GT(info()->literal()->suspend_count(), 0);
  DCHECK(generator_object().is_valid());
  generator_jump_table_ =
      builder()->AllocateJumpTable(info()->literal()->suspend_count(), 0);

  // If the generator is not undefined, this is a resume, so perform state
  // dispatch.
  builder()->SwitchOnGeneratorState(generator_object(), generator_jump_table_);

  // Otherwise, fall-through to the ordinary function prologue, after which we
  // will run into the generator object creation and other extra code inserted
  // by the parser.
}
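
// Illustrative note (assumes the usual suspend-point numbering): for a
// generator such as
//
//   function* g() { yield 1; yield 2; }
//
// each yield (or await, in async functions) is a suspend point, so the jump
// table allocated above gets one entry per suspend point. On the first
// invocation the generator object register holds undefined and
// SwitchOnGeneratorState falls through to the normal prologue; on resumption
// it dispatches to the bytecode recorded for the suspend point being resumed.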

void BytecodeGenerator::VisitBlock(Block* stmt) {
  // Visit declarations and statements.
  CurrentScope current_scope(this, stmt->scope());
  if (stmt->scope() != nullptr && stmt->scope()->NeedsContext()) {
    BuildNewLocalBlockContext(stmt->scope());
    ContextScope scope(this, stmt->scope());
    VisitBlockDeclarationsAndStatements(stmt);
  } else {
    VisitBlockDeclarationsAndStatements(stmt);
  }
}

void BytecodeGenerator::VisitBlockDeclarationsAndStatements(Block* stmt) {
  BlockBuilder block_builder(builder(), block_coverage_builder_, stmt);
  ControlScopeForBreakable execution_control(this, stmt, &block_builder);
  if (stmt->scope() != nullptr) {
    VisitDeclarations(stmt->scope()->declarations());
  }
  VisitStatements(stmt->statements());
}

void BytecodeGenerator::VisitVariableDeclaration(VariableDeclaration* decl) {
  Variable* variable = decl->proxy()->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      DCHECK(!variable->binding_needs_init());
      FeedbackSlot slot =
          GetCachedLoadGlobalICSlot(NOT_INSIDE_TYPEOF, variable);
      globals_builder()->AddUndefinedDeclaration(variable->raw_name(), slot);
      break;
    }
    case VariableLocation::LOCAL:
      if (variable->binding_needs_init()) {
        Register destination(builder()->Local(variable->index()));
        builder()->LoadTheHole().StoreAccumulatorInRegister(destination);
      }
      break;
    case VariableLocation::PARAMETER:
      if (variable->binding_needs_init()) {
        Register destination(builder()->Parameter(variable->index()));
        builder()->LoadTheHole().StoreAccumulatorInRegister(destination);
      }
      break;
    case VariableLocation::CONTEXT:
      if (variable->binding_needs_init()) {
        DCHECK_EQ(0, execution_context()->ContextChainDepth(variable->scope()));
        builder()->LoadTheHole().StoreContextSlot(execution_context()->reg(),
                                                  variable->index(), 0);
      }
      break;
    case VariableLocation::LOOKUP: {
      DCHECK_EQ(VariableMode::kVar, variable->mode());
      DCHECK(!variable->binding_needs_init());

      Register name = register_allocator()->NewRegister();

      builder()
          ->LoadLiteral(variable->raw_name())
          .StoreAccumulatorInRegister(name)
          .CallRuntime(Runtime::kDeclareEvalVar, name);
      break;
    }
    case VariableLocation::MODULE:
      if (variable->IsExport() && variable->binding_needs_init()) {
        builder()->LoadTheHole();
        BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
      }
      // Nothing to do for imports.
      break;
  }
}

void BytecodeGenerator::VisitFunctionDeclaration(FunctionDeclaration* decl) {
  Variable* variable = decl->proxy()->var();
  DCHECK(variable->mode() == VariableMode::kLet ||
         variable->mode() == VariableMode::kVar);
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      FeedbackSlot slot =
          GetCachedLoadGlobalICSlot(NOT_INSIDE_TYPEOF, variable);
      FeedbackSlot literal_slot = GetCachedCreateClosureSlot(decl->fun());
      globals_builder()->AddFunctionDeclaration(variable->raw_name(), slot,
                                                literal_slot, decl->fun());
      AddToEagerLiteralsIfEager(decl->fun());
      break;
    }
    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      VisitForAccumulatorValue(decl->fun());
      BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
      break;
    }
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(0, execution_context()->ContextChainDepth(variable->scope()));
      VisitForAccumulatorValue(decl->fun());
      builder()->StoreContextSlot(execution_context()->reg(), variable->index(),
                                  0);
      break;
    }
    case VariableLocation::LOOKUP: {
      RegisterList args = register_allocator()->NewRegisterList(2);
      builder()
          ->LoadLiteral(variable->raw_name())
          .StoreAccumulatorInRegister(args[0]);
      VisitForAccumulatorValue(decl->fun());
      builder()->StoreAccumulatorInRegister(args[1]).CallRuntime(
          Runtime::kDeclareEvalFunction, args);
      break;
    }
    case VariableLocation::MODULE:
      DCHECK_EQ(variable->mode(), VariableMode::kLet);
      DCHECK(variable->IsExport());
      VisitForAccumulatorValue(decl->fun());
      BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
      break;
  }
  DCHECK_IMPLIES(decl->fun()->ShouldEagerCompile(),
                 IsInEagerLiterals(decl->fun(), *eager_inner_literals_));
}

void BytecodeGenerator::VisitModuleNamespaceImports() {
  if (!closure_scope()->is_module_scope()) return;

  RegisterAllocationScope register_scope(this);
  Register module_request = register_allocator()->NewRegister();

  ModuleDescriptor* descriptor = closure_scope()->AsModuleScope()->module();
  for (auto entry : descriptor->namespace_imports()) {
    builder()
        ->LoadLiteral(Smi::FromInt(entry->module_request))
        .StoreAccumulatorInRegister(module_request)
        .CallRuntime(Runtime::kGetModuleNamespace, module_request);
    Variable* var = closure_scope()->LookupLocal(entry->local_name);
    DCHECK_NOT_NULL(var);
    BuildVariableAssignment(var, Token::INIT, HoleCheckMode::kElided);
  }
}

void BytecodeGenerator::VisitDeclarations(Declaration::List* declarations) {
  RegisterAllocationScope register_scope(this);
  DCHECK(globals_builder()->empty());
  for (Declaration* decl : *declarations) {
    RegisterAllocationScope register_scope(this);
    Visit(decl);
  }
  if (globals_builder()->empty()) return;

  globals_builder()->set_constant_pool_entry(
      builder()->AllocateDeferredConstantPoolEntry());
  int encoded_flags = DeclareGlobalsEvalFlag::encode(info()->is_eval()) |
                      DeclareGlobalsNativeFlag::encode(info()->is_native());

  // Emit code to declare globals.
  RegisterList args = register_allocator()->NewRegisterList(3);
  builder()
      ->LoadConstantPoolEntry(globals_builder()->constant_pool_entry())
      .StoreAccumulatorInRegister(args[0])
      .LoadLiteral(Smi::FromInt(encoded_flags))
      .StoreAccumulatorInRegister(args[1])
      .MoveRegister(Register::function_closure(), args[2])
      .CallRuntime(Runtime::kDeclareGlobals, args);

  // Push and reset globals builder.
  global_declarations_.push_back(globals_builder());
  globals_builder_ = new (zone()) GlobalDeclarationsBuilder(zone());
}

void BytecodeGenerator::VisitStatements(
    const ZonePtrList<Statement>* statements) {
  for (int i = 0; i < statements->length(); i++) {
    // Allocate an outer register allocations scope for the statement.
    RegisterAllocationScope allocation_scope(this);
    Statement* stmt = statements->at(i);
    Visit(stmt);
    if (stmt->IsJump()) break;
  }
}
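
// Note: the early `break` above means that once a statement in the list is an
// unconditional jump (e.g. break, continue, or return), the remaining
// statements in that list are never visited, so no bytecode is emitted for
// that trivially unreachable code.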

void BytecodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
  builder()->SetStatementPosition(stmt);
  VisitForEffect(stmt->expression());
}

void BytecodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {
}

void BytecodeGenerator::VisitIfStatement(IfStatement* stmt) {
  ConditionalControlFlowBuilder conditional_builder(
      builder(), block_coverage_builder_, stmt);
  builder()->SetStatementPosition(stmt);

  if (stmt->condition()->ToBooleanIsTrue()) {
    // Generate then block unconditionally as always true.
    conditional_builder.Then();
    Visit(stmt->then_statement());
  } else if (stmt->condition()->ToBooleanIsFalse()) {
    // Generate else block unconditionally if it exists.
    if (stmt->HasElseStatement()) {
      conditional_builder.Else();
      Visit(stmt->else_statement());
    }
  } else {
    // TODO(oth): If then statement is BreakStatement or
    // ContinueStatement we can reduce number of generated
    // jump/jump_ifs here. See BasicLoops test.
    VisitForTest(stmt->condition(), conditional_builder.then_labels(),
                 conditional_builder.else_labels(), TestFallthrough::kThen);

    conditional_builder.Then();
    Visit(stmt->then_statement());

    if (stmt->HasElseStatement()) {
      conditional_builder.JumpToEnd();
      conditional_builder.Else();
      Visit(stmt->else_statement());
    }
  }
}

void BytecodeGenerator::VisitSloppyBlockFunctionStatement(
    SloppyBlockFunctionStatement* stmt) {
  Visit(stmt->statement());
}

void BytecodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
  AllocateBlockCoverageSlotIfEnabled(stmt, SourceRangeKind::kContinuation);
  builder()->SetStatementPosition(stmt);
  execution_control()->Continue(stmt->target());
}

void BytecodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
  AllocateBlockCoverageSlotIfEnabled(stmt, SourceRangeKind::kContinuation);
  builder()->SetStatementPosition(stmt);
  execution_control()->Break(stmt->target());
}

void BytecodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
  AllocateBlockCoverageSlotIfEnabled(stmt, SourceRangeKind::kContinuation);
  builder()->SetStatementPosition(stmt);
  VisitForAccumulatorValue(stmt->expression());
  if (stmt->is_async_return()) {
    execution_control()->AsyncReturnAccumulator(stmt->end_position());
  } else {
    execution_control()->ReturnAccumulator(stmt->end_position());
  }
}

void BytecodeGenerator::VisitWithStatement(WithStatement* stmt) {
  builder()->SetStatementPosition(stmt);
  VisitForAccumulatorValue(stmt->expression());
  BuildNewLocalWithContext(stmt->scope());
  VisitInScope(stmt->statement(), stmt->scope());
}

void BytecodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  // We need this scope because we visit for register values. We have to
  // maintain an execution result scope where registers can be allocated.
  ZonePtrList<CaseClause>* clauses = stmt->cases();
  SwitchBuilder switch_builder(builder(), block_coverage_builder_, stmt,
                               clauses->length());
  ControlScopeForBreakable scope(this, stmt, &switch_builder);
  int default_index = -1;

  builder()->SetStatementPosition(stmt);

  // Keep the switch value in a register until a case matches.
  Register tag = VisitForRegisterValue(stmt->tag());
  FeedbackSlot slot = clauses->length() > 0
                          ? feedback_spec()->AddCompareICSlot()
                          : FeedbackSlot::Invalid();

  // Iterate over all cases and create nodes for label comparison.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);

    // The default is not a test, remember index.
    if (clause->is_default()) {
      default_index = i;
      continue;
    }

    // Perform label comparison as if via '===' with tag.
    VisitForAccumulatorValue(clause->label());
    builder()->CompareOperation(Token::Value::EQ_STRICT, tag,
                                feedback_index(slot));
    switch_builder.Case(ToBooleanMode::kAlreadyBoolean, i);
  }

  if (default_index >= 0) {
    // Emit default jump if there is a default case.
    switch_builder.DefaultAt(default_index);
  } else {
    // Otherwise if we have reached here none of the cases matched, so jump to
    // the end.
    switch_builder.Break();
  }

  // Iterate over all cases and create the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    switch_builder.SetCaseTarget(i, clause);
    VisitStatements(clause->statements());
  }
}

void BytecodeGenerator::VisitIterationBody(IterationStatement* stmt,
                                           LoopBuilder* loop_builder) {
  loop_builder->LoopBody();
  ControlScopeForIteration execution_control(this, stmt, loop_builder);
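  // The stack check at the top of each iteration gives the runtime a chance
  // to handle interrupts (e.g. stack overflow or termination requests) in
  // long-running loops.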
  builder()->StackCheck(stmt->position());
  builder()->StackCheck(stmt->position());
  Visit(stmt->body());
  loop_builder->BindContinueTarget();
}

void BytecodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
  LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
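  // The body always runs at least once, so the condition only needs to be
  // emitted when its value is not statically known: a statically false
  // condition needs no back-branch at all, and a statically true one becomes
  // an unconditional jump back to the loop header.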
  if (stmt->cond()->ToBooleanIsFalse()) {
    VisitIterationBody(stmt, &loop_builder);
  } else if (stmt->cond()->ToBooleanIsTrue()) {
    loop_builder.LoopHeader();
    VisitIterationBody(stmt, &loop_builder);
    loop_builder.JumpToHeader(loop_depth_);
  } else {
    loop_builder.LoopHeader();
    VisitIterationBody(stmt, &loop_builder);
    builder()->SetExpressionAsStatementPosition(stmt->cond());
    BytecodeLabels loop_backbranch(zone());
    VisitForTest(stmt->cond(), &loop_backbranch, loop_builder.break_labels(),
                 TestFallthrough::kThen);
    loop_backbranch.Bind(builder());
    loop_builder.JumpToHeader(loop_depth_);
  }
}

void BytecodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
  LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);

  if (stmt->cond()->ToBooleanIsFalse()) {
    // If the condition is false there is no need to generate the loop.
    return;
  }

  loop_builder.LoopHeader();
  if (!stmt->cond()->ToBooleanIsTrue()) {
    builder()->SetExpressionAsStatementPosition(stmt->cond());
    BytecodeLabels loop_body(zone());
    VisitForTest(stmt->cond(), &loop_body, loop_builder.break_labels(),
                 TestFallthrough::kThen);
    loop_body.Bind(builder());
  }
  VisitIterationBody(stmt, &loop_builder);
  loop_builder.JumpToHeader(loop_depth_);
}

void BytecodeGenerator::VisitForStatement(ForStatement* stmt) {
  LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);

  if (stmt->init() != nullptr) {
    Visit(stmt->init());
  }
  if (stmt->cond() && stmt->cond()->ToBooleanIsFalse()) {
    // If the condition is known to be false there is no need to generate
    // body, next or condition blocks. Init block should be generated.
    return;
  }

  loop_builder.LoopHeader();
  if (stmt->cond() && !stmt->cond()->ToBooleanIsTrue()) {
    builder()->SetExpressionAsStatementPosition(stmt->cond());
    BytecodeLabels loop_body(zone());
    VisitForTest(stmt->cond(), &loop_body, loop_builder.break_labels(),
                 TestFallthrough::kThen);
    loop_body.Bind(builder());
  }
  VisitIterationBody(stmt, &loop_builder);
  if (stmt->next() != nullptr) {
    builder()->SetStatementPosition(stmt->next());
    Visit(stmt->next());
  }
  loop_builder.JumpToHeader(loop_depth_);
}

void BytecodeGenerator::VisitForInAssignment(Expression* expr) {
  DCHECK(expr->IsValidReferenceExpression());

  // Evaluate assignment starting with the value to be stored in the
  // accumulator.
  Property* property = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);
  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->AsVariableProxy();
      BuildVariableAssignment(proxy->var(), Token::ASSIGN,
                              proxy->hole_check_mode());
      break;
    }
    case NAMED_PROPERTY: {
      RegisterAllocationScope register_scope(this);
      Register value = register_allocator()->NewRegister();
      builder()->StoreAccumulatorInRegister(value);
      Register object = VisitForRegisterValue(property->obj());
      const AstRawString* name =
          property->key()->AsLiteral()->AsRawPropertyName();
      builder()->LoadAccumulatorWithRegister(value);
      FeedbackSlot slot = GetCachedStoreICSlot(property->obj(), name);
      builder()->StoreNamedProperty(object, name, feedback_index(slot),
                                    language_mode());
      builder()->LoadAccumulatorWithRegister(value);
      break;
    }
    case KEYED_PROPERTY: {
      RegisterAllocationScope register_scope(this);
      Register value = register_allocator()->NewRegister();
      builder()->StoreAccumulatorInRegister(value);
      Register object = VisitForRegisterValue(property->obj());
      Register key = VisitForRegisterValue(property->key());
      builder()->LoadAccumulatorWithRegister(value);
      FeedbackSlot slot = feedback_spec()->AddKeyedStoreICSlot(language_mode());
      builder()->StoreKeyedProperty(object, key, feedback_index(slot),
                                    language_mode());
      builder()->LoadAccumulatorWithRegister(value);
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      RegisterAllocationScope register_scope(this);
      RegisterList args = register_allocator()->NewRegisterList(4);
      builder()->StoreAccumulatorInRegister(args[3]);
      SuperPropertyReference* super_property =
          property->obj()->AsSuperPropertyReference();
      VisitForRegisterValue(super_property->this_var(), args[0]);
      VisitForRegisterValue(super_property->home_object(), args[1]);
      builder()
          ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
          .StoreAccumulatorInRegister(args[2])
          .CallRuntime(StoreToSuperRuntimeId(), args);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      RegisterAllocationScope register_scope(this);
      RegisterList args = register_allocator()->NewRegisterList(4);
      builder()->StoreAccumulatorInRegister(args[3]);
      SuperPropertyReference* super_property =
          property->obj()->AsSuperPropertyReference();
      VisitForRegisterValue(super_property->this_var(), args[0]);
      VisitForRegisterValue(super_property->home_object(), args[1]);
      VisitForRegisterValue(property->key(), args[2]);
      builder()->CallRuntime(StoreKeyedToSuperRuntimeId(), args);
      break;
    }
  }
}

void BytecodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  if (stmt->subject()->IsNullLiteral() ||
      stmt->subject()->IsUndefinedLiteral()) {
    // ForIn generates lots of code, skip if it wouldn't produce any effects.
    return;
  }

  BytecodeLabel subject_null_label, subject_undefined_label;
  FeedbackSlot slot = feedback_spec()->AddForInSlot();

  // Prepare the state for executing ForIn.
  builder()->SetExpressionAsStatementPosition(stmt->subject());
  VisitForAccumulatorValue(stmt->subject());
  builder()->JumpIfUndefined(&subject_undefined_label);
  builder()->JumpIfNull(&subject_null_label);
  Register receiver = register_allocator()->NewRegister();
  builder()->ToObject(receiver);

  // Used as kRegTriple and kRegPair in ForInPrepare and ForInNext.
  RegisterList triple = register_allocator()->NewRegisterList(3);
  Register cache_length = triple[2];
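  // ForInPrepare fills the triple with the enumeration cache state
  // (cache type, cache array and cache length); ForInNext consumes the first
  // two of those registers as a pair.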
  builder()->ForInEnumerate(receiver);
  builder()->ForInPrepare(triple, feedback_index(slot));

  // Set up loop counter
  Register index = register_allocator()->NewRegister();
  builder()->LoadLiteral(Smi::zero());
  builder()->StoreAccumulatorInRegister(index);

  // The loop
  {
    LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
    loop_builder.LoopHeader();
    builder()->SetExpressionAsStatementPosition(stmt->each());
    builder()->ForInContinue(index, cache_length);
    loop_builder.BreakIfFalse(ToBooleanMode::kAlreadyBoolean);
    builder()->ForInNext(receiver, index, triple.Truncate(2),
                         feedback_index(slot));
    loop_builder.ContinueIfUndefined();
    VisitForInAssignment(stmt->each());
    VisitIterationBody(stmt, &loop_builder);
    builder()->ForInStep(index);
    builder()->StoreAccumulatorInRegister(index);
    loop_builder.JumpToHeader(loop_depth_);
  }
  builder()->Bind(&subject_null_label);
  builder()->Bind(&subject_undefined_label);
}

void BytecodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
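  // The individual steps of the for-of protocol are provided by the parser's
  // desugaring as sub-expressions: assign_iterator (and assign_next) set up
  // the iterator and its next method, next_result calls next(), result_done
  // reads the done property, and assign_each binds the result value to the
  // loop target before the body runs.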

  builder()->SetExpressionAsStatementPosition(stmt->assign_iterator());
  VisitForEffect(stmt->assign_iterator());
  VisitForEffect(stmt->assign_next());

  loop_builder.LoopHeader();
  builder()->SetExpressionAsStatementPosition(stmt->next_result());
  VisitForEffect(stmt->next_result());
  TypeHint type_hint = VisitForAccumulatorValue(stmt->result_done());
  loop_builder.BreakIfTrue(ToBooleanModeFromTypeHint(type_hint));

  VisitForEffect(stmt->assign_each());
  VisitIterationBody(stmt, &loop_builder);
  loop_builder.JumpToHeader(loop_depth_);
}

void BytecodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
  // Update catch prediction tracking. The updated catch_prediction value lasts
  // until the end of the try_block in the AST node, and does not apply to the
  // catch_block.
  HandlerTable::CatchPrediction outer_catch_prediction = catch_prediction();
  set_catch_prediction(stmt->GetCatchPrediction(outer_catch_prediction));

  TryCatchBuilder try_control_builder(builder(), block_coverage_builder_, stmt,
                                      catch_prediction());

  // Preserve the context in a dedicated register, so that it can be restored
  // when the handler is entered by the stack-unwinding machinery.
  // TODO(mstarzinger): Be smarter about register allocation.
  Register context = register_allocator()->NewRegister();
  builder()->MoveRegister(Register::current_context(), context);

  // Evaluate the try-block inside a control scope. This simulates a handler
  // that is intercepting 'throw' control commands.
  try_control_builder.BeginTry(context);
  {
    ControlScopeForTryCatch scope(this, &try_control_builder);
    Visit(stmt->try_block());
    set_catch_prediction(outer_catch_prediction);
  }
  try_control_builder.EndTry();

  if (stmt->scope()) {
    // Create a catch scope that binds the exception.
    BuildNewLocalCatchContext(stmt->scope());
    builder()->StoreAccumulatorInRegister(context);
  }

  // If requested, clear message object as we enter the catch block.
  if (stmt->ShouldClearPendingException(outer_catch_prediction)) {
    builder()->LoadTheHole().SetPendingMessage();
  }

  // Load the catch context into the accumulator.
  builder()->LoadAccumulatorWithRegister(context);

  // Evaluate the catch-block.
  if (stmt->scope()) {
    VisitInScope(stmt->catch_block(), stmt->scope());
  } else {
    VisitBlock(stmt->catch_block());
  }
  try_control_builder.EndCatch();
}

void BytecodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
  // We can't know whether the finally block will override ("catch") an
  // exception thrown in the try block, so we just adopt the outer prediction.
  TryFinallyBuilder try_control_builder(builder(), block_coverage_builder_,
                                        stmt, catch_prediction());

  // We keep a record of all paths that enter the finally-block to be able to
  // dispatch to the correct continuation point after the statements in the
  // finally-block have been evaluated.
  //
  // The try-finally construct can enter the finally-block in three ways:
  // 1. By exiting the try-block normally, falling through at the end.
  // 2. By exiting the try-block with a function-local control flow transfer
  //    (i.e. through break/continue/return statements).
  // 3. By exiting the try-block with a thrown exception.
  //
  // The result register semantics depend on how the block was entered:
  //  - ReturnStatement: It represents the return value being returned.
  //  - ThrowStatement: It represents the exception being thrown.
  //  - BreakStatement/ContinueStatement: Undefined and not used.
  //  - Falling through into finally-block: Undefined and not used.
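  //
  // For example, in `try { if (x) return f(); } finally { g(); }` the
  // finally-block can be reached by falling through (when x is false), by the
  // early return (case 2 above), or by an exception thrown from f() (case 3).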
  Register token = register_allocator()->NewRegister();
  Register result = register_allocator()->NewRegister();
  ControlScope::DeferredCommands commands(this, token, result);

  // Preserve the context in a dedicated register, so that it can be restored
  // when the handler is entered by the stack-unwinding machinery.
  // TODO(mstarzinger): Be smarter about register allocation.
  Register context = register_allocator()->NewRegister();
  builder()->MoveRegister(Register::current_context(), context);

  // Evaluate the try-block inside a control scope. This simulates a handler
  // that is intercepting all control commands.
  try_control_builder.BeginTry(context);
  {
    ControlScopeForTryFinally scope(this, &try_control_builder, &commands);
    Visit(stmt->try_block());
  }
  try_control_builder.EndTry();

  // Record fall-through and exception cases.
  commands.RecordFallThroughPath();
  try_control_builder.LeaveTry();
  try_control_builder.BeginHandler();
  commands.RecordHandlerReThrowPath();

  // Pending message object is saved on entry.
  try_control_builder.BeginFinally();
  Register message = context;  // Reuse register.

  // Clear message object as we enter the finally block.
  builder()->LoadTheHole().SetPendingMessage().StoreAccumulatorInRegister(
      message);

  // Evaluate the finally-block.
  Visit(stmt->finally_block());
  try_control_builder.EndFinally();

  // Pending message object is restored on exit.
  builder()->LoadAccumulatorWithRegister(message).SetPendingMessage();

  // Dynamic dispatch after the finally-block.
  commands.ApplyDeferredCommands();
}

void BytecodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
  builder()->SetStatementPosition(stmt);
  builder()->Debugger();
}

void BytecodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
  DCHECK(expr->scope()->outer_scope() == current_scope());
  uint8_t flags = CreateClosureFlags::Encode(
      expr->pretenure(), closure_scope()->is_function_scope());
  size_t entry = builder()->AllocateDeferredConstantPoolEntry();
  FeedbackSlot slot = GetCachedCreateClosureSlot(expr);
  builder()->CreateClosure(entry, feedback_index(slot), flags);
  function_literals_.push_back(std::make_pair(expr, entry));
  AddToEagerLiteralsIfEager(expr);
}

void BytecodeGenerator::AddToEagerLiteralsIfEager(FunctionLiteral* literal) {
  if (eager_inner_literals_ && literal->ShouldEagerCompile()) {
    DCHECK(!IsInEagerLiterals(literal, *eager_inner_literals_));
    eager_inner_literals_->push_back(literal);
  }
}

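// Decides whether bytecode should be specialized for one-shot execution, i.e.
// code that is expected to run only once: top-level code or a top-level IIFE,
// outside of loops. Object and array literal sites are then created via
// runtime calls without allocation sites; see BuildCreateObjectLiteral and
// BuildCreateArrayLiteral below.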
bool BytecodeGenerator::ShouldOptimizeAsOneShot() const {
  if (!FLAG_enable_one_shot_optimization) return false;

  if (loop_depth_ > 0) return false;

  // A non-top-level IIFE is likely to be executed multiple times and so
  // shouldn't be optimized as one-shot.
  bool is_toplevel_iife = info()->literal()->is_iife() &&
                          current_scope()->outer_scope()->is_script_scope();
  return info()->literal()->is_toplevel() || is_toplevel_iife;
}

void BytecodeGenerator::BuildClassLiteral(ClassLiteral* expr, Register name) {
  size_t class_boilerplate_entry =
      builder()->AllocateDeferredConstantPoolEntry();
  class_literals_.push_back(std::make_pair(expr, class_boilerplate_entry));

  VisitDeclarations(expr->scope()->declarations());
  Register class_constructor = register_allocator()->NewRegister();

  {
    RegisterAllocationScope register_scope(this);
    RegisterList args = register_allocator()->NewGrowableRegisterList();

    Register class_boilerplate = register_allocator()->GrowRegisterList(&args);
    Register class_constructor_in_args =
        register_allocator()->GrowRegisterList(&args);
    Register super_class = register_allocator()->GrowRegisterList(&args);
    DCHECK_EQ(ClassBoilerplate::kFirstDynamicArgumentIndex,
              args.register_count());
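    // The fixed arguments to Runtime::kDefineClass are the class boilerplate,
    // the class constructor and the super class; computed keys and the values
    // of non-field properties are appended to |args| after them.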

    VisitForAccumulatorValueOrTheHole(expr->extends());
    builder()->StoreAccumulatorInRegister(super_class);

    VisitFunctionLiteral(expr->constructor());
    builder()
        ->StoreAccumulatorInRegister(class_constructor)
        .MoveRegister(class_constructor, class_constructor_in_args)
        .LoadConstantPoolEntry(class_boilerplate_entry)
        .StoreAccumulatorInRegister(class_boilerplate);

    // Create computed names and method values nodes to store into the literal.
    for (int i = 0; i < expr->properties()->length(); i++) {
      ClassLiteral::Property* property = expr->properties()->at(i);
      if (property->is_computed_name()) {
        DCHECK_IMPLIES(property->kind() == ClassLiteral::Property::FIELD,
                       !property->is_private());
        Register key = register_allocator()->GrowRegisterList(&args);

        builder()->SetExpressionAsStatementPosition(property->key());
        BuildLoadPropertyKey(property, key);
        if (property->is_static()) {
          // The static prototype property is read-only. We handle the
          // non-computed property name case in the parser. Since this is the
          // only case where we need to check for an own read-only property,
          // we special-case it here instead of doing the check for every
          // property.

          FeedbackSlot slot = GetDummyCompareICSlot();
          BytecodeLabel done;
          builder()
              ->LoadLiteral(ast_string_constants()->prototype_string())
              .CompareOperation(Token::Value::EQ_STRICT, key,
                                feedback_index(slot))
              .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &done)
              .CallRuntime(Runtime::kThrowStaticPrototypeError)
              .Bind(&done);
        }

        if (property->kind() == ClassLiteral::Property::FIELD &&
            !property->is_private()) {
          // Initialize field's name variable with the computed name.
          DCHECK_NOT_NULL(property->computed_name_var());
          builder()->LoadAccumulatorWithRegister(key);
          BuildVariableAssignment(property->computed_name_var(), Token::INIT,
                                  HoleCheckMode::kElided);
        }
      }

      if (property->kind() == ClassLiteral::Property::FIELD) {
        if (property->is_private()) {
          builder()->CallRuntime(Runtime::kCreatePrivateNameSymbol);
          DCHECK_NOT_NULL(property->private_name_var());
          BuildVariableAssignment(property->private_name_var(), Token::INIT,
                                  HoleCheckMode::kElided);
        }
        // We don't compute the field's value here, but instead do it in the
        // initializer function.
        continue;
      }

      Register value = register_allocator()->GrowRegisterList(&args);
      VisitForRegisterValue(property->value(), value);
    }

    builder()->CallRuntime(Runtime::kDefineClass, args);
  }
  Register prototype = register_allocator()->NewRegister();
  builder()->StoreAccumulatorInRegister(prototype);

  // Assign to class variable.
  if (expr->class_variable() != nullptr) {
    DCHECK(expr->class_variable()->IsStackLocal() ||
           expr->class_variable()->IsContextSlot());
    builder()->LoadAccumulatorWithRegister(class_constructor);
    BuildVariableAssignment(expr->class_variable(), Token::INIT,
                            HoleCheckMode::kElided);
  }

  if (expr->instance_members_initializer_function() != nullptr) {
    Register initializer =
        VisitForRegisterValue(expr->instance_members_initializer_function());

    if (FunctionLiteral::NeedsHomeObject(
            expr->instance_members_initializer_function())) {
      FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
      builder()->LoadAccumulatorWithRegister(prototype).StoreHomeObjectProperty(
          initializer, feedback_index(slot), language_mode());
    }

    FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
    builder()
        ->LoadAccumulatorWithRegister(initializer)
        .StoreClassFieldsInitializer(class_constructor, feedback_index(slot))
        .LoadAccumulatorWithRegister(class_constructor);
  }

  if (expr->static_fields_initializer() != nullptr) {
    // TODO(gsathya): This can be optimized away to be a part of the
    // class boilerplate in the future. The name argument can be
    // passed to the DefineClass runtime function and have it set
    // there.
    if (name.is_valid()) {
      Register key = register_allocator()->NewRegister();
      builder()
          ->LoadLiteral(ast_string_constants()->name_string())
          .StoreAccumulatorInRegister(key);

      DataPropertyInLiteralFlags data_property_flags =
          DataPropertyInLiteralFlag::kNoFlags;
      FeedbackSlot slot =
          feedback_spec()->AddStoreDataPropertyInLiteralICSlot();
      builder()->LoadAccumulatorWithRegister(name).StoreDataPropertyInLiteral(
          class_constructor, key, data_property_flags, feedback_index(slot));
    }

    RegisterList args = register_allocator()->NewRegisterList(1);
    Register initializer =
        VisitForRegisterValue(expr->static_fields_initializer());

    if (FunctionLiteral::NeedsHomeObject(expr->static_fields_initializer())) {
      FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
      builder()
          ->LoadAccumulatorWithRegister(class_constructor)
          .StoreHomeObjectProperty(initializer, feedback_index(slot),
                                   language_mode());
    }

    builder()
        ->MoveRegister(class_constructor, args[0])
        .CallProperty(initializer, args,
                      feedback_index(feedback_spec()->AddCallICSlot()));
  }
  builder()->LoadAccumulatorWithRegister(class_constructor);
}

void BytecodeGenerator::VisitClassLiteral(ClassLiteral* expr) {
  VisitClassLiteral(expr, Register::invalid_value());
}

void BytecodeGenerator::VisitClassLiteral(ClassLiteral* expr, Register name) {
  CurrentScope current_scope(this, expr->scope());
  DCHECK_NOT_NULL(expr->scope());
  if (expr->scope()->NeedsContext()) {
    BuildNewLocalBlockContext(expr->scope());
    ContextScope scope(this, expr->scope());
    BuildClassLiteral(expr, name);
  } else {
    BuildClassLiteral(expr, name);
  }
}

void BytecodeGenerator::VisitInitializeClassMembersStatement(
    InitializeClassMembersStatement* stmt) {
  RegisterList args = register_allocator()->NewRegisterList(3);
  Register constructor = args[0], key = args[1], value = args[2];
  builder()->MoveRegister(builder()->Receiver(), constructor);

  for (int i = 0; i < stmt->fields()->length(); i++) {
    ClassLiteral::Property* property = stmt->fields()->at(i);

    if (property->is_computed_name()) {
      DCHECK_EQ(property->kind(), ClassLiteral::Property::FIELD);
      DCHECK(!property->is_private());
      Variable* var = property->computed_name_var();
      DCHECK_NOT_NULL(var);
      // The computed name is already evaluated and stored in a
      // variable at class definition time.
      BuildVariableLoad(var, HoleCheckMode::kElided);
      builder()->StoreAccumulatorInRegister(key);
    } else if (property->kind() == ClassLiteral::Property::FIELD &&
               property->is_private()) {
      Variable* private_name_var = property->private_name_var();
      DCHECK_NOT_NULL(private_name_var);
      BuildVariableLoad(private_name_var, HoleCheckMode::kElided);
      builder()->StoreAccumulatorInRegister(key);
    } else {
      BuildLoadPropertyKey(property, key);
    }

    builder()->SetExpressionAsStatementPosition(property->value());
    VisitForRegisterValue(property->value(), value);
    VisitSetHomeObject(value, constructor, property);

    Runtime::FunctionId function_id =
        property->kind() == ClassLiteral::Property::FIELD &&
                !property->is_private()
            ? Runtime::kCreateDataProperty
            : Runtime::kAddPrivateField;
    builder()->CallRuntime(function_id, args);
  }
}

void BytecodeGenerator::BuildInstanceMemberInitialization(Register constructor,
                                                          Register instance) {
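  // Load the class-fields initializer function stored on |constructor| and,
  // unless it is undefined, call it with |instance| as the receiver.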
  RegisterList args = register_allocator()->NewRegisterList(1);
  Register initializer = register_allocator()->NewRegister();

  FeedbackSlot slot = feedback_spec()->AddLoadICSlot();
  BytecodeLabel done;

  builder()
      ->LoadClassFieldsInitializer(constructor, feedback_index(slot))
      // TODO(gsathya): This jump can be elided for the base
      // constructor and derived constructor. This is only required
      // when called from an arrow function.
      .JumpIfUndefined(&done)
      .StoreAccumulatorInRegister(initializer)
      .MoveRegister(instance, args[0])
      .CallProperty(initializer, args,
                    feedback_index(feedback_spec()->AddCallICSlot()))
      .Bind(&done);
}

void BytecodeGenerator::VisitNativeFunctionLiteral(
    NativeFunctionLiteral* expr) {
  size_t entry = builder()->AllocateDeferredConstantPoolEntry();
  FeedbackSlot slot = feedback_spec()->AddCreateClosureSlot();
  builder()->CreateClosure(entry, feedback_index(slot), NOT_TENURED);
  native_function_literals_.push_back(std::make_pair(expr, entry));
}

void BytecodeGenerator::VisitDoExpression(DoExpression* expr) {
  VisitBlock(expr->block());
  VisitVariableProxy(expr->result());
}

void BytecodeGenerator::VisitConditional(Conditional* expr) {
  ConditionalControlFlowBuilder conditional_builder(
      builder(), block_coverage_builder_, expr);

  if (expr->condition()->ToBooleanIsTrue()) {
    // Generate then block unconditionally as always true.
    conditional_builder.Then();
    VisitForAccumulatorValue(expr->then_expression());
  } else if (expr->condition()->ToBooleanIsFalse()) {
    // Generate else block unconditionally if it exists.
    conditional_builder.Else();
    VisitForAccumulatorValue(expr->else_expression());
  } else {
    VisitForTest(expr->condition(), conditional_builder.then_labels(),
                 conditional_builder.else_labels(), TestFallthrough::kThen);

    conditional_builder.Then();
    VisitForAccumulatorValue(expr->then_expression());
    conditional_builder.JumpToEnd();

    conditional_builder.Else();
    VisitForAccumulatorValue(expr->else_expression());
  }
}

void BytecodeGenerator::VisitLiteral(Literal* expr) {
  if (execution_result()->IsEffect()) return;
  switch (expr->type()) {
    case Literal::kSmi:
      builder()->LoadLiteral(expr->AsSmiLiteral());
      break;
    case Literal::kHeapNumber:
      builder()->LoadLiteral(expr->AsNumber());
      break;
    case Literal::kUndefined:
      builder()->LoadUndefined();
      break;
    case Literal::kBoolean:
      builder()->LoadBoolean(expr->ToBooleanIsTrue());
      execution_result()->SetResultIsBoolean();
      break;
    case Literal::kNull:
      builder()->LoadNull();
      break;
    case Literal::kTheHole:
      builder()->LoadTheHole();
      break;
    case Literal::kString:
      builder()->LoadLiteral(expr->AsRawString());
      execution_result()->SetResultIsString();
      break;
    case Literal::kSymbol:
      builder()->LoadLiteral(expr->AsSymbol());
      break;
    case Literal::kBigInt:
      builder()->LoadLiteral(expr->AsBigInt());
      break;
  }
}

void BytecodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  // Materialize a regular expression literal.
  builder()->CreateRegExpLiteral(
      expr->raw_pattern(), feedback_index(feedback_spec()->AddLiteralSlot()),
      expr->flags());
}

void BytecodeGenerator::BuildCreateObjectLiteral(Register literal,
                                                 uint8_t flags, size_t entry) {
  if (ShouldOptimizeAsOneShot()) {
    RegisterList args = register_allocator()->NewRegisterList(2);
    builder()
        ->LoadConstantPoolEntry(entry)
        .StoreAccumulatorInRegister(args[0])
        .LoadLiteral(Smi::FromInt(flags))
        .StoreAccumulatorInRegister(args[1])
        .CallRuntime(Runtime::kCreateObjectLiteralWithoutAllocationSite, args)
        .StoreAccumulatorInRegister(literal);

  } else {
    // TODO(cbruni): Directly generate runtime call for literals we cannot
    // optimize once the CreateShallowObjectLiteral stub is in sync with the TF
    // optimizations.
    int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
    builder()->CreateObjectLiteral(entry, literal_index, flags, literal);
  }
}

void BytecodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  expr->InitDepthAndFlags();

  // Fast path for the empty object literal which doesn't need an
  // AllocationSite.
  if (expr->IsEmptyObjectLiteral()) {
    DCHECK(expr->IsFastCloningSupported());
    builder()->CreateEmptyObjectLiteral();
    return;
  }

  // Deep-copy the literal boilerplate.
  uint8_t flags = CreateObjectLiteralFlags::Encode(
      expr->ComputeFlags(), expr->IsFastCloningSupported());

  Register literal = register_allocator()->NewRegister();

  // Create literal object.
  int property_index = 0;
  bool clone_object_spread =
      expr->properties()->first()->kind() == ObjectLiteral::Property::SPREAD;
  if (clone_object_spread) {
    // Avoid the slow path for spreads in the following common cases:
    //   1) `let obj = { ...source }`
    //   2) `let obj = { ...source, override: 1 }`
    //   3) `let obj = { ...source, ...overrides }`
    RegisterAllocationScope register_scope(this);
    Expression* property = expr->properties()->first()->value();
    Register from_value = VisitForRegisterValue(property);

    BytecodeLabels clone_object(zone());
    builder()->JumpIfUndefined(clone_object.New());
    builder()->JumpIfNull(clone_object.New());
    builder()->ToObject(from_value);

    clone_object.Bind(builder());
    int clone_index = feedback_index(feedback_spec()->AddCloneObjectSlot());
    builder()->CloneObject(from_value, flags, clone_index);
    builder()->StoreAccumulatorInRegister(literal);
    property_index++;
  } else {
    size_t entry;
    // If the constant properties are an empty fixed array, use a cached empty
    // fixed array to ensure it's only added to the constant pool once.
    if (expr->properties_count() == 0) {
      entry = builder()->EmptyObjectBoilerplateDescriptionConstantPoolEntry();
    } else {
      entry = builder()->AllocateDeferredConstantPoolEntry();
      object_literals_.push_back(std::make_pair(expr, entry));
    }
    BuildCreateObjectLiteral(literal, flags, entry);
  }

  // Store computed values into the literal.
  AccessorTable accessor_table(zone());
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (!clone_object_spread && property->IsCompileTimeValue()) continue;

    RegisterAllocationScope inner_register_scope(this);
    Literal* key = property->key()->AsLiteral();
    switch (property->kind()) {
      case ObjectLiteral::Property::SPREAD:
        UNREACHABLE();
      case ObjectLiteral::Property::CONSTANT:
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(clone_object_spread || !property->value()->IsCompileTimeValue());
        V8_FALLTHROUGH;
      case ObjectLiteral::Property::COMPUTED: {
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->IsStringLiteral()) {
          DCHECK(key->IsPropertyName());
          if (property->emit_store()) {
            builder()->SetExpressionPosition(property->value());
            VisitForAccumulatorValue(property->value());
            FeedbackSlot slot = feedback_spec()->AddStoreOwnICSlot();
            if (FunctionLiteral::NeedsHomeObject(property->value())) {
              RegisterAllocationScope register_scope(this);
              Register value = register_allocator()->NewRegister();
              builder()->StoreAccumulatorInRegister(value);
              builder()->StoreNamedOwnProperty(
                  literal, key->AsRawPropertyName(), feedback_index(slot));
              VisitSetHomeObject(value, literal, property);
            } else {
              builder()->StoreNamedOwnProperty(
                  literal, key->AsRawPropertyName(), feedback_index(slot));
            }
          } else {
            builder()->SetExpressionPosition(property->value());
            VisitForEffect(property->value());
          }
        } else {
          RegisterList args = register_allocator()->NewRegisterList(4);

          builder()->MoveRegister(literal, args[0]);
          builder()->SetExpressionPosition(property->key());
          VisitForRegisterValue(property->key(), args[1]);
          builder()->SetExpressionPosition(property->value());
          VisitForRegisterValue(property->value(), args[2]);
          if (property->emit_store()) {
            builder()
                ->LoadLiteral(Smi::FromEnum(LanguageMode::kSloppy))
                .StoreAccumulatorInRegister(args[3])
                .CallRuntime(Runtime::kSetKeyedProperty, args);
            Register value = args[2];
            VisitSetHomeObject(value, literal, property);
          }
        }
        break;
      }
      case ObjectLiteral::Property::PROTOTYPE: {
        // __proto__:null is handled by CreateObjectLiteral.
        if (property->IsNullPrototype()) break;
        DCHECK(property->emit_store());
        DCHECK(!property->NeedsSetFunctionName());
        RegisterList args = register_allocator()->NewRegisterList(2);
        builder()->MoveRegister(literal, args[0]);
        builder()->SetExpressionPosition(property->value());
        VisitForRegisterValue(property->value(), args[1]);
        builder()->CallRuntime(Runtime::kInternalSetPrototype, args);
        break;
      }
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = property;
        }
        break;
    }
  }

  // Define accessors, using only a single call to the runtime for each pair of
  // corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end(); ++it) {
    RegisterAllocationScope inner_register_scope(this);
    RegisterList args = register_allocator()->NewRegisterList(5);
    builder()->MoveRegister(literal, args[0]);
    VisitForRegisterValue(it->first, args[1]);
    VisitObjectLiteralAccessor(literal, it->second->getter, args[2]);
    VisitObjectLiteralAccessor(literal, it->second->setter, args[3]);
    builder()
        ->LoadLiteral(Smi::FromInt(NONE))
        .StoreAccumulatorInRegister(args[4])
        .CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, args);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // Runtime_CreateObjectLiteralBoilerplate. The second "dynamic" part starts
  // with the first computed property name and continues with all properties to
  // its right. All the code from above initializes the static component of the
  // object literal, and arranges for the map of the result to reflect the
  // static order in which the keys appear. For the dynamic properties, we
  // compile them into a series of "SetOwnProperty" runtime calls. This will
  // preserve insertion order.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    RegisterAllocationScope inner_register_scope(this);

    if (property->IsPrototype()) {
      // __proto__:null is handled by CreateObjectLiteral.
      if (property->IsNullPrototype()) continue;
      DCHECK(property->emit_store());
      DCHECK(!property->NeedsSetFunctionName());
      RegisterList args = register_allocator()->NewRegisterList(2);
      builder()->MoveRegister(literal, args[0]);
      builder()->SetExpressionPosition(property->value());
      VisitForRegisterValue(property->value(), args[1]);
      builder()->CallRuntime(Runtime::kInternalSetPrototype, args);
      continue;
    }

    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
      case ObjectLiteral::Property::COMPUTED:
      case ObjectLiteral::Property::MATERIALIZED_LITERAL: {
        Register key = register_allocator()->NewRegister();
        BuildLoadPropertyKey(property, key);
        builder()->SetExpressionPosition(property->value());
        Register value;

        // Static class fields require the name property to be set on
        // the class, meaning we can't wait until the
        // StoreDataPropertyInLiteral call later to set the name.
        if (property->value()->IsClassLiteral() &&
            property->value()->AsClassLiteral()->static_fields_initializer() !=
                nullptr) {
          value = register_allocator()->NewRegister();
          VisitClassLiteral(property->value()->AsClassLiteral(), key);
          builder()->StoreAccumulatorInRegister(value);
        } else {
          value = VisitForRegisterValue(property->value());
        }
        VisitSetHomeObject(value, literal, property);

        DataPropertyInLiteralFlags data_property_flags =
            DataPropertyInLiteralFlag::kNoFlags;
        if (property->NeedsSetFunctionName()) {
          data_property_flags |= DataPropertyInLiteralFlag::kSetFunctionName;
        }

        FeedbackSlot slot =
            feedback_spec()->AddStoreDataPropertyInLiteralICSlot();
        builder()
            ->LoadAccumulatorWithRegister(value)
            .StoreDataPropertyInLiteral(literal, key, data_property_flags,
                                        feedback_index(slot));
        break;
      }
      case ObjectLiteral::Property::GETTER:
      case ObjectLiteral::Property::SETTER: {
        RegisterList args = register_allocator()->NewRegisterList(4);
        builder()->MoveRegister(literal, args[0]);
        BuildLoadPropertyKey(property, args[1]);
        builder()->SetExpressionPosition(property->value());
        VisitForRegisterValue(property->value(), args[2]);
        VisitSetHomeObject(args[2], literal, property);
        builder()
            ->LoadLiteral(Smi::FromInt(NONE))
            .StoreAccumulatorInRegister(args[3]);
        Runtime::FunctionId function_id =
            property->kind() == ObjectLiteral::Property::GETTER
                ? Runtime::kDefineGetterPropertyUnchecked
                : Runtime::kDefineSetterPropertyUnchecked;
        builder()->CallRuntime(function_id, args);
        break;
      }
      case ObjectLiteral::Property::SPREAD: {
        RegisterList args = register_allocator()->NewRegisterList(2);
        builder()->MoveRegister(literal, args[0]);
        builder()->SetExpressionPosition(property->value());
        VisitForRegisterValue(property->value(), args[1]);
        builder()->CallRuntime(Runtime::kCopyDataProperties, args);
        break;
      }
      case ObjectLiteral::Property::PROTOTYPE:
        UNREACHABLE();  // Handled specially above.
        break;
    }
  }

  builder()->LoadAccumulatorWithRegister(literal);
}

void BytecodeGenerator::BuildArrayLiteralSpread(Spread* spread, Register array,
                                                Register index,
                                                FeedbackSlot index_slot,
                                                FeedbackSlot element_slot) {
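  // Appends the elements produced by a spread (e.g. the `...iterable` part of
  // `[a, ...iterable, b]`) to |array| starting at |index|, iterating the
  // spread value via the iterator protocol.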
  RegisterAllocationScope register_scope(this);
  Register value = register_allocator()->NewRegister();

  builder()->SetExpressionAsStatementPosition(spread->expression());
  IteratorRecord iterator =
      BuildGetIteratorRecord(spread->expression(), IteratorType::kNormal);

  LoopBuilder loop_builder(builder(), nullptr, nullptr);
  loop_builder.LoopHeader();

  // Call the iterator's .next() method. Break from the loop if the `done`
  // property is truthy, otherwise load the value from the iterator result and
  // append the argument.
  BuildIteratorNext(iterator, value);
  builder()->LoadNamedProperty(
      value, ast_string_constants()->done_string(),
      feedback_index(feedback_spec()->AddLoadICSlot()));
  loop_builder.BreakIfTrue(ToBooleanMode::kConvertToBoolean);

  loop_builder.LoopBody();
  builder()
      // value = value.value
      ->LoadNamedProperty(value, ast_string_constants()->value_string(),
                          feedback_index(feedback_spec()->AddLoadICSlot()))
      .StoreAccumulatorInRegister(value)
      // array[index] = value
      .StoreInArrayLiteral(array, index, feedback_index(element_slot))
      // index++
      .LoadAccumulatorWithRegister(index)
      .UnaryOperation(Token::INC, feedback_index(index_slot))
      .StoreAccumulatorInRegister(index);
  loop_builder.BindContinueTarget();
  loop_builder.JumpToHeader(loop_depth_);
}

void BytecodeGenerator::BuildCreateArrayLiteral(
    const ZonePtrList<Expression>* elements, ArrayLiteral* expr) {
  RegisterAllocationScope register_scope(this);
  Register index = register_allocator()->NewRegister();
  Register array = register_allocator()->NewRegister();
  SharedFeedbackSlot element_slot(feedback_spec(),
                                  FeedbackSlotKind::kStoreInArrayLiteral);
  ZonePtrList<Expression>::iterator current = elements->begin();
  ZonePtrList<Expression>::iterator end = elements->end();
  bool is_empty = elements->is_empty();

  if (!is_empty && (*current)->IsSpread()) {
    // If we have a leading spread, use CreateArrayFromIterable to create
    // an array from it and then add the remaining components to that array.
    VisitForAccumulatorValue(*current);
    builder()->CreateArrayFromIterable().StoreAccumulatorInRegister(array);

    if (++current != end) {
      // If there are remaining elements, prepare the index register that is
      // used for adding those elements. The next index is the length of the
      // newly created array.
      auto length = ast_string_constants()->length_string();
      int length_load_slot = feedback_index(feedback_spec()->AddLoadICSlot());
      builder()
          ->LoadNamedProperty(array, length, length_load_slot)
          .StoreAccumulatorInRegister(index);
    }
  } else if (expr != nullptr) {
    // There are some elements before the first (if any) spread, and we can
    // use a boilerplate when creating the initial array from those elements.

    // First, allocate a constant pool entry for the boilerplate that will
    // be created during finalization, and will contain all the constant
    // elements before the first spread. This also handles the empty array
    // case and one-shot optimization.
    uint8_t flags = CreateArrayLiteralFlags::Encode(
        expr->IsFastCloningSupported(), expr->ComputeFlags());
    bool optimize_as_one_shot = ShouldOptimizeAsOneShot();
    size_t entry;
    if (is_empty && optimize_as_one_shot) {
      entry = builder()->EmptyArrayBoilerplateDescriptionConstantPoolEntry();
    } else if (!is_empty) {
      entry = builder()->AllocateDeferredConstantPoolEntry();
      array_literals_.push_back(std::make_pair(expr, entry));
    }

    if (optimize_as_one_shot) {
      RegisterList args = register_allocator()->NewRegisterList(2);
      builder()
          ->LoadConstantPoolEntry(entry)
          .StoreAccumulatorInRegister(args[0])
          .LoadLiteral(Smi::FromInt(flags))
          .StoreAccumulatorInRegister(args[1])
          .CallRuntime(Runtime::kCreateArrayLiteralWithoutAllocationSite, args);
    } else if (is_empty) {
      // Empty array literal fast-path.
      int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
      DCHECK(expr->IsFastCloningSupported());
      builder()->CreateEmptyArrayLiteral(literal_index);
    } else {
      // Create array literal from boilerplate.
      int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
      builder()->CreateArrayLiteral(entry, literal_index, flags);
    }
    builder()->StoreAccumulatorInRegister(array);

    // Insert the missing non-constant elements, up until the first spread
    // index, into the initial array (the remaining elements will be inserted
    // below).
    DCHECK_EQ(current, elements->begin());
    ZonePtrList<Expression>::iterator first_spread_or_end =
        expr->first_spread_index() >= 0 ? current + expr->first_spread_index()
                                        : end;
    int array_index = 0;
    for (; current != first_spread_or_end; ++current, array_index++) {
      Expression* subexpr = *current;
      DCHECK(!subexpr->IsSpread());
      // Skip the constants.
      if (subexpr->IsCompileTimeValue()) continue;

      builder()
          ->LoadLiteral(Smi::FromInt(array_index))
          .StoreAccumulatorInRegister(index);
      VisitForAccumulatorValue(subexpr);
      builder()->StoreInArrayLiteral(array, index,
                                     feedback_index(element_slot.Get()));
    }

    if (current != end) {
      // If there are remaining elements, prepare the index register
      // to store the next element, which comes from the first spread.
      builder()->LoadLiteral(array_index).StoreAccumulatorInRegister(index);
    }
  } else {
    // In other cases, we prepare an empty array to be filled in below.
    DCHECK(!elements->is_empty());
    int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
    builder()
        ->CreateEmptyArrayLiteral(literal_index)
        .StoreAccumulatorInRegister(array);
    // Prepare the index for the first element.
    builder()->LoadLiteral(Smi::FromInt(0)).StoreAccumulatorInRegister(index);
  }

  // Now build insertions for the remaining elements from current to end.
  SharedFeedbackSlot index_slot(feedback_spec(), FeedbackSlotKind::kBinaryOp);
  SharedFeedbackSlot length_slot(
      feedback_spec(), feedback_spec()->GetStoreICSlot(LanguageMode::kStrict));
  for (; current != end; ++current) {
    Expression* subexpr = *current;
    if (subexpr->IsSpread()) {
      FeedbackSlot real_index_slot = index_slot.Get();
      BuildArrayLiteralSpread(subexpr->AsSpread(), array, index,
                              real_index_slot, element_slot.Get());
    } else if (!subexpr->IsTheHoleLiteral()) {
      // literal[index++] = subexpr
      VisitForAccumulatorValue(subexpr);
      builder()
          ->StoreInArrayLiteral(array, index,
                                feedback_index(element_slot.Get()))
          .LoadAccumulatorWithRegister(index);
      // Only increase the index if we are not the last element.
      if (current + 1 != end) {
        builder()
            ->UnaryOperation(Token::INC, feedback_index(index_slot.Get()))
            .StoreAccumulatorInRegister(index);
      }
    } else {
      // literal.length = ++index
      // length_slot is only used when there are holes.
      auto length = ast_string_constants()->length_string();
      builder()
          ->LoadAccumulatorWithRegister(index)
          .UnaryOperation(Token::INC, feedback_index(index_slot.Get()))
          .StoreAccumulatorInRegister(index)
          .StoreNamedProperty(array, length, feedback_index(length_slot.Get()),
                              LanguageMode::kStrict);
    }
  }

  builder()->LoadAccumulatorWithRegister(array);
}

void BytecodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  expr->InitDepthAndFlags();
  BuildCreateArrayLiteral(expr->values(), expr);
}

void BytecodeGenerator::VisitStoreInArrayLiteral(StoreInArrayLiteral* expr) {
  builder()->SetExpressionAsStatementPosition(expr);
  RegisterAllocationScope register_scope(this);
  Register array = register_allocator()->NewRegister();
  Register index = register_allocator()->NewRegister();
  VisitForRegisterValue(expr->array(), array);
  VisitForRegisterValue(expr->index(), index);
  VisitForAccumulatorValue(expr->value());
  builder()->StoreInArrayLiteral(
      array, index,
      feedback_index(feedback_spec()->AddStoreInArrayLiteralICSlot()));
}

void BytecodeGenerator::VisitVariableProxy(VariableProxy* proxy) {
  builder()->SetExpressionPosition(proxy);
  BuildVariableLoad(proxy->var(), proxy->hole_check_mode());
}

void BytecodeGenerator::BuildVariableLoad(Variable* variable,
                                          HoleCheckMode hole_check_mode,
                                          TypeofMode typeof_mode) {
  switch (variable->location()) {
    case VariableLocation::LOCAL: {
      Register source(builder()->Local(variable->index()));
      // We need to load the variable into the accumulator, even when in a
      // VisitForRegisterScope, in order to avoid register aliasing if
      // subsequent expressions assign to the same variable.
      builder()->LoadAccumulatorWithRegister(source);
      if (hole_check_mode == HoleCheckMode::kRequired) {
        BuildThrowIfHole(variable);
      }
      break;
    }
    case VariableLocation::PARAMETER: {
      Register source;
      if (variable->IsReceiver()) {
        source = builder()->Receiver();
      } else {
        source = builder()->Parameter(variable->index());
      }
      // We need to load the variable into the accumulator, even when in a
      // VisitForRegisterScope, in order to avoid register aliasing if
      // subsequent expressions assign to the same variable.
      builder()->LoadAccumulatorWithRegister(source);
      if (hole_check_mode == HoleCheckMode::kRequired) {
        BuildThrowIfHole(variable);
      }
      break;
    }
    case VariableLocation::UNALLOCATED: {
      // The global identifier "undefined" is immutable. Everything
      // else could be reassigned. For performance, we do a pointer comparison
      // rather than checking if the raw_name is really "undefined".
      if (variable->raw_name() == ast_string_constants()->undefined_string()) {
        builder()->LoadUndefined();
      } else {
        FeedbackSlot slot = GetCachedLoadGlobalICSlot(typeof_mode, variable);
        builder()->LoadGlobal(variable->raw_name(), feedback_index(slot),
                              typeof_mode);
      }
      break;
    }
2670
    case VariableLocation::CONTEXT: {
2671 2672 2673
      int depth = execution_context()->ContextChainDepth(variable->scope());
      ContextScope* context = execution_context()->Previous(depth);
      Register context_reg;
2674
      if (context) {
2675
        context_reg = context->reg();
2676
        depth = 0;
2677
      } else {
2678
        context_reg = execution_context()->reg();
2679
      }
2680

2681 2682 2683 2684 2685 2686 2687
      BytecodeArrayBuilder::ContextSlotMutability immutable =
          (variable->maybe_assigned() == kNotAssigned)
              ? BytecodeArrayBuilder::kImmutableSlot
              : BytecodeArrayBuilder::kMutableSlot;

      builder()->LoadContextSlot(context_reg, variable->index(), depth,
                                 immutable);
2688
      if (hole_check_mode == HoleCheckMode::kRequired) {
2689
        BuildThrowIfHole(variable);
2690
      }
2691 2692
      break;
    }
2693
    case VariableLocation::LOOKUP: {
2694
      switch (variable->mode()) {
2695
        case VariableMode::kDynamicLocal: {
2696 2697 2698
          Variable* local_variable = variable->local_if_not_shadowed();
          int depth =
              execution_context()->ContextChainDepth(local_variable->scope());
2699
          builder()->LoadLookupContextSlot(variable->raw_name(), typeof_mode,
2700
                                           local_variable->index(), depth);
2701
          if (hole_check_mode == HoleCheckMode::kRequired) {
2702
            BuildThrowIfHole(variable);
2703
          }
2704 2705
          break;
        }
2706
        case VariableMode::kDynamicGlobal: {
2707
          int depth =
2708
              current_scope()->ContextChainLengthUntilOutermostSloppyEval();
2709
          FeedbackSlot slot = GetCachedLoadGlobalICSlot(typeof_mode, variable);
2710
          builder()->LoadLookupGlobalSlot(variable->raw_name(), typeof_mode,
2711 2712 2713
                                          feedback_index(slot), depth);
          break;
        }
2714
        default:
2715
          builder()->LoadLookupSlot(variable->raw_name(), typeof_mode);
2716
      }
2717 2718
      break;
    }
2719
    case VariableLocation::MODULE: {
2720 2721
      int depth = execution_context()->ContextChainDepth(variable->scope());
      builder()->LoadModuleVariable(variable->index(), depth);
2722
      if (hole_check_mode == HoleCheckMode::kRequired) {
2723
        BuildThrowIfHole(variable);
2724
      }
2725 2726
      break;
    }
2727 2728 2729
  }
}

void BytecodeGenerator::BuildVariableLoadForAccumulatorValue(
    Variable* variable, HoleCheckMode hole_check_mode, TypeofMode typeof_mode) {
  ValueResultScope accumulator_result(this);
  BuildVariableLoad(variable, hole_check_mode, typeof_mode);
}

void BytecodeGenerator::BuildReturn(int source_position) {
  if (FLAG_trace) {
    RegisterAllocationScope register_scope(this);
    Register result = register_allocator()->NewRegister();
    // Runtime returns {result} value, preserving accumulator.
    builder()->StoreAccumulatorInRegister(result).CallRuntime(
        Runtime::kTraceExit, result);
  }
  if (info()->collect_type_profile()) {
    builder()->CollectTypeProfile(info()->literal()->return_position());
  }
  builder()->SetReturnPosition(source_position, info()->literal());
  builder()->Return();
}

void BytecodeGenerator::BuildAsyncReturn(int source_position) {
  RegisterAllocationScope register_scope(this);

  if (IsAsyncGeneratorFunction(info()->literal()->kind())) {
    RegisterList args = register_allocator()->NewRegisterList(3);
    builder()
        ->MoveRegister(generator_object(), args[0])  // generator
        .StoreAccumulatorInRegister(args[1])         // value
        .LoadTrue()
        .StoreAccumulatorInRegister(args[2])  // done
        .CallRuntime(Runtime::kInlineAsyncGeneratorResolve, args);
  } else {
    DCHECK(IsAsyncFunction(info()->literal()->kind()));
    RegisterList args = register_allocator()->NewRegisterList(3);
    builder()
        ->MoveRegister(generator_object(), args[0])  // generator
        .StoreAccumulatorInRegister(args[1])         // value
        .LoadBoolean(info()->literal()->CanSuspend())
        .StoreAccumulatorInRegister(args[2])  // can_suspend
        .CallRuntime(Runtime::kInlineAsyncFunctionResolve, args);
  }

  BuildReturn(source_position);
}

void BytecodeGenerator::BuildReThrow() { builder()->ReThrow(); }

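// Illustrative sketch: a TDZ read such as
//   { x; let x; }
// leaves the hole value in the accumulator, and the check below emits
// ThrowReferenceErrorIfHole('x'); for 'this' in a derived constructor the
// hole instead means super() has not been called yet, hence
// ThrowSuperNotCalledIfHole.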
void BytecodeGenerator::BuildThrowIfHole(Variable* variable) {
  if (variable->is_this()) {
    DCHECK(variable->mode() == VariableMode::kConst);
    builder()->ThrowSuperNotCalledIfHole();
  } else {
    builder()->ThrowReferenceErrorIfHole(variable->raw_name());
  }
}

void BytecodeGenerator::BuildHoleCheckForVariableAssignment(Variable* variable,
                                                            Token::Value op) {
  if (variable->is_this() && variable->mode() == VariableMode::kConst &&
      op == Token::INIT) {
    // Perform an initialization check for 'this'. 'this' variable is the
    // only variable able to trigger bind operations outside the TDZ
    // via 'super' calls.
    builder()->ThrowSuperAlreadyCalledIfNotHole();
  } else {
    // Perform an initialization check for let/const declared variables.
    // E.g. let x = (x = 20); is not allowed.
    DCHECK(IsLexicalVariableMode(variable->mode()));
    BuildThrowIfHole(variable);
  }
}

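// Illustrative sketch of the cases below:
//   let y; y = 1;        // LOCAL: the accumulator is stored into y's register.
//   g = 1;               // UNALLOCATED: StoreGlobal with a cached store IC slot.
//   const c = 0; c = 1;  // non-INIT store to a const: kThrowConstAssignError
//                        // (when throw_on_const_assignment holds).
// Targets that still require a hole check reload the destination first and
// run BuildHoleCheckForVariableAssignment before the store.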
void BytecodeGenerator::BuildVariableAssignment(
    Variable* variable, Token::Value op, HoleCheckMode hole_check_mode,
    LookupHoistingMode lookup_hoisting_mode) {
  VariableMode mode = variable->mode();
  RegisterAllocationScope assignment_register_scope(this);
  BytecodeLabel end_label;
  switch (variable->location()) {
    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Register destination;
      if (VariableLocation::PARAMETER == variable->location()) {
        if (variable->IsReceiver()) {
          destination = builder()->Receiver();
        } else {
          destination = builder()->Parameter(variable->index());
        }
      } else {
        destination = builder()->Local(variable->index());
      }

      if (hole_check_mode == HoleCheckMode::kRequired) {
        // Load destination to check for hole.
        Register value_temp = register_allocator()->NewRegister();
        builder()
            ->StoreAccumulatorInRegister(value_temp)
            .LoadAccumulatorWithRegister(destination);

        BuildHoleCheckForVariableAssignment(variable, op);
        builder()->LoadAccumulatorWithRegister(value_temp);
      }

      if (mode != VariableMode::kConst || op == Token::INIT) {
        builder()->StoreAccumulatorInRegister(destination);
      } else if (variable->throw_on_const_assignment(language_mode())) {
        builder()->CallRuntime(Runtime::kThrowConstAssignError);
      }
      break;
    }
    case VariableLocation::UNALLOCATED: {
      FeedbackSlot slot = GetCachedStoreGlobalICSlot(language_mode(), variable);
      builder()->StoreGlobal(variable->raw_name(), feedback_index(slot));
      break;
    }
    case VariableLocation::CONTEXT: {
      int depth = execution_context()->ContextChainDepth(variable->scope());
      ContextScope* context = execution_context()->Previous(depth);
      Register context_reg;

      if (context) {
        context_reg = context->reg();
        depth = 0;
      } else {
        context_reg = execution_context()->reg();
      }

      if (hole_check_mode == HoleCheckMode::kRequired) {
        // Load destination to check for hole.
        Register value_temp = register_allocator()->NewRegister();
        builder()
            ->StoreAccumulatorInRegister(value_temp)
            .LoadContextSlot(context_reg, variable->index(), depth,
                             BytecodeArrayBuilder::kMutableSlot);

        BuildHoleCheckForVariableAssignment(variable, op);
        builder()->LoadAccumulatorWithRegister(value_temp);
      }

      if (mode != VariableMode::kConst || op == Token::INIT) {
        builder()->StoreContextSlot(context_reg, variable->index(), depth);
      } else if (variable->throw_on_const_assignment(language_mode())) {
        builder()->CallRuntime(Runtime::kThrowConstAssignError);
      }
      break;
    }
    case VariableLocation::LOOKUP: {
      builder()->StoreLookupSlot(variable->raw_name(), language_mode(),
                                 lookup_hoisting_mode);
      break;
    }
    case VariableLocation::MODULE: {
      DCHECK(IsDeclaredVariableMode(mode));

      if (mode == VariableMode::kConst && op != Token::INIT) {
        builder()->CallRuntime(Runtime::kThrowConstAssignError);
        break;
      }

      // If we don't throw above, we know that we're dealing with an
      // export because imports are const and we do not generate initializing
      // assignments for them.
      DCHECK(variable->IsExport());

      int depth = execution_context()->ContextChainDepth(variable->scope());
      if (hole_check_mode == HoleCheckMode::kRequired) {
        Register value_temp = register_allocator()->NewRegister();
        builder()
            ->StoreAccumulatorInRegister(value_temp)
            .LoadModuleVariable(variable->index(), depth);
        BuildHoleCheckForVariableAssignment(variable, op);
        builder()->LoadAccumulatorWithRegister(value_temp);
      }
      builder()->StoreModuleVariable(variable->index(), depth);
      break;
    }
  }
}

void BytecodeGenerator::BuildLoadNamedProperty(Property* property,
                                               Register object,
                                               const AstRawString* name) {
  if (ShouldOptimizeAsOneShot()) {
    builder()->LoadNamedPropertyNoFeedback(object, name);
  } else {
    FeedbackSlot slot = GetCachedLoadICSlot(property->obj(), name);
    builder()->LoadNamedProperty(object, name, feedback_index(slot));
  }
}

void BytecodeGenerator::BuildStoreNamedProperty(Property* property,
                                                Register object,
                                                const AstRawString* name) {
  Register value;
  if (!execution_result()->IsEffect()) {
    value = register_allocator()->NewRegister();
    builder()->StoreAccumulatorInRegister(value);
  }

  if (ShouldOptimizeAsOneShot()) {
    builder()->StoreNamedPropertyNoFeedback(object, name, language_mode());
  } else {
    FeedbackSlot slot = GetCachedStoreICSlot(property->obj(), name);
    builder()->StoreNamedProperty(object, name, feedback_index(slot),
                                  language_mode());
  }

  if (!execution_result()->IsEffect()) {
    builder()->LoadAccumulatorWithRegister(value);
  }
}

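// Illustrative sketch: a compound assignment such as
//   o.x += 1
// visits 'o' into a register (NAMED_PROPERTY target), loads o.x via
// BuildLoadNamedProperty, applies BinaryOperationSmiLiteral(ADD, 1), and then
// stores the result with BuildStoreNamedProperty; a plain 'x = v' only
// evaluates 'v' and delegates to BuildVariableAssignment.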
void BytecodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpression() ||
         (expr->op() == Token::INIT && expr->target()->IsVariableProxy() &&
          expr->target()->AsVariableProxy()->is_this()));
  Register object, key;
  RegisterList super_property_args;
  const AstRawString* name;

  // Left-hand side can only be a property, a global or a variable slot.
  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do to evaluate variable assignment LHS.
      break;
    case NAMED_PROPERTY: {
      object = VisitForRegisterValue(property->obj());
      name = property->key()->AsLiteral()->AsRawPropertyName();
      break;
    }
    case KEYED_PROPERTY: {
      object = VisitForRegisterValue(property->obj());
      key = VisitForRegisterValue(property->key());
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      super_property_args = register_allocator()->NewRegisterList(4);
      SuperPropertyReference* super_property =
          property->obj()->AsSuperPropertyReference();
      VisitForRegisterValue(super_property->this_var(), super_property_args[0]);
      VisitForRegisterValue(super_property->home_object(),
                            super_property_args[1]);
      builder()
          ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
          .StoreAccumulatorInRegister(super_property_args[2]);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      super_property_args = register_allocator()->NewRegisterList(4);
      SuperPropertyReference* super_property =
          property->obj()->AsSuperPropertyReference();
      VisitForRegisterValue(super_property->this_var(), super_property_args[0]);
      VisitForRegisterValue(super_property->home_object(),
                            super_property_args[1]);
      VisitForRegisterValue(property->key(), super_property_args[2]);
      break;
    }
  }

  // Evaluate the value and potentially handle compound assignments by loading
  // the left-hand side value and performing a binary operation.
  if (expr->IsCompoundAssignment()) {
    switch (assign_type) {
      case VARIABLE: {
        VariableProxy* proxy = expr->target()->AsVariableProxy();
        BuildVariableLoad(proxy->var(), proxy->hole_check_mode());
        break;
      }
      case NAMED_PROPERTY: {
        BuildLoadNamedProperty(property, object, name);
        break;
      }
      case KEYED_PROPERTY: {
        // Key is already in accumulator at this point due to evaluating the
        // LHS above.
        FeedbackSlot slot = feedback_spec()->AddKeyedLoadICSlot();
        builder()->LoadKeyedProperty(object, feedback_index(slot));
        break;
      }
      case NAMED_SUPER_PROPERTY: {
        builder()->CallRuntime(Runtime::kLoadFromSuper,
                               super_property_args.Truncate(3));
        break;
      }
      case KEYED_SUPER_PROPERTY: {
        builder()->CallRuntime(Runtime::kLoadKeyedFromSuper,
                               super_property_args.Truncate(3));
        break;
      }
    }
    BinaryOperation* binop = expr->AsCompoundAssignment()->binary_operation();
    FeedbackSlot slot = feedback_spec()->AddBinaryOpICSlot();
    if (expr->value()->IsSmiLiteral()) {
      builder()->BinaryOperationSmiLiteral(
          binop->op(), expr->value()->AsLiteral()->AsSmiLiteral(),
          feedback_index(slot));
    } else {
      Register old_value = register_allocator()->NewRegister();
      builder()->StoreAccumulatorInRegister(old_value);
      VisitForAccumulatorValue(expr->value());
      builder()->BinaryOperation(binop->op(), old_value, feedback_index(slot));
    }
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  // Store the value.
  builder()->SetExpressionPosition(expr);
  switch (assign_type) {
    case VARIABLE: {
      // TODO(oth): The BuildVariableAssignment() call is hard to reason about.
      // Is the value in the accumulator safe? Yes, but scary.
      VariableProxy* proxy = expr->target()->AsVariableProxy();
      BuildVariableAssignment(proxy->var(), expr->op(),
                              proxy->hole_check_mode(),
                              expr->lookup_hoisting_mode());
      break;
    }
    case NAMED_PROPERTY: {
      BuildStoreNamedProperty(property, object, name);
      break;
    }
    case KEYED_PROPERTY: {
      FeedbackSlot slot = feedback_spec()->AddKeyedStoreICSlot(language_mode());
      Register value;
      if (!execution_result()->IsEffect()) {
        value = register_allocator()->NewRegister();
        builder()->StoreAccumulatorInRegister(value);
      }
      builder()->StoreKeyedProperty(object, key, feedback_index(slot),
                                    language_mode());
      if (!execution_result()->IsEffect()) {
        builder()->LoadAccumulatorWithRegister(value);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      builder()
          ->StoreAccumulatorInRegister(super_property_args[3])
          .CallRuntime(StoreToSuperRuntimeId(), super_property_args);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      builder()
          ->StoreAccumulatorInRegister(super_property_args[3])
          .CallRuntime(StoreKeyedToSuperRuntimeId(), super_property_args);
      break;
    }
  }
}

void BytecodeGenerator::VisitCompoundAssignment(CompoundAssignment* expr) {
  VisitAssignment(expr);
}

// Suspends the generator to resume at the next suspend_id, with output stored
// in the accumulator. When the generator is resumed, the sent value is loaded
// in the accumulator.
void BytecodeGenerator::BuildSuspendPoint(Expression* suspend_expr) {
  const int suspend_id = suspend_count_++;

  RegisterList registers = register_allocator()->AllLiveRegisters();

  // Save context, registers, and state. This bytecode then returns the value
  // in the accumulator.
  builder()->SetExpressionPosition(suspend_expr);
  builder()->SuspendGenerator(generator_object(), registers, suspend_id);

  // Upon resume, we continue here.
  builder()->Bind(generator_jump_table_, suspend_id);

  // Clobbers all registers and sets the accumulator to the
  // [[input_or_debug_pos]] slot of the generator object.
  builder()->ResumeGenerator(generator_object(), registers);
}

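// Illustrative sketch: for a non-initial 'yield v' in a normal generator the
// value is wrapped via Runtime::kInlineCreateIterResultObject, the generator
// suspends at BuildSuspendPoint, and on resumption the jump table below
// dispatches on the resume mode (next / return / throw).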
void BytecodeGenerator::VisitYield(Yield* expr) {
  builder()->SetExpressionPosition(expr);
  VisitForAccumulatorValue(expr->expression());

  // If this is not the first yield, wrap the value before suspending.
  if (suspend_count_ > 0) {
    if (IsAsyncGeneratorFunction(function_kind())) {
      // AsyncGenerator yields (with the exception of the initial yield)
      // delegate work to the AsyncGeneratorYield stub, which Awaits the operand
      // and on success, wraps the value in an IteratorResult.
      RegisterAllocationScope register_scope(this);
      RegisterList args = register_allocator()->NewRegisterList(3);
      builder()
          ->MoveRegister(generator_object(), args[0])  // generator
          .StoreAccumulatorInRegister(args[1])         // value
          .LoadBoolean(catch_prediction() != HandlerTable::ASYNC_AWAIT)
          .StoreAccumulatorInRegister(args[2])  // is_caught
          .CallRuntime(Runtime::kInlineAsyncGeneratorYield, args);
    } else {
      // Generator yields (with the exception of the initial yield) wrap the
      // value into IteratorResult.
      RegisterAllocationScope register_scope(this);
      RegisterList args = register_allocator()->NewRegisterList(2);
      builder()
          ->StoreAccumulatorInRegister(args[0])  // value
          .LoadFalse()
          .StoreAccumulatorInRegister(args[1])  // done
          .CallRuntime(Runtime::kInlineCreateIterResultObject, args);
    }
  }

  BuildSuspendPoint(expr);
  // At this point, the generator has been resumed, with the received value in
  // the accumulator.

  // TODO(caitp): remove once yield* desugaring for async generators is handled
  // in BytecodeGenerator.
  if (expr->on_abrupt_resume() == Yield::kNoControl) {
    DCHECK(IsAsyncGeneratorFunction(function_kind()));
    return;
  }

  Register input = register_allocator()->NewRegister();
  builder()->StoreAccumulatorInRegister(input).CallRuntime(
      Runtime::kInlineGeneratorGetResumeMode, generator_object());

  // Now dispatch on resume mode.
  STATIC_ASSERT(JSGeneratorObject::kNext + 1 == JSGeneratorObject::kReturn);
  BytecodeJumpTable* jump_table =
      builder()->AllocateJumpTable(2, JSGeneratorObject::kNext);

  builder()->SwitchOnSmiNoFeedback(jump_table);

  {
    // Resume with throw (switch fallthrough).
    // TODO(leszeks): Add a debug-only check that the accumulator is
    // JSGeneratorObject::kThrow.
    builder()->SetExpressionPosition(expr);
    builder()->LoadAccumulatorWithRegister(input);
    builder()->Throw();
  }

  {
    // Resume with return.
    builder()->Bind(jump_table, JSGeneratorObject::kReturn);
    builder()->LoadAccumulatorWithRegister(input);
    if (IsAsyncGeneratorFunction(function_kind())) {
      execution_control()->AsyncReturnAccumulator();
    } else {
      execution_control()->ReturnAccumulator();
    }
  }

  {
    // Resume with next.
    builder()->Bind(jump_table, JSGeneratorObject::kNext);
    BuildIncrementBlockCoverageCounterIfEnabled(expr,
                                                SourceRangeKind::kContinuation);
    builder()->LoadAccumulatorWithRegister(input);
  }
}

// Desugaring of (yield* iterable)
//
//   do {
//     const kNext = 0;
//     const kReturn = 1;
//     const kThrow = 2;
//
//     let output; // uninitialized
//
//     let iteratorRecord = GetIterator(iterable);
//     let iterator = iteratorRecord.[[Iterator]];
//     let next = iteratorRecord.[[NextMethod]];
//     let input = undefined;
//     let resumeMode = kNext;
//
//     while (true) {
//       // From the generator to the iterator:
//       // Forward input according to resumeMode and obtain output.
//       switch (resumeMode) {
//         case kNext:
//           output = next.[[Call]](iterator, « »);
//           break;
//         case kReturn:
//           let iteratorReturn = iterator.return;
//           if (IS_NULL_OR_UNDEFINED(iteratorReturn)) return input;
//           output = iteratorReturn.[[Call]](iterator, «input»);
//           break;
//         case kThrow:
//           let iteratorThrow = iterator.throw;
//           if (IS_NULL_OR_UNDEFINED(iteratorThrow)) {
//             let iteratorReturn = iterator.return;
//             if (!IS_NULL_OR_UNDEFINED(iteratorReturn)) {
//               output = iteratorReturn.[[Call]](iterator, « »);
//               if (IS_ASYNC_GENERATOR) output = await output;
//               if (!IS_RECEIVER(output)) %ThrowIterResultNotAnObject(output);
//             }
//             throw MakeTypeError(kThrowMethodMissing);
//           }
//           output = iteratorThrow.[[Call]](iterator, «input»);
//           break;
//       }
//
//       if (IS_ASYNC_GENERATOR) output = await output;
//       if (!IS_RECEIVER(output)) %ThrowIterResultNotAnObject(output);
//       if (output.done) break;
//
//       // From the generator to its user:
//       // Forward output, receive new input, and determine resume mode.
//       if (IS_ASYNC_GENERATOR) {
//         // AsyncGeneratorYield abstract operation awaits the operand before
//         // resolving the promise for the current AsyncGeneratorRequest.
//         %_AsyncGeneratorYield(output.value)
//       }
//       input = Suspend(output);
//       resumeMode = %GeneratorGetResumeMode();
//     }
//
//     if (resumeMode === kReturn) {
//       return output.value;
//     }
//     output.value
//   }
void BytecodeGenerator::VisitYieldStar(YieldStar* expr) {
  Register output = register_allocator()->NewRegister();
  Register resume_mode = register_allocator()->NewRegister();
  IteratorType iterator_type = IsAsyncGeneratorFunction(function_kind())
                                   ? IteratorType::kAsync
                                   : IteratorType::kNormal;

  {
    RegisterAllocationScope register_scope(this);
    RegisterList iterator_and_input = register_allocator()->NewRegisterList(2);
    IteratorRecord iterator = BuildGetIteratorRecord(
        expr->expression(),
        register_allocator()->NewRegister() /* next method */,
        iterator_and_input[0], iterator_type);

    Register input = iterator_and_input[1];
    builder()->LoadUndefined().StoreAccumulatorInRegister(input);
    builder()
        ->LoadLiteral(Smi::FromInt(JSGeneratorObject::kNext))
        .StoreAccumulatorInRegister(resume_mode);

    {
      // This loop builder does not construct counters as the loop is not
      // visible to the user, and we therefore neither pass the block coverage
      // builder nor the expression.
      //
      // In addition to the normal suspend for yield*, a yield* in an async
      // generator has 2 additional suspends:
      //   - One for awaiting the iterator result of closing the generator when
      //     resumed with a "throw" completion, and a throw method is not
      //     present on the delegated iterator
      //   - One for awaiting the iterator result yielded by the delegated
      //     iterator

      LoopBuilder loop(builder(), nullptr, nullptr);
      loop.LoopHeader();

      {
        BytecodeLabels after_switch(zone());
        BytecodeJumpTable* switch_jump_table =
            builder()->AllocateJumpTable(2, 1);

        builder()
            ->LoadAccumulatorWithRegister(resume_mode)
            .SwitchOnSmiNoFeedback(switch_jump_table);

        // Fallthrough to default case.
        // TODO(tebbi): Add debug code to check that {resume_mode} really is
        // {JSGeneratorObject::kNext} in this case.
        STATIC_ASSERT(JSGeneratorObject::kNext == 0);
        {
          FeedbackSlot slot = feedback_spec()->AddCallICSlot();
          builder()->CallProperty(iterator.next(), iterator_and_input,
                                  feedback_index(slot));
          builder()->Jump(after_switch.New());
        }

        STATIC_ASSERT(JSGeneratorObject::kReturn == 1);
        builder()->Bind(switch_jump_table, JSGeneratorObject::kReturn);
        {
          const AstRawString* return_string =
              ast_string_constants()->return_string();
          BytecodeLabels no_return_method(zone());

          BuildCallIteratorMethod(iterator.object(), return_string,
                                  iterator_and_input, after_switch.New(),
                                  &no_return_method);
          no_return_method.Bind(builder());
          builder()->LoadAccumulatorWithRegister(input);
          if (iterator_type == IteratorType::kAsync) {
            execution_control()->AsyncReturnAccumulator();
          } else {
            execution_control()->ReturnAccumulator();
          }
        }

        STATIC_ASSERT(JSGeneratorObject::kThrow == 2);
        builder()->Bind(switch_jump_table, JSGeneratorObject::kThrow);
        {
          const AstRawString* throw_string =
              ast_string_constants()->throw_string();
          BytecodeLabels no_throw_method(zone());
          BuildCallIteratorMethod(iterator.object(), throw_string,
                                  iterator_and_input, after_switch.New(),
                                  &no_throw_method);

          // If there is no "throw" method, perform IteratorClose, and finally
          // throw a TypeError.
          no_throw_method.Bind(builder());
          BuildIteratorClose(iterator, expr);
          builder()->CallRuntime(Runtime::kThrowThrowMethodMissing);
        }

        after_switch.Bind(builder());
      }

      if (iterator_type == IteratorType::kAsync) {
        // Await the result of the method invocation.
        BuildAwait(expr);
      }

      // Check that output is an object.
      BytecodeLabel check_if_done;
      builder()
          ->StoreAccumulatorInRegister(output)
          .JumpIfJSReceiver(&check_if_done)
          .CallRuntime(Runtime::kThrowIteratorResultNotAnObject, output);

      builder()->Bind(&check_if_done);
      // Break once output.done is true.
      builder()->LoadNamedProperty(
          output, ast_string_constants()->done_string(),
          feedback_index(feedback_spec()->AddLoadICSlot()));

      loop.BreakIfTrue(ToBooleanMode::kConvertToBoolean);

      // Suspend the current generator.
      if (iterator_type == IteratorType::kNormal) {
        builder()->LoadAccumulatorWithRegister(output);
      } else {
        RegisterAllocationScope register_scope(this);
        DCHECK_EQ(iterator_type, IteratorType::kAsync);
        // If generatorKind is async, perform AsyncGeneratorYield(output.value),
        // which will await `output.value` before resolving the current
        // AsyncGeneratorRequest's promise.
        builder()->LoadNamedProperty(
            output, ast_string_constants()->value_string(),
            feedback_index(feedback_spec()->AddLoadICSlot()));

        RegisterList args = register_allocator()->NewRegisterList(3);
        builder()
            ->MoveRegister(generator_object(), args[0])  // generator
            .StoreAccumulatorInRegister(args[1])         // value
            .LoadBoolean(catch_prediction() != HandlerTable::ASYNC_AWAIT)
            .StoreAccumulatorInRegister(args[2])  // is_caught
            .CallRuntime(Runtime::kInlineAsyncGeneratorYield, args);
      }

      BuildSuspendPoint(expr);
      builder()->StoreAccumulatorInRegister(input);
      builder()
          ->CallRuntime(Runtime::kInlineGeneratorGetResumeMode,
                        generator_object())
          .StoreAccumulatorInRegister(resume_mode);

      loop.BindContinueTarget();
      loop.JumpToHeader(loop_depth_);
    }
  }

  // Decide if we trigger a return or if the yield* expression should just
  // produce a value.
  BytecodeLabel completion_is_output_value;
  Register output_value = register_allocator()->NewRegister();
  builder()
      ->LoadNamedProperty(output, ast_string_constants()->value_string(),
                          feedback_index(feedback_spec()->AddLoadICSlot()))
      .StoreAccumulatorInRegister(output_value)
      .LoadLiteral(Smi::FromInt(JSGeneratorObject::kReturn))
      .CompareReference(resume_mode)
      .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &completion_is_output_value)
      .LoadAccumulatorWithRegister(output_value);
  if (iterator_type == IteratorType::kAsync) {
    execution_control()->AsyncReturnAccumulator();
  } else {
    execution_control()->ReturnAccumulator();
  }

  builder()->Bind(&completion_is_output_value);
  BuildIncrementBlockCoverageCounterIfEnabled(expr,
                                              SourceRangeKind::kContinuation);
  builder()->LoadAccumulatorWithRegister(output_value);
}

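// Illustrative sketch: for 'await p' the operand in the accumulator is handed
// to the matching Await intrinsic (async function vs. async generator,
// caught vs. uncaught prediction), the function suspends, and on resumption a
// 'throw' completion is rethrown while a 'next' completion leaves the awaited
// value in the accumulator.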
void BytecodeGenerator::BuildAwait(Expression* await_expr) {
  // Rather than HandlerTable::UNCAUGHT, async functions use
  // HandlerTable::ASYNC_AWAIT to communicate that top-level exceptions are
  // transformed into promise rejections. This is necessary to prevent emitting
  // multiple debug events for the same uncaught exception. There is no point
  // in the body of an async function where catch prediction is
  // HandlerTable::UNCAUGHT.
  DCHECK(catch_prediction() != HandlerTable::UNCAUGHT);

  {
    // Await(operand) and suspend.
    RegisterAllocationScope register_scope(this);

    Runtime::FunctionId await_intrinsic_id;
    if (IsAsyncGeneratorFunction(function_kind())) {
      await_intrinsic_id = catch_prediction() == HandlerTable::ASYNC_AWAIT
                               ? Runtime::kInlineAsyncGeneratorAwaitUncaught
                               : Runtime::kInlineAsyncGeneratorAwaitCaught;
    } else {
      await_intrinsic_id = catch_prediction() == HandlerTable::ASYNC_AWAIT
                               ? Runtime::kInlineAsyncFunctionAwaitUncaught
                               : Runtime::kInlineAsyncFunctionAwaitCaught;
    }
    RegisterList args = register_allocator()->NewRegisterList(2);
    builder()
        ->MoveRegister(generator_object(), args[0])
        .StoreAccumulatorInRegister(args[1])
        .CallRuntime(await_intrinsic_id, args);
  }

  BuildSuspendPoint(await_expr);

  Register input = register_allocator()->NewRegister();
  Register resume_mode = register_allocator()->NewRegister();

  // Now dispatch on resume mode.
  BytecodeLabel resume_next;
  builder()
      ->StoreAccumulatorInRegister(input)
      .CallRuntime(Runtime::kInlineGeneratorGetResumeMode, generator_object())
      .StoreAccumulatorInRegister(resume_mode)
      .LoadLiteral(Smi::FromInt(JSGeneratorObject::kNext))
      .CompareReference(resume_mode)
      .JumpIfTrue(ToBooleanMode::kAlreadyBoolean, &resume_next);

  // Resume with "throw" completion (rethrow the received value).
  // TODO(leszeks): Add a debug-only check that the accumulator is
  // JSGeneratorObject::kThrow.
  builder()->LoadAccumulatorWithRegister(input).ReThrow();

  // Resume with next.
  builder()->Bind(&resume_next);
  builder()->LoadAccumulatorWithRegister(input);
}

void BytecodeGenerator::VisitAwait(Await* expr) {
  builder()->SetExpressionPosition(expr);
  VisitForAccumulatorValue(expr->expression());
  BuildAwait(expr);
  BuildIncrementBlockCoverageCounterIfEnabled(expr,
                                              SourceRangeKind::kContinuation);
}

void BytecodeGenerator::VisitThrow(Throw* expr) {
  AllocateBlockCoverageSlotIfEnabled(expr, SourceRangeKind::kContinuation);
  VisitForAccumulatorValue(expr->exception());
  builder()->SetExpressionPosition(expr);
  builder()->Throw();
}

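// Illustrative sketch: 'o.x' takes the NAMED_PROPERTY path (LoadNamedProperty
// with a load IC slot, or the no-feedback variant for one-shot code), 'o[k]'
// evaluates the key into the accumulator and emits LoadKeyedProperty, and
// 'super.x' / 'super[k]' delegate to the runtime through the
// VisitNamedSuperPropertyLoad / VisitKeyedSuperPropertyLoad helpers below.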
void BytecodeGenerator::VisitPropertyLoad(Register obj, Property* property) {
  LhsKind property_kind = Property::GetAssignType(property);
  switch (property_kind) {
    case VARIABLE:
      UNREACHABLE();
    case NAMED_PROPERTY: {
      builder()->SetExpressionPosition(property);
      const AstRawString* name =
          property->key()->AsLiteral()->AsRawPropertyName();
      BuildLoadNamedProperty(property, obj, name);
      break;
    }
    case KEYED_PROPERTY: {
      VisitForAccumulatorValue(property->key());
      builder()->SetExpressionPosition(property);
      builder()->LoadKeyedProperty(
          obj, feedback_index(feedback_spec()->AddKeyedLoadICSlot()));
      break;
    }
    case NAMED_SUPER_PROPERTY:
      VisitNamedSuperPropertyLoad(property, Register::invalid_value());
      break;
    case KEYED_SUPER_PROPERTY:
      VisitKeyedSuperPropertyLoad(property, Register::invalid_value());
      break;
  }
}

void BytecodeGenerator::VisitPropertyLoadForRegister(Register obj,
                                                     Property* expr,
                                                     Register destination) {
  ValueResultScope result_scope(this);
  VisitPropertyLoad(obj, expr);
  builder()->StoreAccumulatorInRegister(destination);
}

void BytecodeGenerator::VisitNamedSuperPropertyLoad(Property* property,
                                                    Register opt_receiver_out) {
  RegisterAllocationScope register_scope(this);
  SuperPropertyReference* super_property =
      property->obj()->AsSuperPropertyReference();
  RegisterList args = register_allocator()->NewRegisterList(3);
  VisitForRegisterValue(super_property->this_var(), args[0]);
  VisitForRegisterValue(super_property->home_object(), args[1]);

  builder()->SetExpressionPosition(property);
  builder()
      ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
      .StoreAccumulatorInRegister(args[2])
      .CallRuntime(Runtime::kLoadFromSuper, args);

  if (opt_receiver_out.is_valid()) {
    builder()->MoveRegister(args[0], opt_receiver_out);
  }
}

void BytecodeGenerator::VisitKeyedSuperPropertyLoad(Property* property,
                                                    Register opt_receiver_out) {
  RegisterAllocationScope register_scope(this);
  SuperPropertyReference* super_property =
      property->obj()->AsSuperPropertyReference();
  RegisterList args = register_allocator()->NewRegisterList(3);
  VisitForRegisterValue(super_property->this_var(), args[0]);
  VisitForRegisterValue(super_property->home_object(), args[1]);
  VisitForRegisterValue(property->key(), args[2]);

  builder()->SetExpressionPosition(property);
  builder()->CallRuntime(Runtime::kLoadKeyedFromSuper, args);

  if (opt_receiver_out.is_valid()) {
    builder()->MoveRegister(args[0], opt_receiver_out);
  }
}

void BytecodeGenerator::VisitProperty(Property* expr) {
  LhsKind property_kind = Property::GetAssignType(expr);
  if (property_kind != NAMED_SUPER_PROPERTY &&
      property_kind != KEYED_SUPER_PROPERTY) {
    Register obj = VisitForRegisterValue(expr->obj());
    VisitPropertyLoad(obj, expr);
  } else {
    VisitPropertyLoad(Register::invalid_value(), expr);
  }
}

void BytecodeGenerator::VisitResolvedProperty(ResolvedProperty* expr) {
  // Handled by VisitCall().
  UNREACHABLE();
}

void BytecodeGenerator::VisitArguments(const ZonePtrList<Expression>* args,
                                       RegisterList* arg_regs) {
  // Visit arguments.
  for (int i = 0; i < static_cast<int>(args->length()); i++) {
    VisitAndPushIntoRegisterList(args->at(i), arg_regs);
  }
}

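// Illustrative sketch: 'o.f(x)' is a NAMED_PROPERTY_CALL (the receiver 'o' is
// pushed first, 'f' is loaded into the callee register, and CallProperty is
// emitted), while a bare 'f(x)' resolving to a global is a GLOBAL_CALL that
// normally keeps the receiver implicit and becomes CallUndefinedReceiver; a
// potential direct 'eval(s)' additionally runs
// Runtime::kResolvePossiblyDirectEval on the callee.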
void BytecodeGenerator::VisitCall(Call* expr) {
  Expression* callee_expr = expr->expression();
  Call::CallType call_type = expr->GetCallType();

  if (call_type == Call::SUPER_CALL) {
    return VisitCallSuper(expr);
  }

  // Grow the args list as we visit receiver / arguments to avoid allocating all
  // the registers up-front. Otherwise these registers are unavailable during
  // receiver / argument visiting and we can end up with memory leaks due to
  // registers keeping objects alive.
  Register callee = register_allocator()->NewRegister();
  RegisterList args = register_allocator()->NewGrowableRegisterList();

  bool implicit_undefined_receiver = false;
  // When a call contains a spread, a Call AST node is only created if there is
  // exactly one spread, and it is the last argument.
  bool is_spread_call = expr->only_last_arg_is_spread();
  bool optimize_as_one_shot = ShouldOptimizeAsOneShot();

  // TODO(petermarshall): We have a lot of call bytecodes that are very similar,
  // see if we can reduce the number by adding a separate argument which
  // specifies the call type (e.g., property, spread, tailcall, etc.).

  // Prepare the callee and the receiver for the function call. This depends on
  // the semantics of the underlying call type.
  switch (call_type) {
    case Call::NAMED_PROPERTY_CALL:
    case Call::KEYED_PROPERTY_CALL: {
      Property* property = callee_expr->AsProperty();
      VisitAndPushIntoRegisterList(property->obj(), &args);
      VisitPropertyLoadForRegister(args.last_register(), property, callee);
      break;
    }
    case Call::RESOLVED_PROPERTY_CALL: {
      ResolvedProperty* resolved = callee_expr->AsResolvedProperty();
      VisitAndPushIntoRegisterList(resolved->object(), &args);
      VisitForAccumulatorValue(resolved->property());
      builder()->StoreAccumulatorInRegister(callee);
      break;
    }
    case Call::GLOBAL_CALL: {
      // Receiver is undefined for global calls.
      if (!is_spread_call && !optimize_as_one_shot) {
        implicit_undefined_receiver = true;
      } else {
        // TODO(leszeks): There's no special bytecode for tail calls or spread
        // calls with an undefined receiver, so just push undefined ourselves.
        BuildPushUndefinedIntoRegisterList(&args);
      }
      // Load callee as a global variable.
      VariableProxy* proxy = callee_expr->AsVariableProxy();
      BuildVariableLoadForAccumulatorValue(proxy->var(),
                                           proxy->hole_check_mode());
      builder()->StoreAccumulatorInRegister(callee);
      break;
    }
    case Call::WITH_CALL: {
      Register receiver = register_allocator()->GrowRegisterList(&args);
      DCHECK(callee_expr->AsVariableProxy()->var()->IsLookupSlot());
      {
        RegisterAllocationScope inner_register_scope(this);
        Register name = register_allocator()->NewRegister();

        // Call %LoadLookupSlotForCall to get the callee and receiver.
        RegisterList result_pair = register_allocator()->NewRegisterList(2);
        Variable* variable = callee_expr->AsVariableProxy()->var();
        builder()
            ->LoadLiteral(variable->raw_name())
            .StoreAccumulatorInRegister(name)
            .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, name,
                                result_pair)
            .MoveRegister(result_pair[0], callee)
            .MoveRegister(result_pair[1], receiver);
      }
      break;
    }
    case Call::OTHER_CALL: {
      // Receiver is undefined for other calls.
      if (!is_spread_call && !optimize_as_one_shot) {
        implicit_undefined_receiver = true;
      } else {
        // TODO(leszeks): There's no special bytecode for tail calls or spread
        // calls with an undefined receiver, so just push undefined ourselves.
        BuildPushUndefinedIntoRegisterList(&args);
      }
      VisitForRegisterValue(callee_expr, callee);
      break;
    }
    case Call::NAMED_SUPER_PROPERTY_CALL: {
      Register receiver = register_allocator()->GrowRegisterList(&args);
      Property* property = callee_expr->AsProperty();
      VisitNamedSuperPropertyLoad(property, receiver);
      builder()->StoreAccumulatorInRegister(callee);
      break;
    }
    case Call::KEYED_SUPER_PROPERTY_CALL: {
      Register receiver = register_allocator()->GrowRegisterList(&args);
      Property* property = callee_expr->AsProperty();
      VisitKeyedSuperPropertyLoad(property, receiver);
      builder()->StoreAccumulatorInRegister(callee);
      break;
    }
    case Call::SUPER_CALL:
      UNREACHABLE();
      break;
  }

  // Evaluate all arguments to the function call and store in sequential args
  // registers.
  VisitArguments(expr->arguments(), &args);
  int receiver_arg_count = implicit_undefined_receiver ? 0 : 1;
  CHECK_EQ(receiver_arg_count + expr->arguments()->length(),
           args.register_count());

  // Resolve callee for a potential direct eval call. This block will mutate the
  // callee value.
  if (expr->is_possibly_eval() && expr->arguments()->length() > 0) {
    RegisterAllocationScope inner_register_scope(this);
    // Set up arguments for ResolvePossiblyDirectEval by copying callee, source
    // strings and function closure, and loading language and
    // position.
    Register first_arg = args[receiver_arg_count];
    RegisterList runtime_call_args = register_allocator()->NewRegisterList(6);
    builder()
        ->MoveRegister(callee, runtime_call_args[0])
        .MoveRegister(first_arg, runtime_call_args[1])
        .MoveRegister(Register::function_closure(), runtime_call_args[2])
        .LoadLiteral(Smi::FromEnum(language_mode()))
        .StoreAccumulatorInRegister(runtime_call_args[3])
        .LoadLiteral(Smi::FromInt(current_scope()->start_position()))
        .StoreAccumulatorInRegister(runtime_call_args[4])
        .LoadLiteral(Smi::FromInt(expr->position()))
        .StoreAccumulatorInRegister(runtime_call_args[5]);

    // Call ResolvePossiblyDirectEval and modify the callee.
    builder()
        ->CallRuntime(Runtime::kResolvePossiblyDirectEval, runtime_call_args)
        .StoreAccumulatorInRegister(callee);
  }

  builder()->SetExpressionPosition(expr);

  if (is_spread_call) {
    DCHECK(!implicit_undefined_receiver);
    builder()->CallWithSpread(callee, args,
                              feedback_index(feedback_spec()->AddCallICSlot()));
  } else if (optimize_as_one_shot) {
    DCHECK(!implicit_undefined_receiver);
    builder()->CallNoFeedback(callee, args);
  } else if (call_type == Call::NAMED_PROPERTY_CALL ||
             call_type == Call::KEYED_PROPERTY_CALL ||
             call_type == Call::RESOLVED_PROPERTY_CALL) {
    DCHECK(!implicit_undefined_receiver);
    builder()->CallProperty(callee, args,
                            feedback_index(feedback_spec()->AddCallICSlot()));
  } else if (implicit_undefined_receiver) {
    builder()->CallUndefinedReceiver(
        callee, args, feedback_index(feedback_spec()->AddCallICSlot()));
  } else {
    builder()->CallAnyReceiver(
        callee, args, feedback_index(feedback_spec()->AddCallICSlot()));
  }
}

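// Illustrative sketch: 'super(a, ...rest)' (spread in last position) can use
// ConstructWithSpread directly, whereas 'super(a, ...rest, b)' is rewritten to
// go through %reflect_construct below; in both cases the result is then bound
// to 'this' and, when required, instance members are initialized.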
void BytecodeGenerator::VisitCallSuper(Call* expr) {
  RegisterAllocationScope register_scope(this);
  SuperCallReference* super = expr->expression()->AsSuperCallReference();
  const ZonePtrList<Expression>* args = expr->arguments();

  int first_spread_index = 0;
  for (; first_spread_index < args->length(); first_spread_index++) {
    if (args->at(first_spread_index)->IsSpread()) break;
  }

  // Prepare the constructor for the super call.
  Register this_function = VisitForRegisterValue(super->this_function_var());
  Register constructor = register_allocator()->NewRegister();
  builder()
      ->LoadAccumulatorWithRegister(this_function)
      .GetSuperConstructor(constructor);

  if (first_spread_index < expr->arguments()->length() - 1) {
    // We rewrite something like
    //    super(1, ...x, 2)
    // to
    //    %reflect_construct(constructor, [1, ...x, 2], new_target)
    // That is, we implement (non-last-arg) spreads in super calls via our
    // mechanism for spreads in array literals.

    // First generate the array containing all arguments.
    BuildCreateArrayLiteral(args, nullptr);

    // Now pass that array to %reflect_construct.
    RegisterList construct_args = register_allocator()->NewRegisterList(3);
    builder()->StoreAccumulatorInRegister(construct_args[1]);
    builder()->MoveRegister(constructor, construct_args[0]);
    VisitForRegisterValue(super->new_target_var(), construct_args[2]);
    builder()->CallJSRuntime(Context::REFLECT_CONSTRUCT_INDEX, construct_args);
  } else {
    RegisterList args_regs = register_allocator()->NewGrowableRegisterList();
    VisitArguments(args, &args_regs);
    // The new target is loaded into the accumulator from the
    // {new.target} variable.
    VisitForAccumulatorValue(super->new_target_var());
    builder()->SetExpressionPosition(expr);

    int feedback_slot_index = feedback_index(feedback_spec()->AddCallICSlot());

    if (first_spread_index == expr->arguments()->length() - 1) {
      builder()->ConstructWithSpread(constructor, args_regs,
                                     feedback_slot_index);
    } else {
      DCHECK_EQ(first_spread_index, expr->arguments()->length());
      // Call construct.
      // TODO(turbofan): For now we do gather feedback on super constructor
      // calls, utilizing the existing machinery to inline the actual call
      // target and the JSCreate for the implicit receiver allocation. This
      // is not an ideal solution for super constructor calls, but it gets
      // the job done for now. In the long run we might want to revisit this
      // and come up with a better way.
      builder()->Construct(constructor, args_regs, feedback_slot_index);
    }
  }

  // Explicit calls to the super constructor using super() perform an
  // implicit binding assignment to the 'this' variable.
  //
  // Default constructors don't have to do the assignment because
  // 'this' isn't accessed in default constructors.
  if (!IsDefaultConstructor(info()->literal()->kind())) {
    BuildVariableAssignment(super->this_var()->var(), Token::INIT,
                            HoleCheckMode::kRequired);
  }

  // The derived constructor always has the correct bit set, so we
  // don't emit code to load and call the initializer if not
  // required.
  //
  // For the arrow function or eval case, we always emit code to load
  // and call the initializer.
  //
  // TODO(gsathya): In the future, we could tag nested arrow functions
  // or eval with the correct bit so that we do the load conditionally
  // if required.
  if (info()->literal()->requires_instance_members_initializer() ||
      !IsDerivedConstructor(info()->literal()->kind())) {
    Register instance = register_allocator()->NewRegister();
    builder()->StoreAccumulatorInRegister(instance);
    BuildInstanceMemberInitialization(this_function, instance);
    builder()->LoadAccumulatorWithRegister(instance);
  }
}

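// Illustrative sketch: 'new C(1, 2)' visits 'C' into a register, collects the
// arguments, reloads 'C' into the accumulator as the new target, and emits
// Construct; 'new C(...xs)' takes the ConstructWithSpread variant instead.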
void BytecodeGenerator::VisitCallNew(CallNew* expr) {
  Register constructor = VisitForRegisterValue(expr->expression());
  RegisterList args = register_allocator()->NewGrowableRegisterList();
  VisitArguments(expr->arguments(), &args);

  // The accumulator holds new target which is the same as the
  // constructor for CallNew.
  builder()->SetExpressionPosition(expr);
  builder()->LoadAccumulatorWithRegister(constructor);

  int feedback_slot_index = feedback_index(feedback_spec()->AddCallICSlot());
  if (expr->only_last_arg_is_spread()) {
    builder()->ConstructWithSpread(constructor, args, feedback_slot_index);
  } else {
    builder()->Construct(constructor, args, feedback_slot_index);
  }
}

void BytecodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  if (expr->is_jsruntime()) {
    RegisterList args = register_allocator()->NewGrowableRegisterList();
    VisitArguments(expr->arguments(), &args);
    builder()->CallJSRuntime(expr->context_index(), args);
  } else {
    // Evaluate all arguments to the runtime call.
    RegisterList args = register_allocator()->NewGrowableRegisterList();
    VisitArguments(expr->arguments(), &args);
    Runtime::FunctionId function_id = expr->function()->function_id;
    builder()->CallRuntime(function_id, args);
  }
}

void BytecodeGenerator::VisitVoid(UnaryOperation* expr) {
  VisitForEffect(expr->expression());
  builder()->LoadUndefined();
}

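// Illustrative sketch: 'typeof someUndeclaredGlobal' must not throw a
// ReferenceError, so a variable-proxy operand is loaded with INSIDE_TYPEOF
// semantics before TypeOf is emitted; any other operand is simply evaluated
// for its value.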
void BytecodeGenerator::VisitForTypeOfValue(Expression* expr) {
  if (expr->IsVariableProxy()) {
    // Typeof does not throw a reference error on global variables, hence we
    // perform a non-contextual load in case the operand is a variable proxy.
    VariableProxy* proxy = expr->AsVariableProxy();
    BuildVariableLoadForAccumulatorValue(proxy->var(), proxy->hole_check_mode(),
                                         INSIDE_TYPEOF);
  } else {
    VisitForAccumulatorValue(expr);
  }
}

void BytecodeGenerator::VisitTypeOf(UnaryOperation* expr) {
  VisitForTypeOfValue(expr->expression());
  builder()->TypeOf();
}

void BytecodeGenerator::VisitNot(UnaryOperation* expr) {
  if (execution_result()->IsEffect()) {
    VisitForEffect(expr->expression());
  } else if (execution_result()->IsTest()) {
    // No actual logical negation happening; we just swap the control flow by
    // swapping the target labels and the fallthrough branch, and visit in the
    // same test result context.
    TestResultScope* test_result = execution_result()->AsTest();
    test_result->InvertControlFlow();
    VisitInSameTestExecutionScope(expr->expression());
  } else {
    TypeHint type_hint = VisitForAccumulatorValue(expr->expression());
    builder()->LogicalNot(ToBooleanModeFromTypeHint(type_hint));
    // Always returns a boolean value.
    execution_result()->SetResultIsBoolean();
  }
}

3924
void BytecodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3925 3926 3927 3928 3929 3930 3931 3932 3933 3934 3935
  switch (expr->op()) {
    case Token::Value::NOT:
      VisitNot(expr);
      break;
    case Token::Value::TYPEOF:
      VisitTypeOf(expr);
      break;
    case Token::Value::VOID:
      VisitVoid(expr);
      break;
    case Token::Value::DELETE:
      VisitDelete(expr);
      break;
    case Token::Value::ADD:
    case Token::Value::SUB:
    case Token::Value::BIT_NOT:
      VisitForAccumulatorValue(expr->expression());
      builder()->SetExpressionPosition(expr);
      builder()->UnaryOperation(
          expr->op(), feedback_index(feedback_spec()->AddBinaryOpICSlot()));
      break;
    default:
      UNREACHABLE();
  }
}

void BytecodeGenerator::VisitDelete(UnaryOperation* expr) {
  if (expr->expression()->IsProperty()) {
    // Delete of an object property is allowed both in sloppy
    // and strict modes.
    Property* property = expr->expression()->AsProperty();
    Register object = VisitForRegisterValue(property->obj());
    VisitForAccumulatorValue(property->key());
    builder()->Delete(object, language_mode());
  } else if (expr->expression()->IsVariableProxy()) {
    // Delete of an unqualified identifier is allowed in sloppy mode but is
    // not allowed in strict mode. Deleting 'this' and 'new.target' is allowed
    // in both modes.
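    // Illustrative examples (sloppy mode, names hypothetical):
    //   delete someGlobalProperty;  // true if the property is configurable
    //   let x; delete x;            // false; declared bindings aren't deletable
    //   delete this;                // true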
    VariableProxy* proxy = expr->expression()->AsVariableProxy();
    DCHECK(is_sloppy(language_mode()) || proxy->is_this() ||
           proxy->is_new_target());
    if (proxy->is_this() || proxy->is_new_target()) {
      builder()->LoadTrue();
    } else {
      Variable* variable = proxy->var();
      switch (variable->location()) {
        case VariableLocation::PARAMETER:
        case VariableLocation::LOCAL:
        case VariableLocation::CONTEXT: {
          // Deleting local var/let/const, context variables, and arguments
          // does not have any effect.
          builder()->LoadFalse();
          break;
        }
        case VariableLocation::UNALLOCATED:
        // TODO(adamk): Falling through to the runtime results in correct
        // behavior, but does unnecessary context-walking (since scope
        // analysis has already proven that the variable doesn't exist in
        // any non-global scope). Consider adding a DeleteGlobal bytecode
        // that knows how to deal with ScriptContexts as well as global
        // object properties.
        case VariableLocation::LOOKUP: {
          Register name_reg = register_allocator()->NewRegister();
          builder()
              ->LoadLiteral(variable->raw_name())
              .StoreAccumulatorInRegister(name_reg)
              .CallRuntime(Runtime::kDeleteLookupSlot, name_reg);
          break;
        }
        default:
          UNREACHABLE();
      }
    }
  } else {
    // Delete of an unresolvable reference returns true.
    VisitForEffect(expr->expression());
    builder()->LoadTrue();
  }
}

void BytecodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpression());

  // Left-hand side can only be a property, a global or a variable slot.
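  // Illustrative forms handled below (not from the original source): `x++`
  // (VARIABLE), `o.p++` (NAMED_PROPERTY), `o[k]++` (KEYED_PROPERTY), and
  // `super.p++` / `super[k]++` for the super-property cases.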
  Property* property = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  bool is_postfix = expr->is_postfix() && !execution_result()->IsEffect();

  // Evaluate LHS expression and get old value.
  Register object, key, old_value;
  RegisterList super_property_args;
  const AstRawString* name;
  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->expression()->AsVariableProxy();
      BuildVariableLoadForAccumulatorValue(proxy->var(),
                                           proxy->hole_check_mode());
      break;
    }
    case NAMED_PROPERTY: {
      object = VisitForRegisterValue(property->obj());
      name = property->key()->AsLiteral()->AsRawPropertyName();
      builder()->LoadNamedProperty(
          object, name,
          feedback_index(GetCachedLoadICSlot(property->obj(), name)));
      break;
    }
    case KEYED_PROPERTY: {
      object = VisitForRegisterValue(property->obj());
      // Use VisitForAccumulatorValue here since the key needs to be in the
      // accumulator for LoadKeyedProperty.
      key = register_allocator()->NewRegister();
      VisitForAccumulatorValue(property->key());
      builder()->StoreAccumulatorInRegister(key).LoadKeyedProperty(
          object, feedback_index(feedback_spec()->AddKeyedLoadICSlot()));
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      super_property_args = register_allocator()->NewRegisterList(4);
      RegisterList load_super_args = super_property_args.Truncate(3);
      SuperPropertyReference* super_property =
          property->obj()->AsSuperPropertyReference();
      VisitForRegisterValue(super_property->this_var(), load_super_args[0]);
      VisitForRegisterValue(super_property->home_object(), load_super_args[1]);
      builder()
          ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
          .StoreAccumulatorInRegister(load_super_args[2])
          .CallRuntime(Runtime::kLoadFromSuper, load_super_args);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      super_property_args = register_allocator()->NewRegisterList(4);
      RegisterList load_super_args = super_property_args.Truncate(3);
      SuperPropertyReference* super_property =
          property->obj()->AsSuperPropertyReference();
      VisitForRegisterValue(super_property->this_var(), load_super_args[0]);
      VisitForRegisterValue(super_property->home_object(), load_super_args[1]);
      VisitForRegisterValue(property->key(), load_super_args[2]);
      builder()->CallRuntime(Runtime::kLoadKeyedFromSuper, load_super_args);
      break;
    }
  }

  // Save result for postfix expressions.
  FeedbackSlot count_slot = feedback_spec()->AddBinaryOpICSlot();
  if (is_postfix) {
    old_value = register_allocator()->NewRegister();
    // Convert old value into a number before saving it.
    // TODO(ignition): Think about adding proper PostInc/PostDec bytecodes
    // instead of this ToNumeric + Inc/Dec dance.
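    // Illustrative sketch: for `let x = "5"; let y = x++;` the ToNumeric
    // below makes y the number 5 (not the string "5") while x becomes 6.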
    builder()
        ->ToNumeric(feedback_index(count_slot))
        .StoreAccumulatorInRegister(old_value);
  }

  // Perform +1/-1 operation.
  builder()->UnaryOperation(expr->op(), feedback_index(count_slot));

  // Store the value.
  builder()->SetExpressionPosition(expr);
  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->expression()->AsVariableProxy();
      BuildVariableAssignment(proxy->var(), expr->op(),
                              proxy->hole_check_mode());
      break;
    }
    case NAMED_PROPERTY: {
      FeedbackSlot slot = GetCachedStoreICSlot(property->obj(), name);
      Register value;
      if (!execution_result()->IsEffect()) {
        value = register_allocator()->NewRegister();
        builder()->StoreAccumulatorInRegister(value);
      }
      builder()->StoreNamedProperty(object, name, feedback_index(slot),
                                    language_mode());
      if (!execution_result()->IsEffect()) {
        builder()->LoadAccumulatorWithRegister(value);
      }
      break;
    }
    case KEYED_PROPERTY: {
      FeedbackSlot slot = feedback_spec()->AddKeyedStoreICSlot(language_mode());
      Register value;
      if (!execution_result()->IsEffect()) {
        value = register_allocator()->NewRegister();
        builder()->StoreAccumulatorInRegister(value);
      }
      builder()->StoreKeyedProperty(object, key, feedback_index(slot),
                                    language_mode());
      if (!execution_result()->IsEffect()) {
        builder()->LoadAccumulatorWithRegister(value);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      builder()
          ->StoreAccumulatorInRegister(super_property_args[3])
          .CallRuntime(StoreToSuperRuntimeId(), super_property_args);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      builder()
          ->StoreAccumulatorInRegister(super_property_args[3])
          .CallRuntime(StoreKeyedToSuperRuntimeId(), super_property_args);
      break;
    }
  }

  // Restore old value for postfix expressions.
  if (is_postfix) {
    builder()->LoadAccumulatorWithRegister(old_value);
  }
}

void BytecodeGenerator::VisitBinaryOperation(BinaryOperation* binop) {
  switch (binop->op()) {
    case Token::COMMA:
      VisitCommaExpression(binop);
      break;
    case Token::OR:
      VisitLogicalOrExpression(binop);
      break;
    case Token::AND:
      VisitLogicalAndExpression(binop);
      break;
    default:
      VisitArithmeticExpression(binop);
      break;
  }
}

void BytecodeGenerator::VisitNaryOperation(NaryOperation* expr) {
  switch (expr->op()) {
    case Token::COMMA:
      VisitNaryCommaExpression(expr);
      break;
    case Token::OR:
      VisitNaryLogicalOrExpression(expr);
      break;
    case Token::AND:
      VisitNaryLogicalAndExpression(expr);
      break;
    default:
      VisitNaryArithmeticExpression(expr);
      break;
  }
}

void BytecodeGenerator::BuildLiteralCompareNil(
    Token::Value op, BytecodeArrayBuilder::NilValue nil) {
  if (execution_result()->IsTest()) {
    TestResultScope* test_result = execution_result()->AsTest();
    switch (test_result->fallthrough()) {
      case TestFallthrough::kThen:
        builder()->JumpIfNotNil(test_result->NewElseLabel(), op, nil);
        break;
      case TestFallthrough::kElse:
        builder()->JumpIfNil(test_result->NewThenLabel(), op, nil);
        break;
      case TestFallthrough::kNone:
        builder()
            ->JumpIfNil(test_result->NewThenLabel(), op, nil)
            .Jump(test_result->NewElseLabel());
    }
    test_result->SetResultConsumedByTest();
  } else {
    builder()->CompareNil(op, nil);
  }
}

void BytecodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Expression* sub_expr;
  Literal* literal;
  if (expr->IsLiteralCompareTypeof(&sub_expr, &literal)) {
    // Emit a fast literal comparison for expressions of the form:
    // typeof(x) === 'string'.
    VisitForTypeOfValue(sub_expr);
    builder()->SetExpressionPosition(expr);
    TestTypeOfFlags::LiteralFlag literal_flag =
        TestTypeOfFlags::GetFlagForLiteral(ast_string_constants(), literal);
    if (literal_flag == TestTypeOfFlags::LiteralFlag::kOther) {
      builder()->LoadFalse();
    } else {
      builder()->CompareTypeOf(literal_flag);
    }
  } else if (expr->IsLiteralCompareUndefined(&sub_expr)) {
    VisitForAccumulatorValue(sub_expr);
    builder()->SetExpressionPosition(expr);
    BuildLiteralCompareNil(expr->op(), BytecodeArrayBuilder::kUndefinedValue);
  } else if (expr->IsLiteralCompareNull(&sub_expr)) {
    VisitForAccumulatorValue(sub_expr);
    builder()->SetExpressionPosition(expr);
    BuildLiteralCompareNil(expr->op(), BytecodeArrayBuilder::kNullValue);
  } else {
    Register lhs = VisitForRegisterValue(expr->left());
    VisitForAccumulatorValue(expr->right());
    builder()->SetExpressionPosition(expr);
    if (expr->op() == Token::IN) {
      builder()->CompareOperation(expr->op(), lhs);
    } else if (expr->op() == Token::INSTANCEOF) {
      FeedbackSlot slot = feedback_spec()->AddInstanceOfSlot();
      builder()->CompareOperation(expr->op(), lhs, feedback_index(slot));
    } else {
      FeedbackSlot slot = feedback_spec()->AddCompareICSlot();
      builder()->CompareOperation(expr->op(), lhs, feedback_index(slot));
    }
  }
  // Always returns a boolean value.
  execution_result()->SetResultIsBoolean();
}

void BytecodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
  FeedbackSlot slot = feedback_spec()->AddBinaryOpICSlot();
  Expression* subexpr;
  Smi literal;
  if (expr->IsSmiLiteralOperation(&subexpr, &literal)) {
    TypeHint type_hint = VisitForAccumulatorValue(subexpr);
    builder()->SetExpressionPosition(expr);
    builder()->BinaryOperationSmiLiteral(expr->op(), literal,
                                         feedback_index(slot));
    if (expr->op() == Token::ADD && type_hint == TypeHint::kString) {
      execution_result()->SetResultIsString();
    }
  } else {
    TypeHint lhs_type = VisitForAccumulatorValue(expr->left());
    Register lhs = register_allocator()->NewRegister();
    builder()->StoreAccumulatorInRegister(lhs);
    TypeHint rhs_type = VisitForAccumulatorValue(expr->right());
    if (expr->op() == Token::ADD &&
        (lhs_type == TypeHint::kString || rhs_type == TypeHint::kString)) {
      execution_result()->SetResultIsString();
    }

    builder()->SetExpressionPosition(expr);
    builder()->BinaryOperation(expr->op(), lhs, feedback_index(slot));
  }
}

void BytecodeGenerator::VisitNaryArithmeticExpression(NaryOperation* expr) {
  // TODO(leszeks): Add support for lhs smi in commutative ops.
  TypeHint type_hint = VisitForAccumulatorValue(expr->first());

  for (size_t i = 0; i < expr->subsequent_length(); ++i) {
    RegisterAllocationScope register_scope(this);
    if (expr->subsequent(i)->IsSmiLiteral()) {
      builder()->SetExpressionPosition(expr->subsequent_op_position(i));
      builder()->BinaryOperationSmiLiteral(
          expr->op(), expr->subsequent(i)->AsLiteral()->AsSmiLiteral(),
          feedback_index(feedback_spec()->AddBinaryOpICSlot()));
    } else {
      Register lhs = register_allocator()->NewRegister();
      builder()->StoreAccumulatorInRegister(lhs);
      TypeHint rhs_hint = VisitForAccumulatorValue(expr->subsequent(i));
      if (rhs_hint == TypeHint::kString) type_hint = TypeHint::kString;
      builder()->SetExpressionPosition(expr->subsequent_op_position(i));
      builder()->BinaryOperation(
          expr->op(), lhs,
          feedback_index(feedback_spec()->AddBinaryOpICSlot()));
    }
  }

  if (type_hint == TypeHint::kString && expr->op() == Token::ADD) {
    // If any operand of an ADD is a String, a String is produced.
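    // e.g. `1 + "2" + 3` evaluates to the string "123".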
    execution_result()->SetResultIsString();
  }
}

// Note: the actual spreading is performed by the surrounding expression's
// visitor.
void BytecodeGenerator::VisitSpread(Spread* expr) { Visit(expr->expression()); }

void BytecodeGenerator::VisitEmptyParentheses(EmptyParentheses* expr) {
  UNREACHABLE();
}

void BytecodeGenerator::VisitImportCallExpression(ImportCallExpression* expr) {
  RegisterList args = register_allocator()->NewRegisterList(2);
  VisitForRegisterValue(expr->argument(), args[1]);
  builder()
      ->MoveRegister(Register::function_closure(), args[0])
      .CallRuntime(Runtime::kDynamicImportCall, args);
}

void BytecodeGenerator::BuildGetIterator(Expression* iterable,
                                         IteratorType hint) {
  RegisterList args = register_allocator()->NewRegisterList(1);
  Register method = register_allocator()->NewRegister();
  Register obj = args[0];

  VisitForAccumulatorValue(iterable);

  if (hint == IteratorType::kAsync) {
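    // Illustrative sketch: for `for await (const v of syncIterable)` where
    // syncIterable lacks Symbol.asyncIterator, the fallback below wraps its
    // Symbol.iterator via CreateAsyncFromSyncIterator.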
    // Set method to GetMethod(obj, @@asyncIterator)
    builder()->StoreAccumulatorInRegister(obj).LoadAsyncIteratorProperty(
        obj, feedback_index(feedback_spec()->AddLoadICSlot()));

    BytecodeLabel async_iterator_undefined, async_iterator_null, done;
    // TODO(ignition): Add a single opcode for JumpIfNullOrUndefined
    builder()->JumpIfUndefined(&async_iterator_undefined);
    builder()->JumpIfNull(&async_iterator_null);

    // Let iterator be Call(method, obj)
    builder()->StoreAccumulatorInRegister(method).CallProperty(
        method, args, feedback_index(feedback_spec()->AddCallICSlot()));

    // If Type(iterator) is not Object, throw a TypeError exception.
    builder()->JumpIfJSReceiver(&done);
    builder()->CallRuntime(Runtime::kThrowSymbolAsyncIteratorInvalid);

    builder()->Bind(&async_iterator_undefined);
    builder()->Bind(&async_iterator_null);
    // If method is undefined,
    //     Let syncMethod be GetMethod(obj, @@iterator)
    builder()
        ->LoadIteratorProperty(obj,
                               feedback_index(feedback_spec()->AddLoadICSlot()))
        .StoreAccumulatorInRegister(method);

    //     Let syncIterator be Call(syncMethod, obj)
    builder()->CallProperty(method, args,
                            feedback_index(feedback_spec()->AddCallICSlot()));

    // Return CreateAsyncFromSyncIterator(syncIterator)
    // alias `method` register as it's no longer used
    Register sync_iter = method;
    builder()->StoreAccumulatorInRegister(sync_iter).CallRuntime(
        Runtime::kInlineCreateAsyncFromSyncIterator, sync_iter);

    builder()->Bind(&done);
  } else {
    // Let method be GetMethod(obj, @@iterator).
    builder()
        ->StoreAccumulatorInRegister(obj)
        .LoadIteratorProperty(obj,
                              feedback_index(feedback_spec()->AddLoadICSlot()))
        .StoreAccumulatorInRegister(method);

    // Let iterator be Call(method, obj).
    builder()->CallProperty(method, args,
                            feedback_index(feedback_spec()->AddCallICSlot()));

    // If Type(iterator) is not Object, throw a TypeError exception.
    BytecodeLabel no_type_error;
    builder()->JumpIfJSReceiver(&no_type_error);
    builder()->CallRuntime(Runtime::kThrowSymbolIteratorInvalid);
    builder()->Bind(&no_type_error);
  }
}

// Returns an IteratorRecord which is valid for the lifetime of the current
// register_allocation_scope.
BytecodeGenerator::IteratorRecord BytecodeGenerator::BuildGetIteratorRecord(
    Expression* iterable, Register next, Register object, IteratorType hint) {
  DCHECK(next.is_valid() && object.is_valid());
  BuildGetIterator(iterable, hint);

  builder()
      ->StoreAccumulatorInRegister(object)
      .LoadNamedProperty(object, ast_string_constants()->next_string(),
                         feedback_index(feedback_spec()->AddLoadICSlot()))
      .StoreAccumulatorInRegister(next);
  return IteratorRecord(object, next, hint);
}

BytecodeGenerator::IteratorRecord BytecodeGenerator::BuildGetIteratorRecord(
    Expression* iterable, IteratorType hint) {
  Register next = register_allocator()->NewRegister();
  Register object = register_allocator()->NewRegister();
  return BuildGetIteratorRecord(iterable, next, object, hint);
}

void BytecodeGenerator::BuildIteratorNext(const IteratorRecord& iterator,
                                          Register next_result) {
  DCHECK(next_result.is_valid());
  builder()->CallProperty(iterator.next(), RegisterList(iterator.object()),
                          feedback_index(feedback_spec()->AddCallICSlot()));

  // TODO(caitp): support async IteratorNext here.

  BytecodeLabel is_object;
  builder()
      ->StoreAccumulatorInRegister(next_result)
      .JumpIfJSReceiver(&is_object)
      .CallRuntime(Runtime::kThrowIteratorResultNotAnObject, next_result)
      .Bind(&is_object);
}

void BytecodeGenerator::BuildCallIteratorMethod(Register iterator,
                                                const AstRawString* method_name,
                                                RegisterList receiver_and_args,
                                                BytecodeLabel* if_called,
                                                BytecodeLabels* if_notcalled) {
  RegisterAllocationScope register_scope(this);

  Register method = register_allocator()->NewRegister();
  FeedbackSlot slot = feedback_spec()->AddLoadICSlot();
  builder()
      ->LoadNamedProperty(iterator, method_name, feedback_index(slot))
      .JumpIfUndefined(if_notcalled->New())
      .JumpIfNull(if_notcalled->New())
      .StoreAccumulatorInRegister(method)
      .CallProperty(method, receiver_and_args,
                    feedback_index(feedback_spec()->AddCallICSlot()))
      .Jump(if_called);
}

void BytecodeGenerator::BuildIteratorClose(const IteratorRecord& iterator,
                                           Expression* expr) {
  RegisterAllocationScope register_scope(this);
  BytecodeLabels done(zone());
  BytecodeLabel if_called;
  RegisterList args = RegisterList(iterator.object());
  BuildCallIteratorMethod(iterator.object(),
                          ast_string_constants()->return_string(), args,
                          &if_called, &done);
  builder()->Bind(&if_called);

  if (iterator.type() == IteratorType::kAsync) {
    DCHECK_NOT_NULL(expr);
    BuildAwait(expr);
  }

  builder()->JumpIfJSReceiver(done.New());
  {
    RegisterAllocationScope register_scope(this);
    Register return_result = register_allocator()->NewRegister();
    builder()
        ->StoreAccumulatorInRegister(return_result)
        .CallRuntime(Runtime::kThrowIteratorResultNotAnObject, return_result);
  }

  done.Bind(builder());
}

void BytecodeGenerator::VisitGetIterator(GetIterator* expr) {
  builder()->SetExpressionPosition(expr);
  BuildGetIterator(expr->iterable(), expr->hint());
}

void BytecodeGenerator::VisitGetTemplateObject(GetTemplateObject* expr) {
  builder()->SetExpressionPosition(expr);
  size_t entry = builder()->AllocateDeferredConstantPoolEntry();
  template_objects_.push_back(std::make_pair(expr, entry));
  FeedbackSlot literal_slot = feedback_spec()->AddLiteralSlot();
  builder()->GetTemplateObject(entry, feedback_index(literal_slot));
}

void BytecodeGenerator::VisitTemplateLiteral(TemplateLiteral* expr) {
  const ZonePtrList<const AstRawString>& parts = *expr->string_parts();
  const ZonePtrList<Expression>& substitutions = *expr->substitutions();
  // Template strings with no substitutions are turned into StringLiterals.
  DCHECK_GT(substitutions.length(), 0);
  DCHECK_EQ(parts.length(), substitutions.length() + 1);
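  // Illustrative sketch: `a${x}b${y}` has parts ["a", "b", ""] and
  // substitutions [x, y]; the loop below emits roughly
  // "a" + ToString(x) + "b" + ToString(y), accumulating into |last_part|.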

  // Generate string concatenation
  // TODO(caitp): Don't generate feedback slot if it's not used --- introduce
  // a simple, concise, reusable mechanism to lazily create reusable slots.
  FeedbackSlot slot = feedback_spec()->AddBinaryOpICSlot();
  Register last_part = register_allocator()->NewRegister();
  bool last_part_valid = false;

  builder()->SetExpressionPosition(expr);
  for (int i = 0; i < substitutions.length(); ++i) {
    if (i != 0) {
      builder()->StoreAccumulatorInRegister(last_part);
      last_part_valid = true;
    }

    if (!parts[i]->IsEmpty()) {
      builder()->LoadLiteral(parts[i]);
      if (last_part_valid) {
        builder()->BinaryOperation(Token::ADD, last_part, feedback_index(slot));
      }
      builder()->StoreAccumulatorInRegister(last_part);
      last_part_valid = true;
    }

    TypeHint type_hint = VisitForAccumulatorValue(substitutions[i]);
    if (type_hint != TypeHint::kString) {
      builder()->ToString();
    }
    if (last_part_valid) {
      builder()->BinaryOperation(Token::ADD, last_part, feedback_index(slot));
    }
    last_part_valid = false;
  }

  if (!parts.last()->IsEmpty()) {
    builder()->StoreAccumulatorInRegister(last_part);
    builder()->LoadLiteral(parts.last());
    builder()->BinaryOperation(Token::ADD, last_part, feedback_index(slot));
  }
}

void BytecodeGenerator::VisitThisFunction(ThisFunction* expr) {
  builder()->LoadAccumulatorWithRegister(Register::function_closure());
}

void BytecodeGenerator::VisitSuperCallReference(SuperCallReference* expr) {
  // Handled by VisitCall().
  UNREACHABLE();
}

void BytecodeGenerator::VisitSuperPropertyReference(
    SuperPropertyReference* expr) {
  builder()->CallRuntime(Runtime::kThrowUnsupportedSuperError);
}

void BytecodeGenerator::VisitCommaExpression(BinaryOperation* binop) {
  VisitForEffect(binop->left());
  Visit(binop->right());
}

void BytecodeGenerator::VisitNaryCommaExpression(NaryOperation* expr) {
  DCHECK_GT(expr->subsequent_length(), 0);

  VisitForEffect(expr->first());
  for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
    VisitForEffect(expr->subsequent(i));
  }
  Visit(expr->subsequent(expr->subsequent_length() - 1));
}

void BytecodeGenerator::VisitLogicalTestSubExpression(
    Token::Value token, Expression* expr, BytecodeLabels* then_labels,
    BytecodeLabels* else_labels, int coverage_slot) {
  DCHECK(token == Token::OR || token == Token::AND);

  BytecodeLabels test_next(zone());
  if (token == Token::OR) {
    VisitForTest(expr, then_labels, &test_next, TestFallthrough::kElse);
  } else {
    DCHECK_EQ(Token::AND, token);
    VisitForTest(expr, &test_next, else_labels, TestFallthrough::kThen);
  }
  test_next.Bind(builder());

  BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);
}

void BytecodeGenerator::VisitLogicalTest(Token::Value token, Expression* left,
                                         Expression* right,
                                         int right_coverage_slot) {
  DCHECK(token == Token::OR || token == Token::AND);
  TestResultScope* test_result = execution_result()->AsTest();
  BytecodeLabels* then_labels = test_result->then_labels();
  BytecodeLabels* else_labels = test_result->else_labels();
  TestFallthrough fallthrough = test_result->fallthrough();

  VisitLogicalTestSubExpression(token, left, then_labels, else_labels,
                                right_coverage_slot);
  // The last test has the same then, else and fallthrough as the parent test.
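  // e.g. for `a && b` in a test context, a falsy `a` jumps straight to the
  // parent's else labels, and `b` is then visited with the parent's labels.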
  VisitForTest(right, then_labels, else_labels, fallthrough);
}

void BytecodeGenerator::VisitNaryLogicalTest(
    Token::Value token, NaryOperation* expr,
    const NaryCodeCoverageSlots* coverage_slots) {
  DCHECK(token == Token::OR || token == Token::AND);
  DCHECK_GT(expr->subsequent_length(), 0);

  TestResultScope* test_result = execution_result()->AsTest();
  BytecodeLabels* then_labels = test_result->then_labels();
  BytecodeLabels* else_labels = test_result->else_labels();
  TestFallthrough fallthrough = test_result->fallthrough();

  VisitLogicalTestSubExpression(token, expr->first(), then_labels, else_labels,
                                coverage_slots->GetSlotFor(0));
  for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
    VisitLogicalTestSubExpression(token, expr->subsequent(i), then_labels,
                                  else_labels,
                                  coverage_slots->GetSlotFor(i + 1));
  }
  // The last test has the same then, else and fallthrough as the parent test.
  VisitForTest(expr->subsequent(expr->subsequent_length() - 1), then_labels,
               else_labels, fallthrough);
}

bool BytecodeGenerator::VisitLogicalOrSubExpression(Expression* expr,
                                                    BytecodeLabels* end_labels,
                                                    int coverage_slot) {
  if (expr->ToBooleanIsTrue()) {
    VisitForAccumulatorValue(expr);
    end_labels->Bind(builder());
    return true;
  } else if (!expr->ToBooleanIsFalse()) {
    TypeHint type_hint = VisitForAccumulatorValue(expr);
    builder()->JumpIfTrue(ToBooleanModeFromTypeHint(type_hint),
                          end_labels->New());
  }

  BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);

  return false;
}

bool BytecodeGenerator::VisitLogicalAndSubExpression(Expression* expr,
                                                     BytecodeLabels* end_labels,
                                                     int coverage_slot) {
  if (expr->ToBooleanIsFalse()) {
    VisitForAccumulatorValue(expr);
    end_labels->Bind(builder());
    return true;
  } else if (!expr->ToBooleanIsTrue()) {
    TypeHint type_hint = VisitForAccumulatorValue(expr);
    builder()->JumpIfFalse(ToBooleanModeFromTypeHint(type_hint),
                           end_labels->New());
  }

  BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);

  return false;
}

void BytecodeGenerator::VisitLogicalOrExpression(BinaryOperation* binop) {
  Expression* left = binop->left();
  Expression* right = binop->right();

  int right_coverage_slot =
      AllocateBlockCoverageSlotIfEnabled(binop, SourceRangeKind::kRight);

  if (execution_result()->IsTest()) {
    TestResultScope* test_result = execution_result()->AsTest();
    if (left->ToBooleanIsTrue()) {
      builder()->Jump(test_result->NewThenLabel());
    } else if (left->ToBooleanIsFalse() && right->ToBooleanIsFalse()) {
      BuildIncrementBlockCoverageCounterIfEnabled(right_coverage_slot);
      builder()->Jump(test_result->NewElseLabel());
    } else {
      VisitLogicalTest(Token::OR, left, right, right_coverage_slot);
    }
    test_result->SetResultConsumedByTest();
  } else {
    BytecodeLabels end_labels(zone());
    if (VisitLogicalOrSubExpression(left, &end_labels, right_coverage_slot)) {
      return;
    }
    VisitForAccumulatorValue(right);
    end_labels.Bind(builder());
  }
}

void BytecodeGenerator::VisitNaryLogicalOrExpression(NaryOperation* expr) {
  Expression* first = expr->first();
  DCHECK_GT(expr->subsequent_length(), 0);

  NaryCodeCoverageSlots coverage_slots(this, expr);

  if (execution_result()->IsTest()) {
    TestResultScope* test_result = execution_result()->AsTest();
    if (first->ToBooleanIsTrue()) {
      builder()->Jump(test_result->NewThenLabel());
    } else {
      VisitNaryLogicalTest(Token::OR, expr, &coverage_slots);
    }
    test_result->SetResultConsumedByTest();
  } else {
    BytecodeLabels end_labels(zone());
    if (VisitLogicalOrSubExpression(first, &end_labels,
                                    coverage_slots.GetSlotFor(0))) {
      return;
    }
    for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
      if (VisitLogicalOrSubExpression(expr->subsequent(i), &end_labels,
                                      coverage_slots.GetSlotFor(i + 1))) {
        return;
      }
    }
    // We have to visit the last value even if it's true, because we need its
    // actual value.
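    // e.g. `x || y || "fallback"` must produce the string "fallback" itself
    // (not a boolean) when x and y are falsy.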
    VisitForAccumulatorValue(expr->subsequent(expr->subsequent_length() - 1));
    end_labels.Bind(builder());
  }
}

void BytecodeGenerator::VisitLogicalAndExpression(BinaryOperation* binop) {
  Expression* left = binop->left();
  Expression* right = binop->right();

  int right_coverage_slot =
      AllocateBlockCoverageSlotIfEnabled(binop, SourceRangeKind::kRight);

  if (execution_result()->IsTest()) {
    TestResultScope* test_result = execution_result()->AsTest();
    if (left->ToBooleanIsFalse()) {
      builder()->Jump(test_result->NewElseLabel());
    } else if (left->ToBooleanIsTrue() && right->ToBooleanIsTrue()) {
      BuildIncrementBlockCoverageCounterIfEnabled(right_coverage_slot);
      builder()->Jump(test_result->NewThenLabel());
    } else {
      VisitLogicalTest(Token::AND, left, right, right_coverage_slot);
    }
    test_result->SetResultConsumedByTest();
  } else {
    BytecodeLabels end_labels(zone());
    if (VisitLogicalAndSubExpression(left, &end_labels, right_coverage_slot)) {
      return;
    }
    VisitForAccumulatorValue(right);
    end_labels.Bind(builder());
  }
}

void BytecodeGenerator::VisitNaryLogicalAndExpression(NaryOperation* expr) {
  Expression* first = expr->first();
  DCHECK_GT(expr->subsequent_length(), 0);

  NaryCodeCoverageSlots coverage_slots(this, expr);

  if (execution_result()->IsTest()) {
    TestResultScope* test_result = execution_result()->AsTest();
    if (first->ToBooleanIsFalse()) {
      builder()->Jump(test_result->NewElseLabel());
    } else {
      VisitNaryLogicalTest(Token::AND, expr, &coverage_slots);
    }
    test_result->SetResultConsumedByTest();
  } else {
    BytecodeLabels end_labels(zone());
    if (VisitLogicalAndSubExpression(first, &end_labels,
                                     coverage_slots.GetSlotFor(0))) {
      return;
    }
    for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
      if (VisitLogicalAndSubExpression(expr->subsequent(i), &end_labels,
                                       coverage_slots.GetSlotFor(i + 1))) {
        return;
      }
    }
    // We have to visit the last value even if it's false, because we need its
    // actual value.
    VisitForAccumulatorValue(expr->subsequent(expr->subsequent_length() - 1));
    end_labels.Bind(builder());
  }
}

void BytecodeGenerator::VisitRewritableExpression(RewritableExpression* expr) {
  Visit(expr->expression());
}

void BytecodeGenerator::BuildNewLocalActivationContext() {
  ValueResultScope value_execution_result(this);
  Scope* scope = closure_scope();
  DCHECK_EQ(current_scope(), closure_scope());

  // Create the appropriate context.
  if (scope->is_script_scope()) {
    Register scope_reg = register_allocator()->NewRegister();
    builder()
        ->LoadLiteral(scope)
        .StoreAccumulatorInRegister(scope_reg)
        .CallRuntime(Runtime::kNewScriptContext, scope_reg);
  } else if (scope->is_module_scope()) {
    // We don't need to do anything for the outer script scope.
    DCHECK(scope->outer_scope()->is_script_scope());

    // A JSFunction representing a module is called with the module object as
    // its sole argument.
    RegisterList args = register_allocator()->NewRegisterList(2);
    builder()
        ->MoveRegister(builder()->Parameter(0), args[0])
        .LoadLiteral(scope)
        .StoreAccumulatorInRegister(args[1])
        .CallRuntime(Runtime::kPushModuleContext, args);
  } else {
    DCHECK(scope->is_function_scope() || scope->is_eval_scope());
    int slot_count = scope->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (slot_count <= ConstructorBuiltins::MaximumFunctionContextSlots()) {
      switch (scope->scope_type()) {
        case EVAL_SCOPE:
          builder()->CreateEvalContext(scope, slot_count);
          break;
        case FUNCTION_SCOPE:
          builder()->CreateFunctionContext(scope, slot_count);
          break;
        default:
          UNREACHABLE();
      }
    } else {
      Register arg = register_allocator()->NewRegister();
      builder()->LoadLiteral(scope).StoreAccumulatorInRegister(arg).CallRuntime(
          Runtime::kNewFunctionContext, arg);
    }
  }
}

void BytecodeGenerator::BuildLocalActivationContextInitialization() {
  DeclarationScope* scope = closure_scope();

  if (scope->has_this_declaration() && scope->receiver()->IsContextSlot()) {
    Variable* variable = scope->receiver();
    Register receiver(builder()->Receiver());
    // Context variable (at bottom of the context chain).
    DCHECK_EQ(0, scope->ContextChainLength(variable->scope()));
    builder()->LoadAccumulatorWithRegister(receiver).StoreContextSlot(
        execution_context()->reg(), variable->index(), 0);
  }

  // Copy parameters into context if necessary.
  int num_parameters = scope->num_parameters();
  for (int i = 0; i < num_parameters; i++) {
    Variable* variable = scope->parameter(i);
    if (!variable->IsContextSlot()) continue;

    Register parameter(builder()->Parameter(i));
    // Context variable (at bottom of the context chain).
    DCHECK_EQ(0, scope->ContextChainLength(variable->scope()));
    builder()->LoadAccumulatorWithRegister(parameter).StoreContextSlot(
        execution_context()->reg(), variable->index(), 0);
  }
}

void BytecodeGenerator::BuildNewLocalBlockContext(Scope* scope) {
  ValueResultScope value_execution_result(this);
  DCHECK(scope->is_block_scope());

  builder()->CreateBlockContext(scope);
}

void BytecodeGenerator::BuildNewLocalWithContext(Scope* scope) {
  ValueResultScope value_execution_result(this);

  Register extension_object = register_allocator()->NewRegister();

  builder()->ToObject(extension_object);
  builder()->CreateWithContext(extension_object, scope);
}

void BytecodeGenerator::BuildNewLocalCatchContext(Scope* scope) {
  ValueResultScope value_execution_result(this);
  DCHECK(scope->catch_variable()->IsContextSlot());

  Register exception = register_allocator()->NewRegister();
  builder()->StoreAccumulatorInRegister(exception);
  builder()->CreateCatchContext(exception, scope);
}

void BytecodeGenerator::VisitObjectLiteralAccessor(
    Register home_object, ObjectLiteralProperty* property, Register value_out) {
  if (property == nullptr) {
    builder()->LoadNull().StoreAccumulatorInRegister(value_out);
  } else {
    VisitForRegisterValue(property->value(), value_out);
    VisitSetHomeObject(value_out, home_object, property);
  }
}

void BytecodeGenerator::VisitSetHomeObject(Register value, Register home_object,
                                           LiteralProperty* property) {
  Expression* expr = property->value();
  if (FunctionLiteral::NeedsHomeObject(expr)) {
    FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
    builder()
        ->LoadAccumulatorWithRegister(home_object)
        .StoreHomeObjectProperty(value, feedback_index(slot), language_mode());
  }
}

void BytecodeGenerator::VisitArgumentsObject(Variable* variable) {
  if (variable == nullptr) return;

  DCHECK(variable->IsContextSlot() || variable->IsStackAllocated());

  // Allocate and initialize a new arguments object and assign to the
  // {arguments} variable.
  builder()->CreateArguments(closure_scope()->GetArgumentsType());
  BuildVariableAssignment(variable, Token::ASSIGN, HoleCheckMode::kElided);
}

void BytecodeGenerator::VisitRestArgumentsArray(Variable* rest) {
  if (rest == nullptr) return;

  // Allocate and initialize a new rest parameter and assign to the {rest}
  // variable.
  builder()->CreateArguments(CreateArgumentsType::kRestParameter);
  DCHECK(rest->IsContextSlot() || rest->IsStackAllocated());
  BuildVariableAssignment(rest, Token::ASSIGN, HoleCheckMode::kElided);
}

void BytecodeGenerator::VisitThisFunctionVariable(Variable* variable) {
  if (variable == nullptr) return;

  // Store the closure we were called with in the given variable.
  builder()->LoadAccumulatorWithRegister(Register::function_closure());
  BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
}

void BytecodeGenerator::VisitNewTargetVariable(Variable* variable) {
  if (variable == nullptr) return;

  // The generator resume trampoline abuses the new.target register
  // to pass in the generator object.  In ordinary calls, new.target is always
  // undefined because generator functions are non-constructible, so don't
  // assign anything to the new.target variable.
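  // e.g. `new (function* () {})()` throws a TypeError, so new.target can only
  // ever be observed as undefined inside a generator body.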
  if (IsResumableFunction(info()->literal()->kind())) return;

  if (variable->location() == VariableLocation::LOCAL) {
    // The new.target register was already assigned by entry trampoline.
    DCHECK_EQ(incoming_new_target_or_generator_.index(),
              GetRegisterForLocalVariable(variable).index());
    return;
  }

  // Store the new target we were called with in the given variable.
  builder()->LoadAccumulatorWithRegister(incoming_new_target_or_generator_);
  BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
}

void BytecodeGenerator::BuildGeneratorObjectVariableInitialization() {
  DCHECK(IsResumableFunction(info()->literal()->kind()));

  Variable* generator_object_var = closure_scope()->generator_object_var();
  RegisterAllocationScope register_scope(this);
  RegisterList args = register_allocator()->NewRegisterList(2);
  Runtime::FunctionId function_id =
      (IsAsyncFunction(info()->literal()->kind()) &&
       !IsAsyncGeneratorFunction(info()->literal()->kind()))
          ? Runtime::kInlineAsyncFunctionEnter
          : Runtime::kInlineCreateJSGeneratorObject;
  builder()
      ->MoveRegister(Register::function_closure(), args[0])
      .MoveRegister(builder()->Receiver(), args[1])
      .CallRuntime(function_id, args)
      .StoreAccumulatorInRegister(generator_object());

  if (generator_object_var->location() == VariableLocation::LOCAL) {
    // The generator object register is already set to the variable's local
    // register.
    DCHECK_EQ(generator_object().index(),
              GetRegisterForLocalVariable(generator_object_var).index());
  } else {
    BuildVariableAssignment(generator_object_var, Token::INIT,
                            HoleCheckMode::kElided);
  }
}

void BytecodeGenerator::BuildPushUndefinedIntoRegisterList(
    RegisterList* reg_list) {
  Register reg = register_allocator()->GrowRegisterList(reg_list);
  builder()->LoadUndefined().StoreAccumulatorInRegister(reg);
}

void BytecodeGenerator::BuildLoadPropertyKey(LiteralProperty* property,
                                             Register out_reg) {
  if (property->key()->IsStringLiteral()) {
    VisitForRegisterValue(property->key(), out_reg);
  } else {
    VisitForAccumulatorValue(property->key());
    builder()->ToName(out_reg);
  }
}

int BytecodeGenerator::AllocateBlockCoverageSlotIfEnabled(
    AstNode* node, SourceRangeKind kind) {
  return (block_coverage_builder_ == nullptr)
             ? BlockCoverageBuilder::kNoCoverageArraySlot
             : block_coverage_builder_->AllocateBlockCoverageSlot(node, kind);
}

int BytecodeGenerator::AllocateNaryBlockCoverageSlotIfEnabled(
    NaryOperation* node, size_t index) {
  return (block_coverage_builder_ == nullptr)
             ? BlockCoverageBuilder::kNoCoverageArraySlot
             : block_coverage_builder_->AllocateNaryBlockCoverageSlot(node,
                                                                      index);
}

void BytecodeGenerator::BuildIncrementBlockCoverageCounterIfEnabled(
    AstNode* node, SourceRangeKind kind) {
  if (block_coverage_builder_ == nullptr) return;
  block_coverage_builder_->IncrementBlockCounter(node, kind);
}

void BytecodeGenerator::BuildIncrementBlockCoverageCounterIfEnabled(
    int coverage_array_slot) {
  if (block_coverage_builder_ != nullptr) {
    block_coverage_builder_->IncrementBlockCounter(coverage_array_slot);
  }
}

// Visits the expression |expr| and places the result in the accumulator.
BytecodeGenerator::TypeHint BytecodeGenerator::VisitForAccumulatorValue(
    Expression* expr) {
  ValueResultScope accumulator_scope(this);
  Visit(expr);
  return accumulator_scope.type_hint();
}

void BytecodeGenerator::VisitForAccumulatorValueOrTheHole(Expression* expr) {
  if (expr == nullptr) {
    builder()->LoadTheHole();
  } else {
    VisitForAccumulatorValue(expr);
  }
}

// Visits the expression |expr| and discards the result.
void BytecodeGenerator::VisitForEffect(Expression* expr) {
  EffectResultScope effect_scope(this);
  Visit(expr);
}

// Visits the expression |expr| and returns the register containing
// the expression result.
Register BytecodeGenerator::VisitForRegisterValue(Expression* expr) {
  VisitForAccumulatorValue(expr);
  Register result = register_allocator()->NewRegister();
  builder()->StoreAccumulatorInRegister(result);
  return result;
}

// Visits the expression |expr| and stores the expression result in
// |destination|.
void BytecodeGenerator::VisitForRegisterValue(Expression* expr,
                                              Register destination) {
  ValueResultScope register_scope(this);
  Visit(expr);
  builder()->StoreAccumulatorInRegister(destination);
}

// Visits the expression |expr| and pushes the result into a new register
// added to the end of |reg_list|.
void BytecodeGenerator::VisitAndPushIntoRegisterList(Expression* expr,
                                                     RegisterList* reg_list) {
  {
    ValueResultScope register_scope(this);
    Visit(expr);
  }
  // Grow the register list after visiting the expression to avoid reserving
  // the register across the expression evaluation, which could cause memory
  // leaks for deep expressions due to dead objects being kept alive by pointers
  // in registers.
  Register destination = register_allocator()->GrowRegisterList(reg_list);
  builder()->StoreAccumulatorInRegister(destination);
}

void BytecodeGenerator::BuildTest(ToBooleanMode mode,
                                  BytecodeLabels* then_labels,
                                  BytecodeLabels* else_labels,
                                  TestFallthrough fallthrough) {
  switch (fallthrough) {
    case TestFallthrough::kThen:
      builder()->JumpIfFalse(mode, else_labels->New());
      break;
    case TestFallthrough::kElse:
      builder()->JumpIfTrue(mode, then_labels->New());
      break;
    case TestFallthrough::kNone:
      builder()->JumpIfTrue(mode, then_labels->New());
      builder()->Jump(else_labels->New());
      break;
  }
}

// Visits the expression |expr| for testing its boolean value and jumping to
// the |then| or |else| labels depending on its value and short-circuit
// semantics.
void BytecodeGenerator::VisitForTest(Expression* expr,
                                     BytecodeLabels* then_labels,
                                     BytecodeLabels* else_labels,
                                     TestFallthrough fallthrough) {
  bool result_consumed;
  TypeHint type_hint;
  {
    // To make sure that all temporary registers are returned before generating
    // jumps below, we ensure that the result scope is deleted before doing so.
    // Dead registers might be materialized otherwise.
    TestResultScope test_result(this, then_labels, else_labels, fallthrough);
    Visit(expr);
    result_consumed = test_result.result_consumed_by_test();
    type_hint = test_result.type_hint();
    // Labels and fallthrough might have been mutated, so update based on
    // TestResultScope.
    then_labels = test_result.then_labels();
    else_labels = test_result.else_labels();
    fallthrough = test_result.fallthrough();
  }
  if (!result_consumed) {
    BuildTest(ToBooleanModeFromTypeHint(type_hint), then_labels, else_labels,
              fallthrough);
  }
}

void BytecodeGenerator::VisitInSameTestExecutionScope(Expression* expr) {
  DCHECK(execution_result()->IsTest());
  {
    RegisterAllocationScope reg_scope(this);
    Visit(expr);
  }
  if (!execution_result()->AsTest()->result_consumed_by_test()) {
    TestResultScope* result_scope = execution_result()->AsTest();
    BuildTest(ToBooleanModeFromTypeHint(result_scope->type_hint()),
              result_scope->then_labels(), result_scope->else_labels(),
              result_scope->fallthrough());
    result_scope->SetResultConsumedByTest();
  }
}

void BytecodeGenerator::VisitInScope(Statement* stmt, Scope* scope) {
  DCHECK(scope->declarations()->is_empty());
  CurrentScope current_scope(this, scope);
  ContextScope context_scope(this, scope);
  Visit(stmt);
}

Register BytecodeGenerator::GetRegisterForLocalVariable(Variable* variable) {
  DCHECK_EQ(VariableLocation::LOCAL, variable->location());
  return builder()->Local(variable->index());
}

FunctionKind BytecodeGenerator::function_kind() const {
  return info()->literal()->kind();
}

LanguageMode BytecodeGenerator::language_mode() const {
  return current_scope()->language_mode();
}

Register BytecodeGenerator::generator_object() const {
  DCHECK(IsResumableFunction(info()->literal()->kind()));
  return incoming_new_target_or_generator_;
}

FeedbackVectorSpec* BytecodeGenerator::feedback_spec() {
  return info()->feedback_vector_spec();
}

int BytecodeGenerator::feedback_index(FeedbackSlot slot) const {
  DCHECK(!slot.IsInvalid());
  return FeedbackVector::GetIndex(slot);
}

FeedbackSlot BytecodeGenerator::GetCachedLoadGlobalICSlot(
    TypeofMode typeof_mode, Variable* variable) {
  FeedbackSlotKind slot_kind =
      typeof_mode == INSIDE_TYPEOF
          ? FeedbackSlotKind::kLoadGlobalInsideTypeof
          : FeedbackSlotKind::kLoadGlobalNotInsideTypeof;
  FeedbackSlot slot = feedback_slot_cache()->Get(slot_kind, variable);
  if (!slot.IsInvalid()) {
    return slot;
  }
  slot = feedback_spec()->AddLoadGlobalICSlot(typeof_mode);
  feedback_slot_cache()->Put(slot_kind, variable, slot);
  return slot;
}

FeedbackSlot BytecodeGenerator::GetCachedStoreGlobalICSlot(
    LanguageMode language_mode, Variable* variable) {
  FeedbackSlotKind slot_kind = is_strict(language_mode)
                                   ? FeedbackSlotKind::kStoreGlobalStrict
                                   : FeedbackSlotKind::kStoreGlobalSloppy;
  FeedbackSlot slot = feedback_slot_cache()->Get(slot_kind, variable);
  if (!slot.IsInvalid()) {
    return slot;
  }
  slot = feedback_spec()->AddStoreGlobalICSlot(language_mode);
  feedback_slot_cache()->Put(slot_kind, variable, slot);
  return slot;
}

FeedbackSlot BytecodeGenerator::GetCachedLoadICSlot(const Expression* expr,
                                                    const AstRawString* name) {
  if (!FLAG_ignition_share_named_property_feedback) {
    return feedback_spec()->AddLoadICSlot();
  }
  FeedbackSlotKind slot_kind = FeedbackSlotKind::kLoadProperty;
  if (!expr->IsVariableProxy()) {
    return feedback_spec()->AddLoadICSlot();
  }
  const VariableProxy* proxy = expr->AsVariableProxy();
  FeedbackSlot slot =
      feedback_slot_cache()->Get(slot_kind, proxy->var()->index(), name);
  if (!slot.IsInvalid()) {
    return slot;
  }
  slot = feedback_spec()->AddLoadICSlot();
  feedback_slot_cache()->Put(slot_kind, proxy->var()->index(), name, slot);
  return slot;
}

FeedbackSlot BytecodeGenerator::GetCachedStoreICSlot(const Expression* expr,
                                                     const AstRawString* name) {
  if (!FLAG_ignition_share_named_property_feedback) {
    return feedback_spec()->AddStoreICSlot(language_mode());
  }
  FeedbackSlotKind slot_kind = is_strict(language_mode())
                                   ? FeedbackSlotKind::kStoreNamedStrict
                                   : FeedbackSlotKind::kStoreNamedSloppy;
  if (!expr->IsVariableProxy()) {
    return feedback_spec()->AddStoreICSlot(language_mode());
  }
  const VariableProxy* proxy = expr->AsVariableProxy();
  FeedbackSlot slot =
      feedback_slot_cache()->Get(slot_kind, proxy->var()->index(), name);
  if (!slot.IsInvalid()) {
    return slot;
  }
  slot = feedback_spec()->AddStoreICSlot(language_mode());
  feedback_slot_cache()->Put(slot_kind, proxy->var()->index(), name, slot);
  return slot;
}

FeedbackSlot BytecodeGenerator::GetCachedCreateClosureSlot(
    FunctionLiteral* literal) {
  FeedbackSlotKind slot_kind = FeedbackSlotKind::kCreateClosure;
  FeedbackSlot slot = feedback_slot_cache()->Get(slot_kind, literal);
  if (!slot.IsInvalid()) {
    return slot;
  }
  slot = feedback_spec()->AddCreateClosureSlot();
  feedback_slot_cache()->Put(slot_kind, literal, slot);
  return slot;
}

FeedbackSlot BytecodeGenerator::GetDummyCompareICSlot() {
  return dummy_feedback_slot_.Get();
}

Runtime::FunctionId BytecodeGenerator::StoreToSuperRuntimeId() {
  return is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
                                    : Runtime::kStoreToSuper_Sloppy;
}

Runtime::FunctionId BytecodeGenerator::StoreKeyedToSuperRuntimeId() {
  return is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
                                    : Runtime::kStoreKeyedToSuper_Sloppy;
}

}  // namespace interpreter
}  // namespace internal
}  // namespace v8