// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS

// Note on Mips implementation:
//
// The result_register() for mips is the 'v0' register, which is defined
// by the ABI to contain function return values. However, the first
// parameter to a function is defined to be 'a0'. So there are many
// places where we have to move a previous result in v0 to a0 for the
// next call: mov(a0, v0). This is not needed on the other architectures.
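//
// A minimal illustration of the pattern (assumed shapes; the exact IC and
// ids vary by call site):
//
//   __ mov(a0, result_register());  // move the previous result from v0 to a0
//   CallIC(ic, expr->test_id());    // the IC consumes a0 and returns in v0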

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"

#include "src/mips/code-stubs-mips.h"
#include "src/mips/macro-assembler-mips.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is an andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff +
// yyyy (the raw 16-bit immediate value) is the delta from the pc to the first
// instruction of the patchable code.
// The marker instruction is effectively a NOP (its destination is zero_reg)
// and will never be emitted by normal code.
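// Illustrative example (assumed values, not emitted verbatim): if
// EmitPatchInfo() runs five instructions after the patch site was bound,
// delta_to_patch_site is 5, so the marker emitted is
//   andi zero_reg, zero_reg, 5   // reg code = 5 / kImm16Mask = 0, imm = 5
// and the patcher recovers the delta as 0 * kImm16Mask + 5.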
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this, ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Always taken before patched.
    __ BranchShort(target, eq, at, Operand(zero_reg));
  }

  // When initially emitting this, ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Never taken before patched.
    __ BranchShort(target, ne, at, Operand(zero_reg));
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right.  The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o a1: the JS function object being called (i.e. ourselves)
//   o a3: the new target value
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-mips.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ lw(a2, MemOperand(sp, receiver_offset));
    __ AssertNotSmi(a2);
    __ GetObjectType(a2, a2, a2);
    __ Check(ge, kSloppyFunctionExpectsJSReceiverReceiver, a2,
             Operand(FIRST_JS_RECEIVER_TYPE));
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ Subu(t5, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
        __ Branch(&ok, hs, t5, Operand(a2));
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ li(a2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        __ Subu(sp, sp, Operand(kMaxPushes * kPointerSize));
        for (int i = 0; i < kMaxPushes; i++) {
          __ sw(t5, MemOperand(sp, i * kPointerSize));
        }
        // Continue loop if not done.
        __ Subu(a2, a2, Operand(1));
        __ Branch(&loop_header, ne, a2, Operand(zero_reg));
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      __ Subu(sp, sp, Operand(remaining * kPointerSize));
      for (int i = 0; i < remaining; i++) {
        __ sw(t5, MemOperand(sp, i * kPointerSize));
      }
    }
  }

  bool function_in_register_a1 = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in a1.
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(a1);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(a3);  // Preserve new target.
      }
      FastNewFunctionContextStub stub(isolate());
      __ li(FastNewFunctionContextDescriptor::SlotsRegister(), Operand(slots));
      __ CallStub(&stub);
      // Result of FastNewFunctionContextStub is always in new space.
      need_write_barrier = false;
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(a3);  // Restore new target.
      }
    }
    function_in_register_a1 = false;
    // Context is returned in v0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, v0);
    __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var =
          (i == -1) ? info->scope()->receiver() : info->scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ lw(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ sw(a0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, target.offset(), a0, a2,
                                    kRAHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, a0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Register holding this function and new target are both trashed in case we
  // bailout here. But since that can happen only when new target is not used
  // and we allocate a context, the value of |function_in_register| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = info->scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register_a1) {
      __ lw(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again; keep it marked as such.
    }
    SetVar(this_function_var, a1, a0, a2);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = info->scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, a3, a0, a2);
  }

  // Possibly allocate RestParameters.
  int rest_index;
  Variable* rest_param = info->scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register_a1) {
      __ lw(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register_a1 = false;
    SetVar(rest_param, v0, a1, a2);
  }

  Variable* arguments = info->scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_a1) {
      // Load this again, if it's used by the local context below.
      __ lw(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(a1);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, v0, a1, a2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    __ LoadRoot(at, Heap::kStackLimitRootIndex);
    __ Branch(&ok, hs, sp, Operand(at));
    Handle<Code> stack_check = isolate()->builtins()->StackCheck();
    PredictableCodeSizeScope predictable(
        masm_, masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
    __ Call(stack_check, RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}


void FullCodeGenerator::ClearAccumulator() {
  DCHECK(Smi::FromInt(0) == 0);
  __ mov(v0, zero_reg);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));
  __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));
  __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
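  // Illustration: a reset value of FLAG_interrupt_budget >> 4 makes the
  // counter underflow (and reach the InterruptCheck builtin, which also
  // services debug break requests) roughly sixteen times as often as the
  // full budget would.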
  __ li(a2, Operand(profiling_counter_));
  __ li(a3, Operand(Smi::FromInt(reset_value)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
  // to make sure it is constant. Branch may emit a skip-or-jump sequence
  // instead of the normal Branch. It seems that the "skip" part of that
  // sequence is about as long as this Branch would be so it is safe to ignore
  // that.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;
  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ slt(at, a3, zero_reg);
  __ beq(at, zero_reg, &ok);
  // Call will emit a li t9 first, so it is safe to use the delay slot.
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ Branch(&ok, ge, a3, Operand(zero_reg));
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(v0);
  }
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(v0);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in v0.
      __ push(v0);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    // Make sure that the trampoline pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
      SetReturnPosition(literal());
      __ mov(sp, fp);
      __ MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
      __ Addu(sp, sp, Operand(sp_delta));
      __ Jump(ra);
    }
  }
}

void FullCodeGenerator::RestoreContext() {
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ li(result_register(), Operand(lit));
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNull(isolate()) || lit->IsUndefined(isolate()) ||
         !lit->IsUndetectable());
  if (lit->IsUndefined(isolate()) || lit->IsNull(isolate()) ||
      lit->IsFalse(isolate())) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ sw(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  codegen()->OperandStackDepthIncrement(1);
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  // Push the value as the following branch can clobber at in long branch mode.
  __ push(at);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ push(at);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);
  codegen()->PushOperand(at);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  __ mov(a0, result_register());
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  Split(eq, result_register(), Operand(at), if_true, if_false, fall_through);
}


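// For example (an illustrative call, not one from this file): with
// fall_through == if_false, Split(eq, v0, Operand(at), if_true, if_false,
// if_false) emits a single conditional branch to if_true and lets execution
// fall through into the if_false code.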
void FullCodeGenerator::Split(Condition cc,
                              Register lhs,
                              const Operand& rhs,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
    __ Branch(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
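  // Worked example (assuming 4-byte pointers): a stack local with index 1
  // yields MemOperand(fp, kLocal0Offset - 1 * kPointerSize); parameter 0 of
  // a two-parameter function yields MemOperand(fp, 3 * kPointerSize), a slot
  // above the frame pointer in the caller's part of the frame.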
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ lw(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sw(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kRAHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    __ LoadRoot(t0, Heap::kTrueValueRootIndex);
    Split(eq, v0, Operand(t0), if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext,
        a1, Operand(t0));
    __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext,
        a1, Operand(t0));
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      DCHECK(!variable->binding_needs_init());
      FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;
    }
    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (variable->binding_needs_init()) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ sw(t0, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (variable->binding_needs_init()) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ sw(at, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      DCHECK_EQ(VAR, variable->mode());
      DCHECK(!variable->binding_needs_init());
      __ li(a2, Operand(variable->name()));
      __ Push(a2);
      __ CallRuntime(Runtime::kDeclareEvalVar);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                a2,
                                kRAHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      PushOperand(a2);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      CallRuntimeWithOperands(Runtime::kDeclareEvalFunction);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ li(a1, Operand(pairs));
  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ EmitLoadTypeFeedbackVector(a2);
  __ Push(a1, a0, a2);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.

    // Perform the comparison as if via '==='.
    __ lw(a1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ or_(a2, a1, a0);
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);

      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Drop(1);  // Switch value is no longer needed.
      __ Branch(clause->body_target());

      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ Branch(&skip);
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(&next_test, ne, v0, Operand(at));
    __ Drop(1);
    __ Branch(clause->body_target());
    __ bind(&skip);

    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Drop(1);  // Switch value is no longer needed.
    __ Branch(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());
  OperandStackDepthIncrement(5);

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver.  See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(USE_DELAY_SLOT, &done_convert, ge, a1,
            Operand(FIRST_JS_RECEIVER_TYPE));
  __ LoadRoot(at, Heap::kNullValueRootIndex);  // In delay slot.
  __ Branch(USE_DELAY_SLOT, &exit, eq, a0, Operand(at));
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);  // In delay slot.
  __ Branch(&exit, eq, a0, Operand(at));
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  RestoreContext();
  __ mov(a0, v0);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ push(a0);

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so will always take the slow path.
  Label call_runtime;
  __ CheckEnumCache(&call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(a0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  // We got a map in register v0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(a1, v0);
  __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));

  __ LoadInstanceDescriptors(v0, a2);
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ li(a0, Operand(Smi::FromInt(0)));
  // Push map, enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(v0, a2, a1, a0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register v0. Iterate through that.
  __ bind(&fixed_array);

  __ li(a1, Operand(Smi::FromInt(1)));  // Smi(1) indicates slow check.
  __ Push(a1, v0);  // Smi and array.
  __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ Push(a1);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ li(a0, Operand(Smi::FromInt(0)));
  __ Push(a0);  // Initial index.
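  // Operand-stack layout from here on (a sketch derived from the pushes
  // above; sp[0] is the top of the stack):
  //   sp[0]: current index (smi)
  //   sp[1]: length of the key array (smi)
  //   sp[2]: enum cache bridge or the fixed array of keys
  //   sp[3]: expected receiver map, or Smi(1) marking the permanent slow path
  //   sp[4]: the enumerable object itself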

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to a0, load the length to a1.
  __ lw(a0, MemOperand(sp, 0 * kPointerSize));
  __ lw(a1, MemOperand(sp, 1 * kPointerSize));
  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));

  // Get the current entry of the array into result_register.
  __ lw(a2, MemOperand(sp, 2 * kPointerSize));
  __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ Lsa(t0, a2, a0, kPointerSizeLog2 - kSmiTagSize);
  __ lw(result_register(), MemOperand(t0));  // Current entry.

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register a2.
  __ lw(a2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ lw(a1, MemOperand(sp, 4 * kPointerSize));
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Branch(&update_each, eq, t0, Operand(a2));

  // We need to filter the key, record slow-path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(a3);
  __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ sw(a2, FieldMemOperand(a3, FixedArray::OffsetOfElementAt(vector_index)));

  __ mov(a0, result_register());
  // a0 contains the key. The receiver in a1 is the second argument to the
  // ForInFilterStub. ForInFilter returns undefined if the receiver doesn't
  // have the key or returns the name-converted key.
  ForInFilterStub filter_stub(isolate());
  __ CallStub(&filter_stub);
  RestoreContext();
  PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(loop_statement.continue_label(), eq, result_register(),
            Operand(at));

  // Update the 'each' property or variable from the possibly filtered
  // entry in the result_register.
  __ bind(&update_each);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(a0);
  __ Addu(a0, a0, Operand(Smi::FromInt(1)));
  __ push(a0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ Branch(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
  __ li(StoreDescriptor::NameRegister(),
        Operand(isolate()->factory()->home_object_symbol()));
  __ lw(StoreDescriptor::ValueRegister(),
        MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Move(StoreDescriptor::ReceiverRegister(), v0);
  __ li(StoreDescriptor::NameRegister(),
        Operand(isolate()->factory()->home_object_symbol()));
  __ lw(StoreDescriptor::ValueRegister(),
        MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = a1;
  Register temp = a2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ lw(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ lw(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kNativeContextMapRootIndex);
    __ Branch(&fast, eq, temp, Operand(t0));
    // Check that extension is "the hole".
    __ lw(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
    __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ lw(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
    __ Branch(&loop);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = a3;
  Register temp = t0;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ lw(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      __ lw(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is "the hole".
  __ lw(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextMemOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ Branch(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->binding_needs_init()) {
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
      __ Branch(done, ne, at, Operand(zero_reg));
      __ li(a0, Operand(var->name()));
      __ push(a0);
      __ CallRuntime(Runtime::kThrowReferenceError);
    } else {
      __ Branch(done);
    }
  }
}


void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
#ifdef DEBUG
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
#endif
  __ li(LoadGlobalDescriptor::SlotRegister(),
        Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
  CallLoadGlobalIC(typeof_mode);
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(v0);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (NeedsHoleCheckForLoad(proxy)) {
        // Throw a reference error when using an uninitialized let/const
        // binding in harmony mode.
        Label done;
        GetVar(v0, var);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
        __ Branch(&done, ne, at, Operand(zero_reg));
        __ li(a0, Operand(var->name()));
        __ push(a0);
        __ CallRuntime(Runtime::kThrowReferenceError);
        __ bind(&done);
        context()->Plug(v0);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ Push(var->name());
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotInsideTypeof;
      __ CallRuntime(function_id);
      __ bind(&done);
      context()->Plug(v0);
      break;
    }

    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
  Expression* expression = (property == NULL) ? NULL : property->value();
  if (expression == NULL) {
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    PushOperand(a1);
  } else {
    VisitForStackValue(expression);
    if (NeedsHomeObject(expression)) {
      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
             property->kind() == ObjectLiteral::Property::SETTER);
      int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
      EmitSetHomeObject(expression, offset, property->GetSlot());
    }
  }
}


1382
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1383
  Comment cmnt(masm_, "[ ObjectLiteral");
1384

1385
  Handle<FixedArray> constant_properties = expr->constant_properties();
1386
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1387
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1388
  __ li(a1, Operand(constant_properties));
1389 1390
  __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
1391
    __ Push(a3, a2, a1, a0);
1392
    __ CallRuntime(Runtime::kCreateObjectLiteral);
1393
  } else {
1394
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1395
    __ CallStub(&stub);
1396
    RestoreContext();
1397
  }
1398
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1399 1400 1401 1402 1403

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in v0.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(v0);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->IsStringLiteral()) {
          DCHECK(key->IsPropertyName());
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(StoreDescriptor::ValueRegister(), result_register());
            DCHECK(StoreDescriptor::ValueRegister().is(a0));
            __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
            EmitLoadStoreICSlot(property->GetSlot(0));
            CallStoreIC();
            PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ lw(a0, MemOperand(sp));
        PushOperand(a0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          __ li(a0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes.
          PushOperand(a0);
          CallRuntimeWithOperands(Runtime::kSetProperty);
        } else {
          DropOperands(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ lw(a0, MemOperand(sp));
        PushOperand(a0);
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
        PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                               BailoutState::NO_REGISTERS);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(property_index);
          it->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(property_index);
          it->second->setter = property;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ lw(a0, MemOperand(sp));  // Duplicate receiver.
    PushOperand(a0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ li(a0, Operand(Smi::FromInt(NONE)));
    PushOperand(a0);
    CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
    PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
  }
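
  // For illustration only (not from the original source): in a literal like
  //   var o = { get x() { return 1; }, set x(v) { this.v = v; } };
  // the getter and setter for 'x' share one accessor_table entry, so the
  // loop above emits a single Runtime::kDefineAccessorPropertyUnchecked call
  // covering both.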

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right.  All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
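  // For illustration only (not from the original source): in
  //   var o = { a: 1, [key]: 2, b: 3 };
  // 'a' belongs to the static part handled above, while [key] and 'b' form
  // the dynamic part compiled by the loop below.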
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(v0);  // Save result on the stack
      result_saved = true;
    }

    __ lw(a0, MemOperand(sp));  // Duplicate receiver.
    PushOperand(a0);

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
      PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                             BailoutState::NO_REGISTERS);
    } else {
      EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
      VisitForStackValue(value);
      if (NeedsHomeObject(value)) {
        EmitSetHomeObject(value, 2, property->GetSlot());
      }

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            PushOperand(Smi::FromInt(NONE));
            PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
            CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
            PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                                   BailoutState::NO_REGISTERS);
          } else {
            DropOperands(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
          break;

        case ObjectLiteral::Property::SETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
          break;
      }
    }
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ mov(a0, result_register());
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_elements));
  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kCreateArrayLiteral);
  } else {
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  int array_index = 0;
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    DCHECK(!subexpr->IsSpread());

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      PushOperand(v0);  // array literal
      result_saved = true;
    }

    VisitForAccumulatorValue(subexpr);

    __ li(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
    __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
    __ mov(StoreDescriptor::ValueRegister(), result_register());
    EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
    Handle<Code> ic =
        CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
    CallIC(ic);

    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }

  // In case the array literal contains spread expressions it has two parts.
  // The first part is the "static" array which has a literal index and is
  // handled above. The second part is the part after the first spread
  // expression (inclusive), and these elements get appended to the array.
  // Note that the number of elements an iterable produces is unknown ahead
  // of time.
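  // For illustration only (not from the original source): in
  //   var a = [1, 2, ...rest, 3];
  // elements 1 and 2 come from the cloned boilerplate above, while ...rest
  // and 3 are appended one at a time via Runtime::kAppendElement below.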
  if (array_index < length && result_saved) {
    PopOperand(v0);
    result_saved = false;
  }
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);

    PushOperand(v0);
    DCHECK(!subexpr->IsSpread());
    VisitForStackValue(subexpr);
    CallRuntimeWithOperands(Runtime::kAppendElement);

    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }
}


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ Assignment");

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case NAMED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      PushOperand(result_register());
      if (expr->is_compound()) {
        const Register scratch = a1;
        __ lw(scratch, MemOperand(sp, kPointerSize));
        PushOperands(scratch, result_register());
      }
      break;
    case KEYED_SUPER_PROPERTY: {
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(property->key());
      PushOperand(result_register());
      if (expr->is_compound()) {
        const Register scratch1 = t0;
        const Register scratch2 = a1;
        __ lw(scratch1, MemOperand(sp, 2 * kPointerSize));
        __ lw(scratch2, MemOperand(sp, 1 * kPointerSize));
        PushOperands(scratch1, scratch2, result_register());
      }
      break;
    }
    case KEYED_PROPERTY:
      // We need the key and receiver on both the stack and in v0 and a1.
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ lw(LoadDescriptor::ReceiverRegister(),
              MemOperand(sp, 1 * kPointerSize));
        __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
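  // For illustration only (not from the original source): for a compound
  // assignment like o.x += 1, the property load below and the binary
  // operation each get their own bailout point, so optimized code can deopt
  // between the load and the add without redoing either step.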
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case KEYED_SUPER_PROPERTY:
          EmitKeyedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    PushOperand(v0);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  SetExpressionPosition(expr);

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op(), expr->AssignmentSlot());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      context()->Plug(v0);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
      EmitNamedSuperPropertyStore(property);
      context()->Plug(v0);
      break;
    case KEYED_SUPER_PROPERTY:
      EmitKeyedSuperPropertyStore(property);
      context()->Plug(v0);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}


void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  SetExpressionPosition(expr);

  // Evaluate yielded value first; the initial iterator definition depends on
  // this.  It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  Label suspend, continuation, post_runtime, resume, exception;

  __ jmp(&suspend);
  __ bind(&continuation);
  // When we arrive here, v0 holds the generator object.
  __ RecordGeneratorContinuation();
  __ lw(a1, FieldMemOperand(v0, JSGeneratorObject::kResumeModeOffset));
  __ lw(v0, FieldMemOperand(v0, JSGeneratorObject::kInputOrDebugPosOffset));
  __ Branch(&resume, eq, a1, Operand(Smi::FromInt(JSGeneratorObject::kNext)));
  __ Push(result_register());
  __ Branch(&exception, eq, a1,
            Operand(Smi::FromInt(JSGeneratorObject::kThrow)));
  EmitCreateIteratorResult(true);
  EmitUnwindAndReturn();

  __ bind(&exception);
  __ CallRuntime(expr->rethrow_on_exception() ? Runtime::kReThrow
                                              : Runtime::kThrow);

  __ bind(&suspend);
  OperandStackDepthIncrement(1);  // Not popped on this path.
  VisitForAccumulatorValue(expr->generator_object());
  DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
  __ li(a1, Operand(Smi::FromInt(continuation.pos())));
  __ sw(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
  __ sw(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
  __ mov(a1, cp);
  __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
                      kRAHasBeenSaved, kDontSaveFPRegs);
  __ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
  __ Branch(&post_runtime, eq, sp, Operand(a1));
  __ push(v0);  // generator object
  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
  RestoreContext();
  __ bind(&post_runtime);
  PopOperand(result_register());
  EmitReturnSequence();

  __ bind(&resume);
  context()->Plug(result_register());
}
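
// For illustration only (not from the original source): for
//   function* g() { var x = yield 42; }
// the code above suspends at &suspend with 42 on the operand stack; a later
// g.next(v) re-enters at &continuation, and on the &resume path the input v
// becomes the value of the yield expression.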

void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
  OperandStackDepthIncrement(2);
  __ Push(reg1, reg2);
}

void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
                                     Register reg3) {
  OperandStackDepthIncrement(3);
  __ Push(reg1, reg2, reg3);
}

void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
                                     Register reg3, Register reg4) {
  OperandStackDepthIncrement(4);
  __ Push(reg1, reg2, reg3, reg4);
}

void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
  OperandStackDepthDecrement(2);
  __ Pop(reg1, reg2);
}

void FullCodeGenerator::EmitOperandStackDepthCheck() {
  if (FLAG_debug_code) {
    int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
                        operand_stack_depth_ * kPointerSize;
    __ Subu(v0, fp, sp);
    __ Assert(eq, kUnexpectedStackDepth, v0, Operand(expected_diff));
  }
}
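
// For illustration only (not from the original source): if the fixed part of
// the frame were 4 words and two operands had been pushed, fp - sp would have
// to equal 6 * kPointerSize; any mismatch trips the kUnexpectedStackDepth
// assert above.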

void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label allocate, done_allocate;

  __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &allocate,
              NO_ALLOCATION_FLAGS);
  __ jmp(&done_allocate);

  __ bind(&allocate);
  __ Push(Smi::FromInt(JSIteratorResult::kSize));
  __ CallRuntime(Runtime::kAllocateInNewSpace);

  __ bind(&done_allocate);
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
  PopOperand(a2);
  __ LoadRoot(a3,
              done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
  __ LoadRoot(t0, Heap::kEmptyFixedArrayRootIndex);
  __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
  __ sw(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
  __ sw(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
}
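
// For illustration only (not from the original source): the object assembled
// above is a plain iterator result { value: <popped operand>, done: <done> },
// the shape consumed by for-of loops and yield*.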


void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, smi_case, stub_call;

  Register scratch1 = a2;
  Register scratch2 = a3;

  // Get the arguments.
  Register left = a1;
  Register right = a0;
  PopOperand(left);
  __ mov(a0, result_register());

  // Perform combined smi check on both operands.
  __ Or(scratch1, left, Operand(right));
  STATIC_ASSERT(kSmiTag == 0);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);

  __ bind(&stub_call);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done);

  __ bind(&smi_case);
  // Smi case. This code works the same way as the smi-smi case in the type
  // recording binary operation stub.
  switch (op) {
    case Token::SAR:
      __ GetLeastBitsFromSmi(scratch1, right, 5);
      __ srav(right, left, scratch1);
      __ And(v0, right, Operand(~kSmiTagMask));
      break;
    case Token::SHL: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ sllv(scratch1, scratch1, scratch2);
      __ Addu(scratch2, scratch1, Operand(0x40000000));
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);
      break;
    }
    case Token::SHR: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ srlv(scratch1, scratch1, scratch2);
      __ And(scratch2, scratch1, 0xc0000000);
      __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);
      break;
    }
    case Token::ADD:
      __ AddBranchOvf(v0, left, Operand(right), &stub_call);
      break;
    case Token::SUB:
      __ SubBranchOvf(v0, left, Operand(right), &stub_call);
      break;
    case Token::MUL: {
      __ SmiUntag(scratch1, right);
      __ Mul(scratch2, v0, left, scratch1);
      __ sra(scratch1, v0, 31);
      __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
      __ Branch(&done, ne, v0, Operand(zero_reg));
      __ Addu(scratch2, right, left);
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      DCHECK(Smi::FromInt(0) == 0);
      __ mov(v0, zero_reg);
      break;
    }
    case Token::BIT_OR:
      __ Or(v0, left, Operand(right));
      break;
    case Token::BIT_AND:
      __ And(v0, left, Operand(right));
      break;
    case Token::BIT_XOR:
      __ Xor(v0, left, Operand(right));
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(v0);
}
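
// For illustration only (not from the original source): in the smi SHL case
// above, adding 0x40000000 and branching on lt checks that the shifted value
// still fits in the 31-bit smi payload; e.g. (2^30 - 1) << 1 = 0x7ffffffe
// makes the sum negative, so control falls back to &stub_call.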


void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
  for (int i = 0; i < lit->properties()->length(); i++) {
    ObjectLiteral::Property* property = lit->properties()->at(i);
    Expression* value = property->value();

    Register scratch = a1;
    if (property->is_static()) {
      __ lw(scratch, MemOperand(sp, kPointerSize));  // constructor
    } else {
      __ lw(scratch, MemOperand(sp, 0));  // prototype
    }
    PushOperand(scratch);
    EmitPropertyKey(property, lit->GetIdForProperty(i));

    // The static prototype property is read only. We handle the non computed
    // property name case in the parser. Since this is the only case where we
    // need to check for an own read only property we special case this so we
    // do not need to do this for every property.
    if (property->is_static() && property->is_computed_name()) {
      __ CallRuntime(Runtime::kThrowIfStaticPrototype);
      __ push(v0);
    }

    VisitForStackValue(value);
    if (NeedsHomeObject(value)) {
      EmitSetHomeObject(value, 2, property->GetSlot());
    }

    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
      case ObjectLiteral::Property::PROTOTYPE:
        UNREACHABLE();
      case ObjectLiteral::Property::COMPUTED:
        PushOperand(Smi::FromInt(DONT_ENUM));
        PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
        CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
        break;

      case ObjectLiteral::Property::GETTER:
        PushOperand(Smi::FromInt(DONT_ENUM));
        CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
        break;

      case ObjectLiteral::Property::SETTER:
        PushOperand(Smi::FromInt(DONT_ENUM));
        CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
        break;

      default:
        UNREACHABLE();
    }
  }
}
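
// For illustration only (not from the original source): the
// kThrowIfStaticPrototype check above catches computed static names such as
//   class C { static ["proto" + "type"]() {} }
// where the reserved name 'prototype' is only discovered at run time.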


void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
  __ mov(a0, result_register());
  PopOperand(a1);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(v0);
}


void FullCodeGenerator::EmitAssignment(Expression* expr,
                                       FeedbackVectorSlot slot) {
  DCHECK(expr->IsValidReferenceExpressionOrThis());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN, slot);
      break;
    }
    case NAMED_PROPERTY: {
      PushOperand(result_register());  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ mov(StoreDescriptor::ReceiverRegister(), result_register());
      PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
      __ li(StoreDescriptor::NameRegister(),
            Operand(prop->key()->AsLiteral()->value()));
      EmitLoadStoreICSlot(slot);
      CallStoreIC();
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      PushOperand(v0);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      // stack: value, this; v0: home_object
      Register scratch = a2;
      Register scratch2 = a3;
      __ mov(scratch, result_register());             // home_object
      __ lw(v0, MemOperand(sp, kPointerSize));        // value
      __ lw(scratch2, MemOperand(sp, 0));             // this
      __ sw(scratch2, MemOperand(sp, kPointerSize));  // this
      __ sw(scratch, MemOperand(sp, 0));              // home_object
      // stack: this, home_object; v0: value
      EmitNamedSuperPropertyStore(prop);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      PushOperand(v0);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(prop->key());
      Register scratch = a2;
      Register scratch2 = a3;
      __ lw(scratch2, MemOperand(sp, 2 * kPointerSize));  // value
      // stack: value, this, home_object; v0: key, a3: value
      __ lw(scratch, MemOperand(sp, kPointerSize));  // this
      __ sw(scratch, MemOperand(sp, 2 * kPointerSize));
      __ lw(scratch, MemOperand(sp, 0));  // home_object
      __ sw(scratch, MemOperand(sp, kPointerSize));
      __ sw(v0, MemOperand(sp, 0));
      __ Move(v0, scratch2);
      // stack: this, home_object, key; v0: value.
      EmitKeyedSuperPropertyStore(prop);
      break;
    }
    case KEYED_PROPERTY: {
      PushOperand(result_register());  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ mov(StoreDescriptor::NameRegister(), result_register());
      PopOperands(StoreDescriptor::ValueRegister(),
                  StoreDescriptor::ReceiverRegister());
      EmitLoadStoreICSlot(slot);
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(v0);
}


void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ sw(result_register(), location);
  if (var->IsContextSlot()) {
    // RecordWrite may destroy all its register arguments.
    __ Move(a3, result_register());
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(
        a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                               FeedbackVectorSlot slot) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(StoreDescriptor::ValueRegister(), result_register());
    __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
    __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
    EmitLoadStoreICSlot(slot);
    CallStoreIC();

  } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    MemOperand location = VarOperand(var, a1);
    // Perform an initialization check for lexically declared variables.
    if (var->binding_needs_init()) {
      Label assign;
      __ lw(a3, location);
      __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
      __ Branch(&assign, ne, a3, Operand(t0));
      __ li(a3, Operand(var->name()));
      __ push(a3);
      __ CallRuntime(Runtime::kThrowReferenceError);
      __ bind(&assign);
    }
    if (var->mode() == CONST) {
      __ CallRuntime(Runtime::kThrowConstAssignError);
    } else {
      EmitStoreToStackLocalOrContextSlot(var, location);
    }
  } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
    // Initializing assignment to const {this} needs a write barrier.
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label uninitialized_this;
    MemOperand location = VarOperand(var, a1);
    __ lw(a3, location);
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ Branch(&uninitialized_this, eq, a3, Operand(at));
    __ li(a0, Operand(var->name()));
    __ Push(a0);
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&uninitialized_this);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (!var->is_const_mode() || op == Token::INIT) {
    if (var->IsLookupSlot()) {
      // Assignment to var.
      __ Push(var->name());
      __ Push(v0);
      __ CallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreLookupSlot_Strict
                         : Runtime::kStoreLookupSlot_Sloppy);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
      DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
      MemOperand location = VarOperand(var, a1);
      if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
        // Check for an uninitialized let binding.
        __ lw(a2, location);
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }

  } else {
    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
    if (is_strict(language_mode())) {
      __ CallRuntime(Runtime::kThrowConstAssignError);
    }
    // Silently ignore store in sloppy mode.
  }
}
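
// For illustration only (not from the original source): the hole checks above
// implement the temporal dead zone, e.g.
//   { x = 1; let x; }
// stores to 'x' before its 'let' initializer has run, so the binding still
// holds the_hole and Runtime::kThrowReferenceError is called.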


void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  __ mov(StoreDescriptor::ValueRegister(), result_register());
  __ li(StoreDescriptor::NameRegister(),
        Operand(prop->key()->AsLiteral()->value()));
  PopOperand(StoreDescriptor::ReceiverRegister());
  EmitLoadStoreICSlot(expr->AssignmentSlot());
  CallStoreIC();

  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
  // Assignment to named property of super.
  // v0 : value
  // stack : receiver ('this'), home_object
  DCHECK(prop != NULL);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(key != NULL);

  PushOperand(key->value());
  PushOperand(v0);
  CallRuntimeWithOperands(is_strict(language_mode())
                              ? Runtime::kStoreToSuper_Strict
                              : Runtime::kStoreToSuper_Sloppy);
}


void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to keyed property of super.
  // v0 : value
  // stack : receiver ('this'), home_object, key
  DCHECK(prop != NULL);

  PushOperand(v0);
  CallRuntimeWithOperands(is_strict(language_mode())
                              ? Runtime::kStoreKeyedToSuper_Strict
                              : Runtime::kStoreKeyedToSuper_Sloppy);
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  // Call keyed store IC.
  // The arguments are:
  // - a0 is the value,
  // - a1 is the key,
  // - a2 is the receiver.
  __ mov(StoreDescriptor::ValueRegister(), result_register());
  PopOperands(StoreDescriptor::ReceiverRegister(),
              StoreDescriptor::NameRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(a0));

  Handle<Code> ic =
      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
  EmitLoadStoreICSlot(expr->AssignmentSlot());
  CallIC(ic);

  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(v0);
}


void FullCodeGenerator::CallIC(Handle<Code> code,
                               TypeFeedbackId id) {
  ic_total_count_++;
  __ Call(code, RelocInfo::CODE_TARGET, id);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  // Get the target function.
  ConvertReceiverMode convert_mode;
  if (callee->IsVariableProxy()) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, BailoutState::NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    PushOperand(at);
    convert_mode = ConvertReceiverMode::kNullOrUndefined;
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                           BailoutState::TOS_REGISTER);
    // Push the target function under the receiver.
    __ lw(at, MemOperand(sp, 0));
    PushOperand(at);
    __ sw(v0, MemOperand(sp, kPointerSize));
    convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
  }

  EmitCall(expr, convert_mode);
}


void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  SetExpressionPosition(expr);
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  // Load the function from the receiver.
  const Register scratch = a1;
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForAccumulatorValue(super_ref->home_object());
  __ mov(scratch, v0);
  VisitForAccumulatorValue(super_ref->this_var());
  PushOperands(scratch, v0, v0, scratch);
  PushOperand(key->value());

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadFromSuper);
  PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);

  // Replace home_object with target function.
  __ sw(v0, MemOperand(sp, kPointerSize));

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
  __ Move(LoadDescriptor::NameRegister(), v0);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                         BailoutState::TOS_REGISTER);

  // Push the target function under the receiver.
  __ lw(at, MemOperand(sp, 0));
  PushOperand(at);
  __ sw(v0, MemOperand(sp, kPointerSize));

  EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
}


void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetExpressionPosition(prop);
  // Load the function from the receiver.
  const Register scratch = a1;
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForAccumulatorValue(super_ref->home_object());
  __ Move(scratch, v0);
  VisitForAccumulatorValue(super_ref->this_var());
  PushOperands(scratch, v0, v0, scratch);
  VisitForStackValue(prop->key());

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
  PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);

  // Replace home_object with target function.
  __ sw(v0, MemOperand(sp, kPointerSize));

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}


void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  // Record source position of the IC call.
  SetCallPosition(expr, expr->tail_call_mode());
  if (expr->tail_call_mode() == TailCallMode::kAllow) {
    if (FLAG_trace) {
      __ CallRuntime(Runtime::kTraceTailCall);
    }
    // Update profiling counters before the tail call since we will
    // not return to this function.
    EmitProfilingCounterHandlingForReturnSequence(true);
  }
  Handle<Code> ic =
      CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
          .code();
  __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  context()->DropAndPlug(1, v0);
}

void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
  int arg_count = expr->arguments()->length();
  // t4: copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ lw(t4, MemOperand(sp, arg_count * kPointerSize));
  } else {
    __ LoadRoot(t4, Heap::kUndefinedValueRootIndex);
  }

  // t3: the receiver of the enclosing function.
  __ lw(t3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  // t2: the language mode.
  __ li(t2, Operand(Smi::FromInt(language_mode())));

  // t1: the start position of the scope the call resides in.
  __ li(t1, Operand(Smi::FromInt(scope()->start_position())));

  // t0: the source position of the eval call.
  __ li(t0, Operand(Smi::FromInt(expr->position())));

  // Do the runtime call.
  __ Push(t4, t3, t2, t1, t0);
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
}
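
// For illustration only (not from the original source): this resolution step
// is what lets a direct call like eval("x") see the caller's scope; the
// runtime receives the candidate callee plus enough position and language
// mode information to compile the eval code in the right context.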


// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
  VariableProxy* callee = expr->expression()->AsVariableProxy();
  if (callee->var()->IsLookupSlot()) {
    Label slow, done;

    SetExpressionPosition(callee);
    // Generate code for loading from variables potentially shadowed by
    // eval-introduced variables.
    EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in v0)
    // and the object holding it (returned in v1).
    __ Push(callee->name());
    __ CallRuntime(Runtime::kLoadLookupSlotForCall);
    PushOperands(v0, v1);  // Function, receiver.
    PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ Branch(&call);
      __ bind(&done);
      // Push function.
      __ push(v0);
      // The receiver is implicitly the global receiver. Indicate this
      // by passing the hole to the call function stub.
      __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
      __ push(a1);
      __ bind(&call);
    }
  } else {
    VisitForStackValue(callee);
    // refEnv.WithBaseObject()
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    PushOperand(a2);  // Reserved receiver slot.
  }
}


void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
  // In a call to eval, we first call Runtime_ResolvePossiblyDirectEval
  // to resolve the function we need to call.  Then we call the resolved
  // function using the given arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  PushCalleeAndWithBaseObject(expr);

  // Push the arguments.
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Push a copy of the function (found below the arguments) and
  // resolve eval.
  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ push(a1);
  EmitResolvePossiblyDirectEval(expr);

  // Touch up the stack with the resolved function.
  __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));

  PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);
  // Record source position for debugger.
  SetCallPosition(expr);
  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ li(a0, Operand(arg_count));
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                      expr->tail_call_mode()),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RecordJSReturnSite(expr);
  RestoreContext();
  context()->DropAndPlug(1, v0);
}


void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into a1 and a0.
  __ li(a0, Operand(arg_count));
  __ lw(a1, MemOperand(sp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  __ EmitLoadTypeFeedbackVector(a2);
  __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));

  CallConstructStub stub(isolate());
  __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
  RestoreContext();
  context()->Plug(v0);
}


void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
  SuperCallReference* super_call_ref =
      expr->expression()->AsSuperCallReference();
  DCHECK_NOT_NULL(super_call_ref);

  // Push the super constructor target on the stack (may be null,
  // but the Construct builtin can deal with that properly).
  VisitForAccumulatorValue(super_call_ref->this_function_var());
  __ AssertFunction(result_register());
  __ lw(result_register(),
        FieldMemOperand(result_register(), HeapObject::kMapOffset));
  __ lw(result_register(),
        FieldMemOperand(result_register(), Map::kPrototypeOffset));
  PushOperand(result_register());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load new target into a3.
  VisitForAccumulatorValue(super_call_ref->new_target_var());
  __ mov(a3, result_register());

  // Load function and argument count into a1 and a0.
  __ li(a0, Operand(arg_count));
  __ lw(a1, MemOperand(sp, arg_count * kPointerSize));

  __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  context()->Plug(v0);
}


void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ SmiTst(v0, t0);
  Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
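
// For illustration only (not from the original source): SmiTst above ands v0
// with the smi tag mask. On 32-bit targets the integer 5 is boxed as
// 5 << 1 = 0xA with tag bit 0, so the test yields zero and control reaches
// if_true; heap object pointers carry tag bit 1 and go to if_false.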


void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, a1, Operand(FIRST_JS_RECEIVER_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_ARRAY_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_TYPED_ARRAY_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_PROXY_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


2814 2815
void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
2816
  DCHECK(args->length() == 1);
2817 2818 2819 2820
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

2821
  // If the object is not a JSReceiver, we return null.
2822
  __ JumpIfSmi(v0, &null);
2823
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2824
  __ GetObjectType(v0, v0, a1);  // Map is now in v0.
2825
  __ Branch(&null, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));
2826

2827 2828 2829
  // Return 'Function' for JSFunction and JSBoundFunction objects.
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  __ Branch(&function, hs, a1, Operand(FIRST_FUNCTION_TYPE));
2830

2831
  // Check if the constructor in the map is a JS function.
2832 2833 2834 2835
  Register instance_type = a2;
  __ GetMapConstructor(v0, v0, a1, instance_type);
  __ Branch(&non_function_constructor, ne, instance_type,
            Operand(JS_FUNCTION_TYPE));

  // v0 now contains the constructor function. Grab the
  // instance class name from there.
  __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
  __ Branch(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(v0, Heap::kObject_stringRootIndex);
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(v0, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(v0);
}


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(v0, a1);
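  // The generator consumes the char code from v0 and materializes the
  // one-character string in a1, which is why a1 (not v0) is plugged below.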
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(a1);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register result = v0;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
                                      &need_conversion, &index_out_of_range);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
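  // The stack now holds the target, the receiver and the arguments; the
  // target therefore sits argc + 1 slots below the top of the stack.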
  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  // Move target to a1.
  int const argc = args->length() - 2;
  __ lw(a1, MemOperand(sp, (argc + 1) * kPointerSize));
  // Call the target.
  __ li(a0, Operand(argc));
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(argc + 1);
  RestoreContext();
  // Discard the function left on TOS.
  context()->DropAndPlug(1, v0);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));
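  // The masked hash bits are clear exactly when the string caches an array
  // index, hence the eq-against-zero split below.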

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(v0);

  __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ IndexFromHash(v0, v0);

  context()->Plug(v0);
}


void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(v0);
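  // The super constructor is the [[Prototype]] of the active function, so
  // load it via the function's map.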
  __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ lw(v0, FieldMemOperand(v0, Map::kPrototypeOffset));
  context()->Plug(v0);
}

void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ li(at, Operand(debug_is_active));
  __ lb(v0, MemOperand(at));
  __ SmiTag(v0);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

  __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &runtime,
              NO_ALLOCATION_FLAGS);
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
  __ Pop(a2, a3);
  __ LoadRoot(t0, Heap::kEmptyFixedArrayRootIndex);
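  // Initialize the result object in-place: map, properties, elements, value
  // and done -- five words in total, matching the STATIC_ASSERT below.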
  __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
  __ sw(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
  __ sw(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ jmp(&done);

  __ bind(&runtime);
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push function.
  __ LoadNativeContextSlot(expr->context_index(), v0);
  PushOperand(v0);

  // Push undefined as the receiver.
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  PushOperand(v0);
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
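  // The function was pushed below the receiver and the arguments by
  // EmitLoadJSRuntimeFunction, so it sits arg_count + 1 slots down.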
  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ li(a0, Operand(arg_count));
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RestoreContext();
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();
      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(v0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->is_this();
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocatedOrGlobalSlot()) {
          __ LoadGlobalObject(a2);
          __ li(a1, Operand(var->name()));
          __ Push(a2, a1);
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(v0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteLookupSlot);
          context()->Plug(v0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(),
                               BailoutState::NO_REGISTERS);
        __ LoadRoot(v0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(),
                               BailoutState::NO_REGISTERS);
        __ LoadRoot(v0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
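      // The TypeofStub takes its operand in a3 (hence the move below) and
      // leaves the type string in v0, which is plugged into the context.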
      __ mov(a3, v0);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(v0);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ li(at, Operand(Smi::FromInt(0)));
      PushOperand(at);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        const Register scratch = a1;
        __ lw(scratch, MemOperand(sp, 0));  // this
        PushOperands(result_register(), scratch, result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForStackValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        VisitForAccumulatorValue(prop->key());
        const Register scratch1 = a1;
        const Register scratch2 = t0;
        __ lw(scratch1, MemOperand(sp, 1 * kPointerSize));  // this
        __ lw(scratch2, MemOperand(sp, 0 * kPointerSize));  // home object
        PushOperands(result_register(), scratch1, scratch2, result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ lw(LoadDescriptor::ReceiverRegister(),
              MemOperand(sp, 1 * kPointerSize));
        __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
  } else {
    PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  __ mov(a0, v0);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(v0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(v0);
            break;
          case NAMED_PROPERTY:
            __ sw(v0, MemOperand(sp, kPointerSize));
            break;
          case NAMED_SUPER_PROPERTY:
            __ sw(v0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ sw(v0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_SUPER_PROPERTY:
            __ sw(v0, MemOperand(sp, 3 * kPointerSize));
            break;
        }
      }
    }

    Register scratch1 = a1;
    __ li(scratch1, Operand(Smi::FromInt(count_value)));
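    // Fast case: add the Smi delta directly and jump to done; on overflow,
    // fall through, undo the addition and redo the operation in the stub.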
    __ AddBranchNoOvf(v0, v0, Operand(scratch1), &done);
    // Call stub. Undo operation first.
    __ Move(v0, a0);
    __ jmp(&stub_call);
    __ bind(&slow);
  }

  // Convert old value into a number.
  __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
  RestoreContext();
  PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          PushOperand(v0);
          break;
        case NAMED_PROPERTY:
          __ sw(v0, MemOperand(sp, kPointerSize));
          break;
        case NAMED_SUPER_PROPERTY:
          __ sw(v0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ sw(v0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_SUPER_PROPERTY:
          __ sw(v0, MemOperand(sp, 3 * kPointerSize));
          break;
      }
    }
  }

  __ bind(&stub_call);
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(count_value)));
  SetExpressionPosition(expr);

  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in v0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(),
                                 BailoutState::TOS_REGISTER);
          context.Plug(v0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(),
                               BailoutState::TOS_REGISTER);
        context()->Plug(v0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(StoreDescriptor::ValueRegister(), result_register());
      __ li(StoreDescriptor::NameRegister(),
            Operand(prop->key()->AsLiteral()->value()));
      PopOperand(StoreDescriptor::ReceiverRegister());
      EmitLoadStoreICSlot(expr->CountSlot());
      CallStoreIC();
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ mov(StoreDescriptor::ValueRegister(), result_register());
      PopOperands(StoreDescriptor::ReceiverRegister(),
                  StoreDescriptor::NameRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      EmitLoadStoreICSlot(expr->CountSlot());
      CallIC(ic);
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(v0, if_true);
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
    Split(lt, a1, Operand(FIRST_NONSTRING_TYPE), if_true, if_false,
          fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
    Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ LoadRoot(at, Heap::kFalseValueRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    __ Branch(if_false, eq, v0, Operand(at));
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => true.
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(v0, if_false);
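    // typeof is "function" iff the map is callable and not undetectable;
    // testing both bits at once lets a single compare against
    // 1 << Map::kIsCallable decide.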
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1,
           Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(eq, a1, Operand(1 << Map::kIsCallable), if_true, if_false,
          fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(v0, if_false);
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));
    // Check for callable or undetectable objects => false.
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1,
           Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
// clang-format off
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)    \
  } else if (String::Equals(check, factory->type##_string())) {  \
    __ JumpIfSmi(v0, if_false);                                  \
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));      \
    __ LoadRoot(at, Heap::k##Type##MapRootIndex);                \
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
    // clang-format on
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      SetExpressionPosition(expr);
      EmitHasProperty();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(t0, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      __ mov(a0, result_register());
      PopOperand(a1);
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(at, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(at), if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      Condition cc = CompareIC::ComputeCondition(op);
      __ mov(a0, result_register());
      PopOperand(a1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
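        // Fast case: if both operands are Smis (the OR of their tag bits
        // stays clear), compare them directly and skip the CompareIC below.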
        Label slow_case;
        __ Or(a2, a0, Operand(a1));
        patch_site.EmitJumpIfNotSmi(a2, &slow_case);
        Split(cc, a1, Operand(a0), if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(a1, nil_value);
    Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
  } else {
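    // For non-strict comparison, null and undefined are equal to each other
    // and to undetectable objects, so testing the undetectable bit of the
    // map suffices.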
    __ JumpIfSmi(v0, if_false);
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


Register FullCodeGenerator::result_register() {
  return v0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}

void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ lw(value, MemOperand(fp, frame_offset));
}

void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ sw(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ lw(dst, ContextMemOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  DeclarationScope* closure_scope = scope()->GetClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ LoadNativeContextSlot(Context::CLOSURE_INDEX, at);
  } else if (closure_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code.  Fetch it from the context.
    __ lw(at, ContextMemOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  PushOperand(at);
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ lw(a1, MemOperand(at));
  PushOperand(a1);

  ClearPendingMessage();
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Restore pending message from stack.
  PopOperand(a1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ sw(a1, MemOperand(at));
}


void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(a1));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
  __ li(at, Operand(pending_message_obj));
  __ sw(a1, MemOperand(at));
}


void FullCodeGenerator::DeferredCommands::EmitCommands() {
  DCHECK(!result_register().is(a1));
  __ Pop(result_register());  // Restore the accumulator.
  __ Pop(a1);                 // Get the token.
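  // Each deferred command is keyed by a token; the token popped above
  // selects which command to perform.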
  for (DeferredCommand cmd : commands_) {
    Label skip;
    __ li(at, Operand(Smi::FromInt(cmd.token)));
    __ Branch(&skip, ne, a1, Operand(at));
    switch (cmd.command) {
      case kReturn:
        codegen_->EmitUnwindAndReturn();
        break;
      case kThrow:
        __ Push(result_register());
        __ CallRuntime(Runtime::kReThrow);
        break;
      case kContinue:
        codegen_->EmitContinue(cmd.target);
        break;
      case kBreak:
        codegen_->EmitBreak(cmd.target);
        break;
    }
    __ bind(&skip);
  }
}

#undef __


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address pc_immediate_load_address =
      Assembler::target_address_from_return_address(pc);
  Address branch_address = pc_immediate_load_address - 2 * kInstrSize;
  Isolate* isolate = unoptimized_code->GetIsolate();
  CodePatcher patcher(isolate, branch_address, 1);

  switch (target_state) {
    case INTERRUPT:
      // slt at, a3, zero_reg (in case of count based interrupts)
      // beq at, zero_reg, ok
      // lui t9, <interrupt stub address> upper
      // ori t9, <interrupt stub address> lower
      // jalr t9
      // nop
      // ok-label ----- pc_after points here
      patcher.masm()->slt(at, a3, zero_reg);
      break;
    case ON_STACK_REPLACEMENT:
      // addiu at, zero_reg, 1
      // beq at, zero_reg, ok  ;; Not changed
      // lui t9, <on-stack replacement address> upper
      // ori t9, <on-stack replacement address> lower
      // jalr t9  ;; Not changed
      // nop  ;; Not changed
      // ok-label ----- pc_after points here
      patcher.masm()->addiu(at, zero_reg, 1);
      break;
  }
  // Replace the stack check address in the load-immediate (lui/ori pair)
  // with the entry address of the replacement code.
  Assembler::set_target_address_at(isolate, pc_immediate_load_address,
                                   replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address pc_immediate_load_address =
      Assembler::target_address_from_return_address(pc);
  Address branch_address = pc_immediate_load_address - 2 * kInstrSize;

  DCHECK(Assembler::IsBeq(Assembler::instr_at(branch_address + kInstrSize)));
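  // PatchAt installs either 'slt at, a3, zero_reg' (interrupt check) or
  // 'addiu at, zero_reg, 1' (on-stack replacement) as the first instruction
  // of the back edge sequence, so the state can be recovered from it.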
  if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
    DCHECK(reinterpret_cast<uint32_t>(
        Assembler::target_address_at(pc_immediate_load_address)) ==
           reinterpret_cast<uint32_t>(
               isolate->builtins()->InterruptCheck()->entry()));
    return INTERRUPT;
  }

  DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));

  DCHECK(reinterpret_cast<uint32_t>(
             Assembler::target_address_at(pc_immediate_load_address)) ==
         reinterpret_cast<uint32_t>(
             isolate->builtins()->OnStackReplacement()->entry()));
  return ON_STACK_REPLACEMENT;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS