// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM

#include "src/full-codegen/full-codegen.h"
#include "src/ast/compile-time-value.h"
#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compilation-info.h"
#include "src/compiler.h"
#include "src/debug/debug.h"
#include "src/ic/ic.h"

#include "src/arm/code-stubs-arm.h"
#include "src/arm/macro-assembler-arm.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (raw 12 bit
// immediate value is used) is the delta from the pc to the first instruction of
// the patchable code.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this ensure that a jump is always generated to skip
  // the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(eq, target);  // Always taken before patched.
  }

  // When initially emitting this ensure that a jump is never generated to skip
  // the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(ne, target);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    // Block literal pool emission whilst recording patch site information.
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
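      // Encode the delta in the cmp marker: the register index carries
      // delta / kOff12Mask and the raw 12-bit immediate carries
      // delta % kOff12Mask, matching the decoding described in the class
      // comment above.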
      Register reg;
      reg.set_code(delta_to_patch_site / kOff12Mask);
      __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right.  The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r1: the JS function object being called (i.e., ourselves)
//   o r3: the new target value
//   o cp: our context
//   o pp: our caller's constant pool pointer (if enabled)
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ ldr(r2, MemOperand(sp, receiver_offset));
    __ AssertNotSmi(r2);
    __ CompareObjectType(r2, r2, no_reg, FIRST_JS_RECEIVER_TYPE);
    __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  // Increment invocation count for the function.
  {
    Comment cmnt(masm_, "[ Increment invocation count");
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kLiteralsOffset));
    __ ldr(r2, FieldMemOperand(r2, LiteralsArray::kFeedbackVectorOffset));
    __ ldr(r9, FieldMemOperand(r2, TypeFeedbackVector::kInvocationCountIndex *
                                           kPointerSize +
                                       TypeFeedbackVector::kHeaderSize));
    __ add(r9, r9, Operand(Smi::FromInt(1)));
    __ str(r9, FieldMemOperand(r2, TypeFeedbackVector::kInvocationCountIndex *
                                           kPointerSize +
                                       TypeFeedbackVector::kHeaderSize));
  }

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    OperandStackDepthIncrement(locals_count);
    if (locals_count > 0) {
      if (locals_count >= 128) {
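        // For larger frames, check up front against the real stack limit
        // that the locals will fit, and throw a stack overflow otherwise.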
        Label ok;
        __ sub(r9, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
        __ cmp(r9, Operand(r2));
        __ b(hs, &ok);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
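      // Push the undefined values in unrolled batches of kMaxPushes (a loop
      // for whole batches, straight-line pushes for the remainder below) to
      // keep the emitted code size bounded.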
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(r2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(r9);
        }
        // Continue loop if not done.
        __ sub(r2, r2, Operand(1), SetCC);
        __ b(&loop_header, ne);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(r9);
      }
    }
  }

  bool function_in_register_r1 = true;

  // Possibly allocate a local context.
  if (info->scope()->NeedsContext()) {
    // Argument to NewContext is the function, which is still in r1.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(r1);
      __ Push(info->scope()->scope_info());
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(r3);  // Preserve new target.
      }
      if (slots <= FastNewFunctionContextStub::kMaximumSlots) {
        FastNewFunctionContextStub stub(isolate());
        __ mov(FastNewFunctionContextDescriptor::SlotsRegister(),
               Operand(slots));
        __ CallStub(&stub);
        // Result of FastNewFunctionContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(r1);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(r3);  // Restore new target.
      }
    }
    function_in_register_r1 = false;
    // Context is returned in r0.  It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, r0);
    __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var =
          (i == -1) ? info->scope()->receiver() : info->scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ str(r0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, target.offset(), r0, r2,
                                    kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, r0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // The registers holding this function and the new target are both trashed
  // if we bail out here. But since that can only happen when the new target
  // is not used and we allocate a context, the value of
  // |function_in_register_r1| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);

  // We don't support new.target and rest parameters here.
  DCHECK_NULL(info->scope()->new_target_var());
  DCHECK_NULL(info->scope()->rest_parameter());
  DCHECK_NULL(info->scope()->this_function_var());

  Variable* arguments = info->scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_r1) {
      // Load this again, if it's used by the local context below.
      __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(r1);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, r0, r1, r2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
    __ cmp(sp, Operand(ip));
    __ b(hs, &ok);
    Handle<Code> stack_check = isolate()->builtins()->StackCheck();
    PredictableCodeSizeScope predictable(masm_);
    predictable.ExpectSize(
        masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
    __ Call(stack_check, RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emit the constant pool, so it doesn't get emitted in the middle
  // of the back edge table.
  masm()->CheckConstPool(true, false);
}

void FullCodeGenerator::ClearAccumulator() { __ mov(r0, Operand(Smi::kZero)); }

void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
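  // Load the counter cell, subtract |delta| from its smi value and write it
  // back; SetCC leaves the sign flag for the pl/mi branch at the call site.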
  __ mov(r2, Operand(profiling_counter_));
  __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
  __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


#ifdef CAN_USE_ARMV7_INSTRUCTIONS
static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize;
#else
static const int kProfileCounterResetSequenceLength = 7 * Assembler::kInstrSize;
#endif


void FullCodeGenerator::EmitProfilingCounterReset() {
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  PredictableCodeSizeScope predictable_code_size_scope(
      masm_, kProfileCounterResetSequenceLength);
  Label start;
  __ bind(&start);
  int reset_value = FLAG_interrupt_budget;
  __ mov(r2, Operand(profiling_counter_));
  // The mov instruction above can be either 1 to 3 (for ARMv7) or 1 to 5
  // instructions (for ARMv6) depending upon whether it is an extended constant
  // pool - insert nop to compensate.
  int expected_instr_count =
      (kProfileCounterResetSequenceLength / Assembler::kInstrSize) - 2;
  DCHECK(masm_->InstructionsGeneratedSince(&start) <= expected_instr_count);
  while (masm_->InstructionsGeneratedSince(&start) != expected_instr_count) {
    __ nop();
  }
  __ mov(r3, Operand(Smi::FromInt(reset_value)));
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ b(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ b(pl, &ok);
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(r0);
  }
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(r0);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r0.
      __ push(r0);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
      SetReturnPosition(literal());
      // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
      PredictableCodeSizeScope predictable(masm_, -1);
      __ LeaveFrame(StackFrame::JAVA_SCRIPT);
      { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
        __ add(sp, sp, Operand(sp_delta));
        __ Jump(lr);
      }
    }
  }
}

void FullCodeGenerator::RestoreContext() {
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNull(isolate()) || lit->IsUndefined(isolate()) ||
         !lit->IsUndetectable());
  if (lit->IsUndefined(isolate()) || lit->IsNull(isolate()) ||
      lit->IsFalse(isolate())) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ str(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ bind(&done);
  codegen()->PushOperand(ip);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  codegen()->PushOperand(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
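  // The ToBoolean IC leaves a canonical boolean in the result register, so a
  // single comparison against the true root decides the branch.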
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(eq, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
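  // Emit only the branches needed: when one target is the fall-through, a
  // single conditional branch (possibly on the negated condition) suffices.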
  if (if_false == fall_through) {
    __ b(cond, if_true);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false);
  } else {
    __ b(cond, if_true);
    __ b(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      DCHECK(!variable->binding_needs_init());
      globals_->Add(variable->name(), zone());
      FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;
    }
    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (variable->binding_needs_init()) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ str(r0, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (variable->binding_needs_init()) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ str(r0, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP:
    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                r2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP:
    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ mov(r1, Operand(pairs));
  __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ EmitLoadTypeFeedbackVector(r2);
  __ Push(r1, r0, r2);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ ldr(r1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orr(r2, r1, r0);
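      // Or the two values together: the result has the smi tag bit clear
      // only if both operands are smis, so a single check covers both.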
      patch_site.EmitJumpIfNotSmi(r2, &slow_case);

      __ cmp(r1, r0);
      __ b(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ b(&skip);
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    __ b(ne, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ cmp(r0, Operand::Zero());
    __ b(ne, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(5);

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver.  See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_JS_RECEIVER_TYPE);
  __ b(ge, &done_convert);
  __ CompareRoot(r0, Heap::kNullValueRootIndex);
  __ b(eq, &exit);
  __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
  __ b(eq, &exit);
  __ bind(&convert);
  __ Call(isolate()->builtins()->ToObject(), RelocInfo::CODE_TARGET);
  RestoreContext();
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ push(r0);

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so will always take the slow path.
  Label call_runtime;
  __ CheckEnumCache(&call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r2, ip);
  __ b(ne, &fixed_array);

  // We got a map in register r0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r1, r0);
  __ cmp(r1, Operand(Smi::kZero));
  __ b(eq, &no_descriptors);

  __ LoadInstanceDescriptors(r0, r2);
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r0);  // Map.
  __ mov(r0, Operand(Smi::kZero));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r2, r1, r0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register r0. Iterate through that.
  __ bind(&fixed_array);

  __ mov(r1, Operand(Smi::FromInt(1)));  // Smi(1) indicates slow check
  __ Push(r1, r0);  // Smi and array
  __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
  __ Push(r1);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ mov(r0, Operand(Smi::kZero));
  __ Push(r0);  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to r0, load the length to r1.
  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
  __ cmp(r0, r1);  // Compare to the array length.
  __ b(hs, loop_statement.break_label());

  // Get the current entry of the array into register r0.
  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r0, MemOperand::PointerAddressFromSmiKey(r2, r0));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r2.
  __ ldr(r2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r4, Operand(r2));
  __ b(eq, &update_each);

  // We need to filter the key, record slow-path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(r3);
  __ mov(r2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ str(r2, FieldMemOperand(r3, FixedArray::OffsetOfElementAt(vector_index)));

  // r0 contains the key. The receiver in r1 is the second argument to the
  // ForInFilter. ForInFilter returns undefined if the receiver doesn't
  // have the key or returns the name-converted key.
  __ Call(isolate()->builtins()->ForInFilter(), RelocInfo::CODE_TARGET);
  RestoreContext();
  PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
  __ CompareRoot(result_register(), Heap::kUndefinedValueRootIndex);
  __ b(eq, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r0.
  __ bind(&update_each);

  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for the going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
  __ pop(r0);
  __ add(r0, r0, Operand(Smi::FromInt(1)));
  __ push(r0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
  __ ldr(StoreDescriptor::ValueRegister(),
         MemOperand(sp, offset * kPointerSize));
  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
}


void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Move(StoreDescriptor::ReceiverRegister(), r0);
  __ ldr(StoreDescriptor::ValueRegister(),
         MemOperand(sp, offset * kPointerSize));
  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
}

void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
  Variable* var = proxy->var();

  // Two cases: global variables and all other types of variables.
  switch (var->location()) {
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(r0);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (proxy->hole_check_mode() == HoleCheckMode::kRequired) {
        // Throw a reference error when using an uninitialized let/const
        // binding in harmony mode.
        Label done;
        GetVar(r0, var);
        __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
        __ b(ne, &done);
        __ mov(r0, Operand(var->name()));
        __ push(r0);
        __ CallRuntime(Runtime::kThrowReferenceError);
        __ bind(&done);
        context()->Plug(r0);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP:
    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
  Expression* expression = (property == NULL) ? NULL : property->value();
  if (expression == NULL) {
    __ LoadRoot(r1, Heap::kNullValueRootIndex);
    PushOperand(r1);
  } else {
    VisitForStackValue(expression);
    if (NeedsHomeObject(expression)) {
      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
             property->kind() == ObjectLiteral::Property::SETTER);
      int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
      EmitSetHomeObject(expression, offset, property->GetSlot());
    }
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r1, Operand(constant_properties));
  int flags = expr->ComputeFlags();
  __ mov(r0, Operand(Smi::FromInt(flags)));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(r3, r2, r1, r0);
    __ CallRuntime(Runtime::kCreateObjectLiteral);
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in r0.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(r0);  // Save result on stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->IsStringLiteral()) {
          DCHECK(key->IsPropertyName());
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(r0));
            __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
            CallStoreIC(property->GetSlot(0), key->value());
            PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        PushOperand(r0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          __ mov(r0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes
          PushOperand(r0);
          CallRuntimeWithOperands(Runtime::kSetProperty);
        } else {
          DropOperands(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        PushOperand(r0);
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
        PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                               BailoutState::NO_REGISTERS);
        break;

      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(property_index);
          it->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(property_index);
          it->second->setter = property;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ ldr(r0, MemOperand(sp));  // Duplicate receiver.
    PushOperand(r0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ mov(r0, Operand(Smi::FromInt(NONE)));
    PushOperand(r0);
    CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
    PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right.  All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(r0);  // Save result on the stack
      result_saved = true;
    }

    __ ldr(r0, MemOperand(sp));  // Duplicate receiver.
    PushOperand(r0);

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
      PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                             BailoutState::NO_REGISTERS);
    } else {
      EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
      VisitForStackValue(value);
      if (NeedsHomeObject(value)) {
        EmitSetHomeObject(value, 2, property->GetSlot());
      }

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            PushOperand(Smi::FromInt(NONE));
            PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
            CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
            PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                                   BailoutState::NO_REGISTERS);
          } else {
            DropOperands(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
          break;

        case ObjectLiteral::Property::SETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
          break;
      }
    }
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r0);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));
1409

1410
  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1411
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1412 1413 1414 1415 1416
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

1417 1418
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1419
  __ mov(r1, Operand(constant_elements));
1420 1421
  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ mov(r0, Operand(Smi::FromInt(expr->ComputeFlags())));
    __ Push(r3, r2, r1, r0);
    __ CallRuntime(Runtime::kCreateArrayLiteral);
  } else {
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int array_index = 0; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    DCHECK(!subexpr->IsSpread());

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      PushOperand(r0);
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    __ mov(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
    __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
    CallKeyedStoreIC(expr->LiteralFeedbackSlot());

    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r0);
  }
}


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ Assignment");

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case NAMED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      PushOperand(result_register());
      if (expr->is_compound()) {
        const Register scratch = r1;
        __ ldr(scratch, MemOperand(sp, kPointerSize));
        PushOperand(scratch);
        PushOperand(result_register());
      }
      break;
    case KEYED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(property->key());
      PushOperand(result_register());
      if (expr->is_compound()) {
        const Register scratch = r1;
        __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
        PushOperand(scratch);
        __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
        PushOperand(scratch);
        PushOperand(result_register());
      }
      break;
    case KEYED_PROPERTY:
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ ldr(LoadDescriptor::ReceiverRegister(),
               MemOperand(sp, 1 * kPointerSize));
        __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case KEYED_SUPER_PROPERTY:
          EmitKeyedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    PushOperand(r0);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  SetExpressionPosition(expr);

  // Store the value.
  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->target()->AsVariableProxy();
      EmitVariableAssignment(proxy->var(), expr->op(), expr->AssignmentSlot(),
                             proxy->hole_check_mode());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      context()->Plug(r0);
      break;
    }
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
      EmitNamedSuperPropertyStore(property);
      context()->Plug(r0);
      break;
    case KEYED_SUPER_PROPERTY:
      EmitKeyedSuperPropertyStore(property);
      context()->Plug(r0);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}


void FullCodeGenerator::VisitYield(Yield* expr) {
  // Resumable functions are not supported.
  UNREACHABLE();
}

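// Helpers that push or pop a pair of registers while keeping the statically
// tracked operand stack depth in sync with the machine stack.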
void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
  OperandStackDepthIncrement(2);
  __ Push(reg1, reg2);
}

void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
  OperandStackDepthDecrement(2);
  __ Pop(reg1, reg2);
}

void FullCodeGenerator::EmitOperandStackDepthCheck() {
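  // In debug builds, verify that the tracked operand stack depth matches the
  // actual distance between fp and sp.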
  if (FLAG_debug_code) {
    int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
                        operand_stack_depth_ * kPointerSize;
    __ sub(r0, fp, sp);
    __ cmp(r0, Operand(expected_diff));
    __ Assert(eq, kUnexpectedStackDepth);
  }
}

void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label allocate, done_allocate;
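  // Try to allocate the JSIteratorResult inline; on failure, allocate via the
  // runtime. The popped operand becomes the value and |done| selects the done
  // flag.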

  __ Allocate(JSIteratorResult::kSize, r0, r2, r3, &allocate,
              NO_ALLOCATION_FLAGS);
  __ b(&done_allocate);

  __ bind(&allocate);
  __ Push(Smi::FromInt(JSIteratorResult::kSize));
  __ CallRuntime(Runtime::kAllocateInNewSpace);

  __ bind(&done_allocate);
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r1);
  PopOperand(r2);
  __ LoadRoot(r3,
              done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
  __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
  __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
  __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
  __ str(r2, FieldMemOperand(r0, JSIteratorResult::kValueOffset));
  __ str(r3, FieldMemOperand(r0, JSIteratorResult::kDoneOffset));
}


void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, smi_case, stub_call;

  Register scratch1 = r2;
  Register scratch2 = r3;

  // Get the arguments.
  Register left = r1;
  Register right = r0;
  PopOperand(left);

  // Perform combined smi check on both operands.
  __ orr(scratch1, left, Operand(right));
  STATIC_ASSERT(kSmiTag == 0);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);

  __ bind(&stub_call);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done);

  __ bind(&smi_case);
  // Smi case. This code works the same way as the smi-smi case in the type
  // recording binary operation stub.
  switch (op) {
    case Token::SAR:
      __ GetLeastBitsFromSmi(scratch1, right, 5);
      __ mov(right, Operand(left, ASR, scratch1));
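      // The arithmetic shift may carry bits into the smi tag position; clear
      // the tag bit so the result is a valid smi again.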
      __ bic(right, right, Operand(kSmiTagMask));
      break;
    case Token::SHL: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ mov(scratch1, Operand(scratch1, LSL, scratch2));
      __ TrySmiTag(right, scratch1, &stub_call);
      break;
    }
    case Token::SHR: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ mov(scratch1, Operand(scratch1, LSR, scratch2));
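      // The unsigned shift result must fit in a non-negative smi, i.e. the
      // two most significant bits have to be clear; otherwise fall back to
      // the stub.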
      __ tst(scratch1, Operand(0xc0000000));
      __ b(ne, &stub_call);
      __ SmiTag(right, scratch1);
      break;
    }
    case Token::ADD:
      __ add(scratch1, left, Operand(right), SetCC);
      __ b(vs, &stub_call);
      __ mov(right, scratch1);
      break;
    case Token::SUB:
      __ sub(scratch1, left, Operand(right), SetCC);
      __ b(vs, &stub_call);
      __ mov(right, scratch1);
      break;
    case Token::MUL: {
      __ SmiUntag(ip, right);
      __ smull(scratch1, scratch2, left, ip);
      __ mov(ip, Operand(scratch1, ASR, 31));
      __ cmp(ip, Operand(scratch2));
      __ b(ne, &stub_call);
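      // The product is zero: the result may need to be -0, which cannot be
      // represented as a smi. Use the sign of left + right to distinguish
      // +0 (non-negative, pl) from -0 (negative, mi).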
      __ cmp(scratch1, Operand::Zero());
      __ mov(right, Operand(scratch1), LeaveCC, ne);
      __ b(ne, &done);
      __ add(scratch2, right, Operand(left), SetCC);
      __ mov(right, Operand(Smi::kZero), LeaveCC, pl);
      __ b(mi, &stub_call);
      break;
    }
    case Token::BIT_OR:
      __ orr(right, left, Operand(right));
      break;
    case Token::BIT_AND:
      __ and_(right, left, Operand(right));
      break;
    case Token::BIT_XOR:
      __ eor(right, left, Operand(right));
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(r0);
}

void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
  PopOperand(r1);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(r0);
}


void FullCodeGenerator::EmitAssignment(Expression* expr,
                                       FeedbackVectorSlot slot) {
  DCHECK(expr->IsValidReferenceExpressionOrThis());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->AsVariableProxy();
      EffectContext context(this);
      EmitVariableAssignment(proxy->var(), Token::ASSIGN, slot,
                             proxy->hole_check_mode());
      break;
    }
    case NAMED_PROPERTY: {
      PushOperand(r0);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ Move(StoreDescriptor::ReceiverRegister(), r0);
      PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
      CallStoreIC(slot, prop->key()->AsLiteral()->value());
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      PushOperand(r0);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      // stack: value, this; r0: home_object
      Register scratch = r2;
      Register scratch2 = r3;
      __ mov(scratch, result_register());              // home_object
      __ ldr(r0, MemOperand(sp, kPointerSize));        // value
      __ ldr(scratch2, MemOperand(sp, 0));             // this
      __ str(scratch2, MemOperand(sp, kPointerSize));  // this
      __ str(scratch, MemOperand(sp, 0));              // home_object
      // stack: this, home_object; r0: value
      EmitNamedSuperPropertyStore(prop);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      PushOperand(r0);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(prop->key());
      Register scratch = r2;
      Register scratch2 = r3;
      __ ldr(scratch2, MemOperand(sp, 2 * kPointerSize));  // value
      // stack: value, this, home_object; r0: key, r3: value
      __ ldr(scratch, MemOperand(sp, kPointerSize));  // this
      __ str(scratch, MemOperand(sp, 2 * kPointerSize));
      __ ldr(scratch, MemOperand(sp, 0));  // home_object
      __ str(scratch, MemOperand(sp, kPointerSize));
      __ str(r0, MemOperand(sp, 0));
      __ Move(r0, scratch2);
      // stack: this, home_object, key; r0: value.
      EmitKeyedSuperPropertyStore(prop);
      break;
    }
    case KEYED_PROPERTY: {
      PushOperand(r0);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Move(StoreDescriptor::NameRegister(), r0);
      PopOperands(StoreDescriptor::ValueRegister(),
                  StoreDescriptor::ReceiverRegister());
      CallKeyedStoreIC(slot);
      break;
    }
  }
  context()->Plug(r0);
}


void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ str(result_register(), location);
  if (var->IsContextSlot()) {
    // RecordWrite may destroy all its register arguments.
    __ mov(r3, result_register());
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(
        r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
  }
}

void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                               FeedbackVectorSlot slot,
                                               HoleCheckMode hole_check_mode) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
    CallStoreIC(slot, var->name());

  } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    MemOperand location = VarOperand(var, r1);
    // Perform an initialization check for lexically declared variables.
    if (hole_check_mode == HoleCheckMode::kRequired) {
      Label assign;
      __ ldr(r3, location);
      __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
      __ b(ne, &assign);
      __ mov(r3, Operand(var->name()));
      __ push(r3);
      __ CallRuntime(Runtime::kThrowReferenceError);
      __ bind(&assign);
    }
    if (var->mode() != CONST) {
      EmitStoreToStackLocalOrContextSlot(var, location);
    } else if (var->throw_on_const_assignment(language_mode())) {
      __ CallRuntime(Runtime::kThrowConstAssignError);
    }
  } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
    // Initializing assignment to const {this} needs a write barrier.
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label uninitialized_this;
    MemOperand location = VarOperand(var, r1);
    __ ldr(r3, location);
    __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
    __ b(eq, &uninitialized_this);
    __ mov(r0, Operand(var->name()));
    __ Push(r0);
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&uninitialized_this);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else {
    DCHECK(var->mode() != CONST || op == Token::INIT);
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    DCHECK(!var->IsLookupSlot());
    // Assignment to var or initializing assignment to let/const in harmony
    // mode.
    MemOperand location = VarOperand(var, r1);
    if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
      // Check for an uninitialized let binding.
      __ ldr(r2, location);
      __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
      __ Check(eq, kLetBindingReInitialization);
    }
    EmitStoreToStackLocalOrContextSlot(var, location);
  }
}


void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  PopOperand(StoreDescriptor::ReceiverRegister());
  CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());

  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
  // Assignment to named property of super.
  // r0 : value
  // stack : receiver ('this'), home_object
  DCHECK(prop != NULL);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(key != NULL);

  PushOperand(key->value());
  PushOperand(r0);
  CallRuntimeWithOperands(is_strict(language_mode())
                              ? Runtime::kStoreToSuper_Strict
                              : Runtime::kStoreToSuper_Sloppy);
}


void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to keyed property of super.
  // r0 : value
  // stack : receiver ('this'), home_object, key
  DCHECK(prop != NULL);

  PushOperand(r0);
  CallRuntimeWithOperands(is_strict(language_mode())
                              ? Runtime::kStoreKeyedToSuper_Strict
                              : Runtime::kStoreKeyedToSuper_Sloppy);
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  PopOperands(StoreDescriptor::ReceiverRegister(),
              StoreDescriptor::NameRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(r0));

  CallKeyedStoreIC(expr->AssignmentSlot());

  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(r0);
}

// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  // Get the target function.
  ConvertReceiverMode convert_mode;
  if (callee->IsVariableProxy()) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, BailoutState::NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    PushOperand(ip);
    convert_mode = ConvertReceiverMode::kNullOrUndefined;
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                           BailoutState::TOS_REGISTER);
    // Push the target function under the receiver.
    __ ldr(ip, MemOperand(sp, 0));
    PushOperand(ip);
    __ str(r0, MemOperand(sp, kPointerSize));
    convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
  }

  EmitCall(expr, convert_mode);
}


void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());
  SetExpressionPosition(prop);

  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  // Load the function from the receiver.
  const Register scratch = r1;
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  PushOperand(r0);
  PushOperand(r0);
  __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
  PushOperand(scratch);
  PushOperand(key->value());

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadFromSuper);
  PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);

  // Replace home_object with target function.
  __ str(r0, MemOperand(sp, kPointerSize));

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
  __ Move(LoadDescriptor::NameRegister(), r0);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                         BailoutState::TOS_REGISTER);

  // Push the target function under the receiver.
  __ ldr(ip, MemOperand(sp, 0));
  PushOperand(ip);
  __ str(r0, MemOperand(sp, kPointerSize));

  EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
}


void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetExpressionPosition(prop);
  // Load the function from the receiver.
  const Register scratch = r1;
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  PushOperand(r0);
  PushOperand(r0);
  __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
  PushOperand(scratch);
  VisitForStackValue(prop->key());

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
  PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);

  // Replace home_object with target function.
  __ str(r0, MemOperand(sp, kPointerSize));

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}


void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  SetCallPosition(expr, expr->tail_call_mode());
  if (expr->tail_call_mode() == TailCallMode::kAllow) {
    if (FLAG_trace) {
      __ CallRuntime(Runtime::kTraceTailCall);
    }
    // Update profiling counters before the tail call since we will
    // not return to this function.
    EmitProfilingCounterHandlingForReturnSequence(true);
  }
  Handle<Code> code =
      CodeFactory::CallIC(isolate(), mode, expr->tail_call_mode()).code();
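  // The CallIC expects the callee in r1, the argument count in r0, and the
  // feedback slot (as a smi) in r3; the callee sits below the arguments on
  // the stack.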
  __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ mov(r0, Operand(arg_count));
  CallIC(code);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  context()->DropAndPlug(1, r0);
}

void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into r1 and r0.
  __ mov(r0, Operand(arg_count));
  __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  __ EmitLoadTypeFeedbackVector(r2);
  __ mov(r3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));

  CallConstructStub stub(isolate());
  CallIC(stub.GetCode());
  OperandStackDepthDecrement(arg_count + 1);
  PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
  RestoreContext();
  context()->Plug(r0);
}


void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ SmiTst(r0);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, FIRST_JS_RECEIVER_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_TYPED_ARRAY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_PROXY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(r0, &null);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ CompareObjectType(r0, r0, r1, FIRST_JS_RECEIVER_TYPE);
  // Map is now in r0.
  __ b(lt, &null);

  // Return 'Function' for JSFunction and JSBoundFunction objects.
  __ cmp(r1, Operand(FIRST_FUNCTION_TYPE));
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  __ b(hs, &function);

  // Check if the constructor in the map is a JS function.
  Register instance_type = r2;
  __ GetMapConstructor(r0, r0, r1, instance_type);
  __ cmp(instance_type, Operand(JS_FUNCTION_TYPE));
  __ b(ne, &non_function_constructor);

  // r0 now contains the constructor function. Grab the
  // instance class name from there.
  __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
  __ b(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(r0, Heap::kFunction_stringRootIndex);
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(r0, Heap::kObject_stringRootIndex);
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(r0, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(r0);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = r1;
  Register index = r0;
  Register result = r3;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
                                      &need_conversion, &index_out_of_range);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  // Move target to r1.
  int const argc = args->length() - 2;
  __ ldr(r1, MemOperand(sp, (argc + 1) * kPointerSize));
  // Call the target.
  __ mov(r0, Operand(argc));
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(argc + 1);
  RestoreContext();
  // Discard the function left on TOS.
  context()->DropAndPlug(1, r0);
}

void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(r0);
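  // The superclass constructor is the prototype of the active function;
  // fetch it via the function's map.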
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldr(r0, FieldMemOperand(r0, Map::kPrototypeOffset));
  context()->Plug(r0);
}

void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
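  // Read the isolate's debug_is_active byte flag and return it as a smi.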
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ mov(ip, Operand(debug_is_active));
  __ ldrb(r0, MemOperand(ip));
  __ SmiTag(r0);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;
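  // Try to allocate and initialize the JSIteratorResult inline, falling back
  // to the runtime call on allocation failure.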

  __ Allocate(JSIteratorResult::kSize, r0, r2, r3, &runtime,
              NO_ALLOCATION_FLAGS);
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r1);
  __ pop(r3);
  __ pop(r2);
  __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
  __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
  __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
  __ str(r2, FieldMemOperand(r0, JSIteratorResult::kValueOffset));
  __ str(r3, FieldMemOperand(r0, JSIteratorResult::kDoneOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ b(&done);

  __ bind(&runtime);
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push function.
  __ LoadNativeContextSlot(expr->context_index(), r0);
  PushOperand(r0);

  // Push undefined as the receiver.
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  PushOperand(r0);
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ mov(r0, Operand(arg_count));
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RestoreContext();
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(r0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->is_this();
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocated()) {
          __ LoadGlobalObject(r2);
          __ mov(r1, Operand(var->name()));
          __ Push(r2, r1);
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(r0);
        } else {
          DCHECK(!var->IsLookupSlot());
          DCHECK(var->IsStackAllocated() || var->IsContextSlot());
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(is_this);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(),
                               BailoutState::NO_REGISTERS);
        __ LoadRoot(r0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(),
                               BailoutState::NO_REGISTERS);
        __ LoadRoot(r0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ mov(r3, r0);
      __ Call(isolate()->builtins()->Typeof(), RelocInfo::CODE_TARGET);
      context()->Plug(r0);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ mov(ip, Operand(Smi::kZero));
      PushOperand(ip);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        PushOperand(result_register());
        const Register scratch = r1;
        __ ldr(scratch, MemOperand(sp, kPointerSize));
        PushOperand(scratch);
        PushOperand(result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForStackValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        VisitForAccumulatorValue(prop->key());
        PushOperand(result_register());
        const Register scratch = r1;
        __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
        PushOperand(scratch);
        __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
        PushOperand(scratch);
        PushOperand(result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ ldr(LoadDescriptor::ReceiverRegister(),
               MemOperand(sp, 1 * kPointerSize));
        __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
  } else {
    PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(r0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(r0);
            break;
          case NAMED_PROPERTY:
            __ str(r0, MemOperand(sp, kPointerSize));
            break;
          case NAMED_SUPER_PROPERTY:
            __ str(r0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ str(r0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_SUPER_PROPERTY:
            __ str(r0, MemOperand(sp, 3 * kPointerSize));
            break;
        }
      }
    }

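    // Inline smi case: add the tagged count value and finish on no overflow
    // (vc); on overflow the operation is undone and the stub is called.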
    __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
    __ b(vc, &done);
    // Call stub. Undo operation first.
    __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
    __ jmp(&stub_call);
    __ bind(&slow);
  }

  // Convert old value into a number.
  __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
  RestoreContext();
  PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          PushOperand(r0);
          break;
        case NAMED_PROPERTY:
          __ str(r0, MemOperand(sp, kPointerSize));
          break;
        case NAMED_SUPER_PROPERTY:
          __ str(r0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ str(r0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_SUPER_PROPERTY:
          __ str(r0, MemOperand(sp, 3 * kPointerSize));
          break;
      }
    }
  }

  __ bind(&stub_call);
  __ mov(r1, r0);
  __ mov(r0, Operand(Smi::FromInt(count_value)));

  SetExpressionPosition(expr);

  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in r0.
  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->expression()->AsVariableProxy();
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
                                 proxy->hole_check_mode());
          PrepareForBailoutForId(expr->AssignmentId(),
                                 BailoutState::TOS_REGISTER);
          context.Plug(r0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
                               proxy->hole_check_mode());
        PrepareForBailoutForId(expr->AssignmentId(),
                               BailoutState::TOS_REGISTER);
        context()->Plug(r0);
      }
      break;
    }
    case NAMED_PROPERTY: {
      PopOperand(StoreDescriptor::ReceiverRegister());
      CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      PopOperands(StoreDescriptor::ReceiverRegister(),
                  StoreDescriptor::NameRegister());
      CallKeyedStoreIC(expr->CountSlot());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

2860
  { AccumulatorValueContext context(this);
2861
    VisitForTypeofValue(sub_expr);
2862
  }
2863
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2864

2865 2866
  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
2867
    __ JumpIfSmi(r0, if_true);
2868 2869 2870 2871
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, fall_through);
2872
  } else if (String::Equals(check, factory->string_string())) {
2873 2874
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
2875
    Split(lt, if_true, if_false, fall_through);
2876
  } else if (String::Equals(check, factory->symbol_string())) {
2877 2878 2879
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
    Split(eq, if_true, if_false, fall_through);
2880
  } else if (String::Equals(check, factory->boolean_string())) {
2881
    __ CompareRoot(r0, Heap::kTrueValueRootIndex);
2882
    __ b(eq, if_true);
2883
    __ CompareRoot(r0, Heap::kFalseValueRootIndex);
2884
    Split(eq, if_true, if_false, fall_through);
2885
  } else if (String::Equals(check, factory->undefined_string())) {
2886 2887
    __ CompareRoot(r0, Heap::kNullValueRootIndex);
    __ b(eq, if_false);
2888
    __ JumpIfSmi(r0, if_false);
2889 2890 2891
    // Check for undetectable objects => true.
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
2892 2893 2894
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(ne, if_true, if_false, fall_through);

2895
  } else if (String::Equals(check, factory->function_string())) {
2896
    __ JumpIfSmi(r0, if_false);
2897 2898 2899 2900 2901
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ and_(r1, r1,
            Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    __ cmp(r1, Operand(1 << Map::kIsCallable));
2902
    Split(eq, if_true, if_false, fall_through);
2903
  } else if (String::Equals(check, factory->object_string())) {
2904
    __ JumpIfSmi(r0, if_false);
2905 2906
    __ CompareRoot(r0, Heap::kNullValueRootIndex);
    __ b(eq, if_true);
2907 2908
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CompareObjectType(r0, r0, r1, FIRST_JS_RECEIVER_TYPE);
2909
    __ b(lt, if_false);
2910
    // Check for callable or undetectable objects => false.
2911
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
2912
    __ tst(r1, Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
2913
    Split(eq, if_true, if_false, fall_through);
2914 2915 2916 2917 2918 2919 2920 2921 2922 2923
// clang-format off
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)   \
  } else if (String::Equals(check, factory->type##_string())) { \
    __ JumpIfSmi(r0, if_false);                                 \
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));    \
    __ CompareRoot(r0, Heap::k##Type##MapRootIndex);            \
    Split(eq, if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
    // clang-format on
2924 2925
  } else {
    if (if_false != fall_through) __ jmp(if_false);
2926
  }
2927
  context()->Plug(if_true, if_false);
2928 2929 2930
}
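
// Illustrative note (assumed JS input): a comparison such as
//   typeof x === "number"
// takes the number_string() branch above and compiles to roughly:
//   ; r0 holds the value of x
//   JumpIfSmi r0, if_true        ; smis are numbers
//   ldr r0, [r0, #HeapObject::kMapOffset]
//   cmp r0, <HeapNumber map>     ; otherwise require a heap number
//   b(eq) if_true, else if_false
// The final else-branch jumps straight to if_false because `typeof` can
// never evaluate to a string that is not special-cased here.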


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      SetExpressionPosition(expr);
      EmitHasProperty();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(r0, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      PopOperand(r1);
      __ Call(isolate()->builtins()->InstanceOf(), RelocInfo::CODE_TARGET);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(r0, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      Condition cond = CompareIC::ComputeCondition(op);
      PopOperand(r1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ orr(r2, r0, Operand(r1));
        patch_site.EmitJumpIfNotSmi(r2, &slow_case);
        __ cmp(r1, r0);
        Split(cond, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ cmp(r0, Operand::Zero());
      Split(cond, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}
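
// Illustrative note (assumed JS input): for `a < b` the default case above
// first tries an inlined smi path, roughly:
//   orr r2, r0, r1    ; low tag bit stays zero only if both values are smis
//   <jump to slow_case unless r2 is a smi>
//   cmp r1, r0        ; tagged smi compare sets the condition flags directly
// Only non-smi operands fall through to the generic CompareIC stub call.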


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(r1, nil_value);
    __ cmp(r0, r1);
    Split(eq, if_true, if_false, fall_through);
  } else {
    __ JumpIfSmi(r0, if_false);
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(ne, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}
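
// Illustrative note: strict comparisons (`x === null`, `x === undefined`)
// reduce to a single root-value comparison against the matching oddball.
// Sloppy `x == null` also matches undefined and "undetectable" objects
// (document.all being the classic example), which is why the else-branch
// tests the map's kIsUndetectable bit instead of comparing values.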


Register FullCodeGenerator::result_register() {
  return r0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}

void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ ldr(value, MemOperand(fp, frame_offset));
}

void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ str(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ ldr(dst, ContextMemOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  DeclarationScope* closure_scope = scope()->GetClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip);
  } else if (closure_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code.  Fetch it from the context.
    __ ldr(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  PushOperand(ip);
}
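
// Illustrative note: for `function f() { eval("var x;"); }` the context
// allocated for the eval code receives f's own closure, fetched from the
// current context, while top-level script or module code receives the
// canonical empty function from the native context instead.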


#undef __


static Address GetInterruptImmediateLoadAddress(Address pc) {
  Address load_address = pc - 2 * Assembler::kInstrSize;
  if (!FLAG_enable_embedded_constant_pool) {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
  } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
    // This is an extended constant pool lookup.
    if (CpuFeatures::IsSupported(ARMv7)) {
      load_address -= 2 * Assembler::kInstrSize;
      DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
      DCHECK(Assembler::IsMovT(
          Memory::int32_at(load_address + Assembler::kInstrSize)));
    } else {
      load_address -= 4 * Assembler::kInstrSize;
      DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
      DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + Assembler::kInstrSize)));
      DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
      DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + 3 * Assembler::kInstrSize)));
    }
  } else if (CpuFeatures::IsSupported(ARMv7) &&
             Assembler::IsMovT(Memory::int32_at(load_address))) {
    // This is a movw / movt immediate load.
    load_address -= Assembler::kInstrSize;
    DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
  } else if (!CpuFeatures::IsSupported(ARMv7) &&
             Assembler::IsOrrImmed(Memory::int32_at(load_address))) {
    // This is a mov / orr immediate load.
    load_address -= 3 * Assembler::kInstrSize;
    DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
    DCHECK(Assembler::IsOrrImmed(
        Memory::int32_at(load_address + Assembler::kInstrSize)));
    DCHECK(Assembler::IsOrrImmed(
        Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
  } else {
    // This is a small constant pool lookup.
    DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
  }
  return load_address;
}
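
// Illustrative layout (ARMv7 movw/movt immediate-load case, assuming |pc|
// points just past the back-edge call sequence):
//   pc - 3 * kInstrSize:  movw ip, #imm_low    <- returned load_address
//   pc - 2 * kInstrSize:  movt ip, #imm_high
//   pc - 1 * kInstrSize:  blx ip
// The single-instruction constant pool loads keep load_address at
// pc - 2 * kInstrSize, and the ARMv6 mov/orr forms walk back further.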


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
  Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
  Isolate* isolate = unoptimized_code->GetIsolate();
  CodePatcher patcher(isolate, branch_address, 1);
  switch (target_state) {
    case INTERRUPT:
    {
      //  <decrement profiling counter>
      //   bpl ok
      //   ; load interrupt stub address into ip - either of (for ARMv7):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |   movw ip, #imm     |   movw ip, #imm
      //                          |   movt ip, #imm     |   movt ip, #imm
      //                          |   ldr  ip, [pp, ip]
      //   ; or (for ARMv6):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |   mov ip, #imm      |   mov ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //   blx ip
      //  <reset profiling counter>
      //  ok-label

      // Calculate the branch offset to the ok-label - this is the difference
      // between the branch address and |pc| (which points at <blx ip>) plus
      // the length of the profiling counter reset sequence.
      int branch_offset = pc - Instruction::kPCReadOffset - branch_address +
                          kProfileCounterResetSequenceLength;
      patcher.masm()->b(branch_offset, pl);
      break;
    }
    case ON_STACK_REPLACEMENT:
      //  <decrement profiling counter>
      //   mov r0, r0 (NOP)
      //   ; load on-stack replacement address into ip - either of (for ARMv7):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |   movw ip, #imm     |   movw ip, #imm
      //                          |   movt ip, #imm     |   movt ip, #imm
      //                          |   ldr  ip, [pp, ip]
      //   ; or (for ARMv6):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |   mov ip, #imm      |   mov ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //   blx ip
      //  <reset profiling counter>
      //  ok-label
      patcher.masm()->nop();
      break;
  }

  // Replace the call address.
  Assembler::set_target_address_at(isolate, pc_immediate_load_address,
                                   unoptimized_code, replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  DCHECK(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize)));

  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
  Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
#ifdef DEBUG
  Address interrupt_address = Assembler::target_address_at(
      pc_immediate_load_address, unoptimized_code);
#endif

  if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
    DCHECK(interrupt_address ==
           isolate->builtins()->InterruptCheck()->entry());
    return INTERRUPT;
  }

  DCHECK(Assembler::IsNop(Assembler::instr_at(branch_address)));

  DCHECK(interrupt_address ==
         isolate->builtins()->OnStackReplacement()->entry());
  return ON_STACK_REPLACEMENT;
}
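
// Illustrative summary: after PatchAt, the instruction at branch_address by
// itself encodes the back edge's state, so no side table is required:
//   bpl ok  -> INTERRUPT            (skip the call while the counter is >= 0)
//   nop     -> ON_STACK_REPLACEMENT (always fall through into blx ip)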


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM