// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS

#include "src/ic/handler-compiler.h"

#include "src/api-arguments.h"
#include "src/field-type.h"
#include "src/ic/call-optimization.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


// Emits code that invokes a JavaScript getter for a named property and
// returns its result. When |accessor_index| < 0 the call itself is omitted
// and only a deoptimization continuation point is recorded.
void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- a0    : receiver
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save context register
    __ push(cp);

    if (accessor_index >= 0) {
      DCHECK(!holder.is(scratch));
      DCHECK(!receiver.is(scratch));
      // Call the JavaScript getter with the receiver on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ lw(scratch,
              FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ push(receiver);
      // a1 <- the getter function; a0 <- argument count (zero) for
      // the CallFunction builtin.
      __ LoadAccessor(a1, holder, accessor_index, ACCESSOR_GETTER);
      __ li(a0, Operand(0));
      __ Call(masm->isolate()->builtins()->CallFunction(
                  ConvertReceiverMode::kNotNullOrUndefined),
              RelocInfo::CODE_TARGET);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ pop(cp);
  }
  __ Ret();
}


// Emits code that invokes a JavaScript setter for a named property. The stub
// returns the stored value (popped into v0 below), not the setter's return
// value. When |accessor_index| < 0 the call itself is omitted and only a
// deoptimization continuation point is recorded.
void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- ra    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save context and value registers, so we can restore them later.
    __ Push(cp, value());

    if (accessor_index >= 0) {
      DCHECK(!holder.is(scratch));
      DCHECK(!receiver.is(scratch));
      DCHECK(!value().is(scratch));
      // Call the JavaScript setter with receiver and value on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ lw(scratch,
              FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ Push(receiver, value());
      // a1 <- the setter function; a0 <- argument count (one) for
      // the CallFunction builtin.
      __ LoadAccessor(a1, holder, accessor_index, ACCESSOR_SETTER);
      __ li(a0, Operand(1));
      __ Call(masm->isolate()->builtins()->CallFunction(
                  ConvertReceiverMode::kNotNullOrUndefined),
              RelocInfo::CODE_TARGET);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    // Restore context register.
    __ Pop(cp, v0);
  }
  __ Ret();
}


107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126
void PropertyHandlerCompiler::PushVectorAndSlot(Register vector,
                                                Register slot) {
  MacroAssembler* masm = this->masm();
  __ Push(vector, slot);
}


// Restores the type feedback vector and slot registers previously saved by
// PushVectorAndSlot (matching argument order).
void PropertyHandlerCompiler::PopVectorAndSlot(Register vector, Register slot) {
  this->masm()->Pop(vector, slot);
}


void PropertyHandlerCompiler::DiscardVectorAndSlot() {
  MacroAssembler* masm = this->masm();
  // Remove vector and slot.
  __ Addu(sp, sp, Operand(2 * kPointerSize));
}


// Probes the receiver's property dictionary to prove that |name| is absent.
// Jumps to |miss_label| when the receiver cannot be handled here (named
// interceptor, access checks needed, not a JS receiver, or properties not in
// dictionary form) or when the negative lookup fails.
void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
    MacroAssembler* masm, Label* miss_label, Register receiver,
    Handle<Name> name, Register scratch0, Register scratch1) {
  DCHECK(name->IsUniqueName());
  DCHECK(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  // Pre-count a miss; decremented again on the success path at the end.
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ And(scratch0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ Branch(miss_label, ne, scratch0, Operand(zero_reg));

  // Check that receiver is a JSObject.
  __ lbu(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Branch(miss_label, lt, scratch0, Operand(FIRST_JS_RECEIVER_TYPE));

  // Load properties array.
  Register properties = scratch0;
  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ lw(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ Branch(miss_label, ne, map, Operand(tmp));

  // Restore the temporarily used register.
  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));


  NameDictionaryLookupStub::GenerateNegativeLookup(
      masm, miss_label, &done, receiver, properties, name, scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}


// Loads into |result| the prototype of the global (native-context) function
// stored at |index|. |miss| is not used on this path.
void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register result, Label* miss) {
  __ LoadNativeContextSlot(index, result);
  // Load its initial map. The global functions all have initial maps.
  __ lw(result,
        FieldMemOperand(result, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ lw(result, FieldMemOperand(result, Map::kPrototypeOffset));
}


// Returns the receiver function's prototype in v0; bails out to |miss_label|
// when TryGetFunctionPrototype cannot fetch it directly.
void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(
    MacroAssembler* masm, Register receiver, Register scratch1,
    Register scratch2, Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  // The mov executes in the branch delay slot of the return.
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, scratch1);
}


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
void PropertyHandlerCompiler::GenerateCheckPropertyCell(
    MacroAssembler* masm, Handle<JSObject> global, Handle<Name> name,
    Register scratch, Label* miss) {
  Handle<PropertyCell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
  DCHECK(cell->value()->IsTheHole());
  // Refer to the cell through a weak cell so this code does not keep the
  // property cell alive; a cleared weak cell jumps to |miss|.
  Handle<WeakCell> weak_cell = masm->isolate()->factory()->NewWeakCell(cell);
  __ LoadWeakValue(scratch, weak_cell, miss);
  __ lw(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
  // Miss if the cell now holds a real value (the property was added).
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  __ Branch(miss, ne, scratch, Operand(at));
}


// Pushes the interceptor runtime-call arguments (name, receiver, holder) in
// the order fixed by the kInterceptorArgs*Index constants, asserted below.
static void PushInterceptorArguments(MacroAssembler* masm, Register receiver,
                                     Register holder, Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 1);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 2);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 3);
  __ Push(name, receiver, holder);
}


// Pushes the interceptor arguments and calls the runtime function |id|, which
// must take exactly kInterceptorArgsLength arguments (asserted below).
static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm, Register receiver, Register holder, Register name,
    Handle<JSObject> holder_obj, Runtime::FunctionId id) {
  DCHECK(NamedLoadHandlerCompiler::kInterceptorArgsLength ==
         Runtime::FunctionForId(id)->nargs);
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallRuntime(id);
}


// Generate call to api function.
// Sets up the CallApiCallbackStub ABI registers (callee, data, holder,
// function address), pushes the receiver (and the value when storing), and
// tail-calls either the template's fast handler (if present) or
// CallApiCallbackStub.
void PropertyHandlerCompiler::GenerateApiAccessorCall(
    MacroAssembler* masm, const CallOptimization& optimization,
    Handle<Map> receiver_map, Register receiver, Register scratch_in,
    bool is_store, Register store_parameter, Register accessor_holder,
    int accessor_index) {
  DCHECK(!accessor_holder.is(scratch_in));
  DCHECK(!receiver.is(scratch_in));
  __ push(receiver);
  // Write the arguments to stack frame.
  if (is_store) {
    DCHECK(!receiver.is(store_parameter));
    DCHECK(!scratch_in.is(store_parameter));
    __ push(store_parameter);
  }
  DCHECK(optimization.is_simple_api_call());

  // Abi for CallApiCallbackStub.
  Register callee = a0;
  Register data = t0;
  Register holder = a2;
  Register api_function_address = a1;

  // Put callee in place.
  __ LoadAccessor(callee, accessor_holder, accessor_index,
                  is_store ? ACCESSOR_SETTER : ACCESSOR_GETTER);

  // Put holder in place.
  CallOptimization::HolderLookup holder_lookup;
  int holder_depth = 0;
  optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup,
                                          &holder_depth);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Move(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      // Walk |holder_depth| steps up the prototype chain from the receiver.
      __ lw(holder, FieldMemOperand(receiver, HeapObject::kMapOffset));
      __ lw(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      for (int i = 1; i < holder_depth; i++) {
        __ lw(holder, FieldMemOperand(holder, HeapObject::kMapOffset));
        __ lw(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      }
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  bool call_data_undefined = false;
  // Put call data in place.
  if (api_call_info->data()->IsUndefined()) {
    call_data_undefined = true;
    __ LoadRoot(data, Heap::kUndefinedValueRootIndex);
  } else {
    // Fetch the CallHandlerInfo's data; for constant calls this is reached
    // through the callee function's shared info, otherwise directly from the
    // FunctionTemplateInfo in |callee|.
    if (optimization.is_constant_call()) {
      __ lw(data,
            FieldMemOperand(callee, JSFunction::kSharedFunctionInfoOffset));
      __ lw(data,
            FieldMemOperand(data, SharedFunctionInfo::kFunctionDataOffset));
      __ lw(data, FieldMemOperand(data, FunctionTemplateInfo::kCallCodeOffset));
    } else {
      __ lw(data,
            FieldMemOperand(callee, FunctionTemplateInfo::kCallCodeOffset));
    }
    __ lw(data, FieldMemOperand(data, CallHandlerInfo::kDataOffset));
  }

  if (api_call_info->fast_handler()->IsCode()) {
    // Just tail call into the fast handler if present.
    __ Jump(handle(Code::cast(api_call_info->fast_handler())),
            RelocInfo::CODE_TARGET);
    return;
  }
  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
  ExternalReference ref = ExternalReference(&fun, type, masm->isolate());
  __ li(api_function_address, Operand(ref));

  // Jump to stub.
  CallApiCallbackStub stub(isolate, is_store, call_data_undefined,
                           !optimization.is_constant_call());
  __ TailCallStub(&stub);
}


// Pushes the full argument set the store IC runtime entries expect:
// receiver, name, value, slot, and feedback vector.
static void StoreIC_PushArgs(MacroAssembler* masm) {
  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
          StoreDescriptor::ValueRegister(),
          VectorStoreICDescriptor::SlotRegister(),
          VectorStoreICDescriptor::VectorRegister());
}


// Tail-calls the StoreIC slow-path runtime function with the standard store
// IC arguments.
void NamedStoreHandlerCompiler::GenerateSlow(MacroAssembler* masm) {
  StoreIC_PushArgs(masm);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  __ TailCallRuntime(Runtime::kStoreIC_Slow);
}


// Tail-calls the KeyedStoreIC slow-path runtime function with the standard
// store IC arguments.
void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
  StoreIC_PushArgs(masm);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
}


#undef __
#define __ ACCESS_MASM(masm())


// Binds |label| as a restore point and reloads |name| into the name
// register; a no-op when the label was never referenced.
void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label,
                                                    Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ li(this->name(), Operand(name));
  }
}


// Unconditionally reloads |name| into the name register.
void NamedStoreHandlerCompiler::GenerateRestoreName(Handle<Name> name) {
  __ li(this->name(), Operand(name));
}


// Intentionally unimplemented on this port; must never be reached.
void NamedStoreHandlerCompiler::RearrangeVectorAndSlot(
    Register current_map, Register destination_map) {
  DCHECK(false);  // Not implemented.
}


// Loads the transition target map into |map_reg| through a weak cell (a
// cleared cell jumps to |miss|); when the map can be deprecated, also misses
// if its Deprecated bit is set.
void NamedStoreHandlerCompiler::GenerateRestoreMap(Handle<Map> transition,
                                                   Register map_reg,
                                                   Register scratch,
                                                   Label* miss) {
  Handle<WeakCell> cell = Map::WeakCellForMap(transition);
  DCHECK(!map_reg.is(scratch));
  __ LoadWeakValue(map_reg, cell, miss);
  if (transition->CanBeDeprecated()) {
    // Test the Deprecated bit in bit_field3.
    __ lw(scratch, FieldMemOperand(map_reg, Map::kBitField3Offset));
    __ And(at, scratch, Operand(Map::Deprecated::kMask));
    __ Branch(miss, ne, at, Operand(zero_reg));
  }
}


// Jumps to |miss_label| unless |value_reg| equals the constant stored at
// |descriptor| in the map's instance descriptors.
void NamedStoreHandlerCompiler::GenerateConstantCheck(Register map_reg,
                                                      int descriptor,
                                                      Register value_reg,
                                                      Register scratch,
                                                      Label* miss_label) {
  DCHECK(!map_reg.is(scratch));
  DCHECK(!map_reg.is(value_reg));
  DCHECK(!value_reg.is(scratch));
  __ LoadInstanceDescriptors(map_reg, scratch);
  __ lw(scratch,
        FieldMemOperand(scratch, DescriptorArray::GetValueOffset(descriptor)));
  __ Branch(miss_label, ne, value_reg, Operand(scratch));
}

// Checks |value_reg| against |field_type|: Smis always miss, and for class
// field types the value's map must match the expected map (compared through
// a weak cell, so a dead expected map also misses).
void NamedStoreHandlerCompiler::GenerateFieldTypeChecks(FieldType* field_type,
                                                        Register value_reg,
                                                        Label* miss_label) {
  Register map_reg = scratch1();
  Register scratch = scratch2();
  DCHECK(!value_reg.is(map_reg));
  DCHECK(!value_reg.is(scratch));
  __ JumpIfSmi(value_reg, miss_label);
  if (field_type->IsClass()) {
    __ lw(map_reg, FieldMemOperand(value_reg, HeapObject::kMapOffset));
    // Compare map directly within the Branch() functions.
    __ GetWeakValue(scratch, Map::WeakCellForMap(field_type->AsClass()));
    __ Branch(miss_label, ne, map_reg, Operand(scratch));
  }
}


// Walks the prototype chain from the receiver's map to holder()'s map,
// emitting the checks that guarantee the chain has not changed: validity-cell
// and weak-map checks when FLAG_eliminate_prototype_chain_checks is on,
// otherwise per-link map comparisons; dictionary-mode links get a negative
// lookup for |name| and global objects get an empty-property-cell check.
// On any mismatch control jumps to |miss|. Returns the register holding the
// holder (or no_reg when the caller asked for nothing).
Register PropertyHandlerCompiler::CheckPrototypes(
    Register object_reg, Register holder_reg, Register scratch1,
    Register scratch2, Handle<Name> name, Label* miss, PrototypeCheckType check,
    ReturnHolder return_what) {
  Handle<Map> receiver_map = map();

  // Make sure there's no overlap between holder and object registers.
  DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) &&
         !scratch2.is(scratch1));

  if (FLAG_eliminate_prototype_chain_checks) {
    // A single validity cell guards the whole chain; if it still holds
    // kPrototypeChainValid no per-link map checks are needed.
    Handle<Cell> validity_cell =
        Map::GetOrCreatePrototypeChainValidityCell(receiver_map, isolate());
    if (!validity_cell.is_null()) {
      DCHECK_EQ(Smi::FromInt(Map::kPrototypeChainValid),
                validity_cell->value());
      __ li(scratch1, Operand(validity_cell));
      __ lw(scratch1, FieldMemOperand(scratch1, Cell::kValueOffset));
      __ Branch(miss, ne, scratch1,
                Operand(Smi::FromInt(Map::kPrototypeChainValid)));
    }

    // The prototype chain of primitives (and their JSValue wrappers) depends
    // on the native context, which can't be guarded by validity cells.
    // |object_reg| holds the native context specific prototype in this case;
    // we need to check its map.
    if (check == CHECK_ALL_MAPS) {
      __ lw(scratch1, FieldMemOperand(object_reg, HeapObject::kMapOffset));
      Handle<WeakCell> cell = Map::WeakCellForMap(receiver_map);
      __ GetWeakValue(scratch2, cell);
      __ Branch(miss, ne, scratch1, Operand(scratch2));
    }
  }

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (receiver_map->IsJSGlobalObjectMap()) {
    current = isolate()->global_object();
  }

  // Check access rights to the global object.  This has to happen after
  // the map check so that we know that the object is actually a global
  // object.
  // This allows us to install generated handlers for accesses to the
  // global proxy (as opposed to using slow ICs). See corresponding code
  // in LookupForRead().
  if (receiver_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch2, miss);
  }

  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder()->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    DCHECK(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap()) {
      DCHECK(!current_map->IsJSGlobalProxyMap());  // Proxy maps are fast.
      if (!name->IsUniqueName()) {
        DCHECK(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      DCHECK(current.is_null() ||
             current->property_dictionary()->FindEntry(name) ==
                 NameDictionary::kNotFound);

      if (FLAG_eliminate_prototype_chain_checks && depth > 1) {
        // TODO(jkummerow): Cache and re-use weak cell.
        __ LoadWeakValue(reg, isolate()->factory()->NewWeakCell(current), miss);
      }
      GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1,
                                       scratch2);
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
        __ lw(holder_reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
      }
    } else {
      Register map_reg = scratch1;
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ lw(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
      }
      if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current),
                                  name, scratch2, miss);
      } else if (!FLAG_eliminate_prototype_chain_checks &&
                 (depth != 1 || check == CHECK_ALL_MAPS)) {
        Handle<WeakCell> cell = Map::WeakCellForMap(current_map);
        __ GetWeakValue(scratch2, cell);
        __ Branch(miss, ne, scratch2, Operand(map_reg));
      }
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ lw(holder_reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
      }
    }

    reg = holder_reg;  // From now on the object will be in holder_reg.
    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  DCHECK(!current_map->IsJSGlobalProxyMap());

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (!FLAG_eliminate_prototype_chain_checks &&
      (depth != 0 || check == CHECK_ALL_MAPS)) {
    // Check the holder map.
    __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
    Handle<WeakCell> cell = Map::WeakCellForMap(current_map);
    __ GetWeakValue(scratch2, cell);
    __ Branch(miss, ne, scratch2, Operand(scratch1));
  }

  bool return_holder = return_what == RETURN_HOLDER;
  if (FLAG_eliminate_prototype_chain_checks && return_holder && depth != 0) {
    __ LoadWeakValue(reg, isolate()->factory()->NewWeakCell(current), miss);
  }

  // Return the register containing the holder.
  return return_holder ? reg : no_reg;
}


// Emits the shared miss path for load handlers: restore the vector/slot (when
// the IC uses a vector) and tail-call the miss builtin. Fall-through code
// branches around the miss path. A no-op when |miss| was never referenced.
void NamedLoadHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ Branch(&success);
    __ bind(miss);
    if (IC::ICUseVector(kind())) {
      DCHECK(kind() == Code::LOAD_IC);
      PopVectorAndSlot();
    }
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


// Emits the shared miss path for store handlers: restore the name register,
// restore the vector/slot (when the IC uses a vector), and tail-call the miss
// builtin. A no-op when |miss| was never referenced.
void NamedStoreHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ Branch(&success);
    GenerateRestoreName(miss, name);
    if (IC::ICUseVector(kind())) PopVectorAndSlot();
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


// Emits code that returns |value| (in v0) as the load result.
void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ li(v0, value);
  __ Ret();
}


// Tail-calls the native getter in |callback| for a property held by |reg|.
// Builds the v8::PropertyCallbackInfo::args_ array (plus the property name)
// on the stack, then jumps to CallApiGetterStub.
void NamedLoadHandlerCompiler::GenerateLoadCallback(
    Register reg, Handle<AccessorInfo> callback) {
  DCHECK(!AreAliased(scratch2(), scratch3(), scratch4(), receiver()));
  DCHECK(!AreAliased(scratch2(), scratch3(), scratch4(), reg));

  // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
  // name below the exit frame to make GC aware of them.
  STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);

  // Here and below +1 is for name() pushed after the args_ array.
  typedef PropertyCallbackArguments PCA;
  __ Subu(sp, sp, (PCA::kArgsLength + 1) * kPointerSize);
  __ sw(receiver(), MemOperand(sp, (PCA::kThisIndex + 1) * kPointerSize));
  Handle<Object> data(callback->data(), isolate());
  if (data->IsUndefined() || data->IsSmi()) {
    __ li(scratch2(), data);
  } else {
    Handle<WeakCell> cell =
        isolate()->factory()->NewWeakCell(Handle<HeapObject>::cast(data));
    // The callback is alive if this instruction is executed,
    // so the weak cell is not cleared and points to data.
    __ GetWeakValue(scratch2(), cell);
  }
  __ sw(scratch2(), MemOperand(sp, (PCA::kDataIndex + 1) * kPointerSize));
  __ LoadRoot(scratch2(), Heap::kUndefinedValueRootIndex);
  __ sw(scratch2(),
        MemOperand(sp, (PCA::kReturnValueOffset + 1) * kPointerSize));
  __ sw(scratch2(), MemOperand(sp, (PCA::kReturnValueDefaultValueIndex + 1) *
                                       kPointerSize));
  __ li(scratch2(), Operand(ExternalReference::isolate_address(isolate())));
  __ sw(scratch2(), MemOperand(sp, (PCA::kIsolateIndex + 1) * kPointerSize));
  __ sw(reg, MemOperand(sp, (PCA::kHolderIndex + 1) * kPointerSize));
  // should_throw_on_error -> false
  DCHECK(Smi::FromInt(0) == nullptr);
  __ sw(zero_reg,
        MemOperand(sp, (PCA::kShouldThrowOnErrorIndex + 1) * kPointerSize));

  __ sw(name(), MemOperand(sp, 0 * kPointerSize));

  // Abi for CallApiGetter.
  Register getter_address_reg = ApiGetterDescriptor::function_address();

  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);
  ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL;
  ExternalReference ref = ExternalReference(&fun, type, isolate());
  __ li(getter_address_reg, Operand(ref));

  CallApiGetterStub stub(isolate());
  __ TailCallStub(&stub);
}


// Calls the named-property interceptor and returns its result if it produced
// one; otherwise falls through to GenerateLoadPostInterceptor to continue the
// load further up the prototype chain.
void NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup(
    LookupIterator* it, Register holder_reg) {
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());

  // Compile the interceptor call, followed by inline code to load the
  // property from further up the prototype chain if the call fails.
  // Check that the maps haven't changed.
  DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

  // Preserve the receiver register explicitly whenever it is different from the
  // holder and it is needed should the interceptor return without any result.
  // The ACCESSOR case needs the receiver to be passed into C++ code, the FIELD
  // case might cause a miss during the prototype check.
  bool must_perform_prototype_check =
      !holder().is_identical_to(it->GetHolder<JSObject>());
  bool must_preserve_receiver_reg =
      !receiver().is(holder_reg) &&
      (it->state() == LookupIterator::ACCESSOR || must_perform_prototype_check);

  // Save necessary data before invoking an interceptor.
  // Requires a frame to make GC aware of pushed pointers.
  {
    FrameScope frame_scope(masm(), StackFrame::INTERNAL);
    if (must_preserve_receiver_reg) {
      __ Push(receiver(), holder_reg, this->name());
    } else {
      __ Push(holder_reg, this->name());
    }
    InterceptorVectorSlotPush(holder_reg);
    // Invoke an interceptor.  Note: map checks from receiver to
    // interceptor's holder has been compiled before (see a caller
    // of this method).
    CompileCallLoadPropertyWithInterceptor(
        masm(), receiver(), holder_reg, this->name(), holder(),
        Runtime::kLoadPropertyWithInterceptorOnly);

    // Check if interceptor provided a value for property.  If it's
    // the case, return immediately.
    Label interceptor_failed;
    __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
    __ Branch(&interceptor_failed, eq, v0, Operand(scratch1()));
    frame_scope.GenerateLeaveFrame();
    __ Ret();

    __ bind(&interceptor_failed);
    InterceptorVectorSlotPop(holder_reg);
    if (must_preserve_receiver_reg) {
      __ Pop(receiver(), holder_reg, this->name());
    } else {
      __ Pop(holder_reg, this->name());
    }
    // Leave the internal frame.
  }

  GenerateLoadPostInterceptor(it, holder_reg);
}


// Unconditionally tail-calls the full interceptor load runtime function; the
// runtime performs the interceptor call and any follow-up lookup.
void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg) {
  // Call the runtime system to load the interceptor.
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());
  PushInterceptorArguments(masm(), receiver(), holder_reg, this->name(),
                           holder());

  __ TailCallRuntime(Runtime::kLoadPropertyWithInterceptor);
}


// Compiles a store handler that forwards the store to an AccessorInfo
// callback via the Runtime::kStoreCallbackProperty runtime function.
Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
    Handle<JSObject> object, Handle<Name> name, Handle<AccessorInfo> callback,
    LanguageMode language_mode) {
  Register holder_reg = Frontend(name);

  __ Push(receiver(), holder_reg);  // Receiver.
  // If the callback cannot leak, then push the callback directly,
  // otherwise wrap it in a weak cell.
  if (callback->data()->IsUndefined() || callback->data()->IsSmi()) {
    __ li(at, Operand(callback));
  } else {
    Handle<WeakCell> cell = isolate()->factory()->NewWeakCell(callback);
    __ li(at, Operand(cell));
  }
  __ push(at);
  __ li(at, Operand(name));
  __ Push(at, value());
  __ Push(Smi::FromInt(language_mode));

  // Do tail-call to the runtime system.
  __ TailCallRuntime(Runtime::kStoreCallbackProperty);

  // Return the generated code.
  return GetCode(kind(), name);
}


// The register in which the value being stored arrives, per the platform
// store calling convention.
Register NamedStoreHandlerCompiler::value() {
  return StoreDescriptor::ValueRegister();
}


// Compiles a load handler that reads a global property straight out of its
// PropertyCell. Misses when the weak reference to the cell is cleared or,
// for configurable properties, when the cell holds the hole (deleted).
Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal(
    Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) {
  Label miss;
  if (IC::ICUseVector(kind())) {
    PushVectorAndSlot();
  }

  FrontendHeader(receiver(), name, &miss, DONT_RETURN_ANYTHING);

  // Get the value from the cell.
  Register result = StoreDescriptor::ValueRegister();
  // Reach the cell through a weak cell so this code does not keep it alive.
  Handle<WeakCell> weak_cell = factory()->NewWeakCell(cell);
  __ LoadWeakValue(result, weak_cell, &miss);
  __ lw(result, FieldMemOperand(result, PropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (is_configurable) {
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ Branch(&miss, eq, result, Operand(at));
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->ic_named_load_global_stub(), 1, a1, a3);
  if (IC::ICUseVector(kind())) {
    DiscardVectorAndSlot();
  }
  // Move the result into v0 in the branch delay slot of the return.
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, result);

  FrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), name);
}


#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS