// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM

#include "src/ic/handler-compiler.h"

#include "src/api-arguments.h"
#include "src/assembler-inl.h"
#include "src/field-type.h"
#include "src/ic/call-optimization.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

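// Generates the deoptimization-only continuation for a load via a JavaScript
// getter: it records the PC offset at which execution resumes after the
// deopt and restores the context register.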
void NamedLoadHandlerCompiler::GenerateLoadViaGetterForDeopt(
    MacroAssembler* masm) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // If we generate a global code snippet for deoptimization only, remember
    // the place to continue after deoptimization.
    masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    // Restore context register.
    __ pop(cp);
  }
  __ Ret();
}


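// Generates a stub that stores a property by calling its JavaScript setter:
// the context and value registers are saved, receiver and value are pushed as
// call arguments, and the setter is invoked through the CallFunction builtin.
// With a negative accessor_index the body only records the deopt
// continuation PC.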
void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Save context register.
    __ push(cp);
    // Save value register, so we can restore it later.
    __ push(value());

    if (accessor_index >= 0) {
      DCHECK(!holder.is(scratch));
      DCHECK(!receiver.is(scratch));
      DCHECK(!value().is(scratch));
      // Call the JavaScript setter with receiver and value on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ ldr(scratch,
               FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ Push(receiver, value());
      __ LoadAccessor(r1, holder, accessor_index, ACCESSOR_SETTER);
      __ mov(r0, Operand(1));
      __ Call(masm->isolate()->builtins()->CallFunction(
                  ConvertReceiverMode::kNotNullOrUndefined),
              RelocInfo::CODE_TARGET);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ pop(r0);

    // Restore context register.
    __ pop(cp);
  }
  __ Ret();
}


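// Spills the feedback slot and vector to the stack (slot pushed first, vector
// on top); PopVectorAndSlot and DiscardVectorAndSlot below undo this.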
void PropertyHandlerCompiler::PushVectorAndSlot(Register vector,
                                                Register slot) {
  MacroAssembler* masm = this->masm();
  STATIC_ASSERT(LoadWithVectorDescriptor::kSlot <
                LoadWithVectorDescriptor::kVector);
  STATIC_ASSERT(StoreWithVectorDescriptor::kSlot <
                StoreWithVectorDescriptor::kVector);
  STATIC_ASSERT(StoreTransitionDescriptor::kSlot <
                StoreTransitionDescriptor::kVector);
  __ push(slot);
  __ push(vector);
}


void PropertyHandlerCompiler::PopVectorAndSlot(Register vector, Register slot) {
  MacroAssembler* masm = this->masm();
  __ pop(vector);
  __ pop(slot);
}


void PropertyHandlerCompiler::DiscardVectorAndSlot() {
  MacroAssembler* masm = this->masm();
  // Remove vector and slot.
  __ add(sp, sp, Operand(2 * kPointerSize));
}

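// Proves that |name| is not a property of |receiver| when the receiver holds
// its properties in a dictionary: jumps to |miss_label| for receivers with a
// named interceptor, an access check, a non-JSReceiver instance type or a
// non-dictionary backing store, and otherwise emits a negative dictionary
// lookup via NameDictionaryLookupStub.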
void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
    MacroAssembler* masm, Label* miss_label, Register receiver,
    Handle<Name> name, Register scratch0, Register scratch1) {
  DCHECK(name->IsUniqueName());
  DCHECK(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ tst(scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ b(ne, miss_label);

  // Check that the receiver is at least a JSReceiver.
  __ ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ cmp(scratch0, Operand(FIRST_JS_RECEIVER_TYPE));
  __ b(lt, miss_label);

  // Load properties array.
  Register properties = scratch0;
  __ ldr(properties,
         FieldMemOperand(receiver, JSObject::kPropertiesOrHashOffset));
  // Check that the properties array is a dictionary.
  __ ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ cmp(map, tmp);
  __ b(ne, miss_label);

  // Restore the temporarily used register.
  __ ldr(properties,
         FieldMemOperand(receiver, JSObject::kPropertiesOrHashOffset));

  NameDictionaryLookupStub::GenerateNegativeLookup(
      masm, miss_label, &done, receiver, properties, name, scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}

// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
void PropertyHandlerCompiler::GenerateCheckPropertyCell(
    MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
    Register scratch, Label* miss) {
  Handle<PropertyCell> cell = JSGlobalObject::EnsureEmptyPropertyCell(
      global, name, PropertyCellType::kInvalidated);
  Isolate* isolate = masm->isolate();
  DCHECK(cell->value()->IsTheHole(isolate));
  Handle<WeakCell> weak_cell = isolate->factory()->NewWeakCell(cell);
  __ LoadWeakValue(scratch, weak_cell, miss);
  __ ldr(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
  __ CompareRoot(scratch, Heap::kTheHoleValueRootIndex);
  __ b(ne, miss);
}

// Generates a tail call to an API accessor (getter or setter) via
// CallApiCallbackStub.
void PropertyHandlerCompiler::GenerateApiAccessorCall(
    MacroAssembler* masm, const CallOptimization& optimization,
    Handle<Map> receiver_map, Register receiver, Register scratch_in,
    bool is_store, Register store_parameter, Register accessor_holder,
    int accessor_index) {
  DCHECK(!accessor_holder.is(scratch_in));
  DCHECK(!receiver.is(scratch_in));
  __ push(accessor_holder);
  __ push(receiver);
  // Write the arguments to the stack frame.
  if (is_store) {
    DCHECK(!receiver.is(store_parameter));
    DCHECK(!scratch_in.is(store_parameter));
    __ push(store_parameter);
  }
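  // Stack layout at this point (deepest first): accessor_holder, receiver
  // and, for stores, the value being stored.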
  DCHECK(optimization.is_simple_api_call());

  // ABI for CallApiCallbackStub.
  Register callee = r0;
  Register data = r4;
  Register holder = r2;
  Register api_function_address = r1;

  // Put callee in place.
  __ LoadAccessor(callee, accessor_holder, accessor_index,
                  is_store ? ACCESSOR_SETTER : ACCESSOR_GETTER);

  // Put holder in place.
  CallOptimization::HolderLookup holder_lookup;
  optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Move(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      __ ldr(holder, FieldMemOperand(receiver, HeapObject::kMapOffset));
      __ ldr(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  // Put call data in place.
  if (api_call_info->data()->IsUndefined(isolate)) {
    __ LoadRoot(data, Heap::kUndefinedValueRootIndex);
  } else {
    if (optimization.is_constant_call()) {
      __ ldr(data,
             FieldMemOperand(callee, JSFunction::kSharedFunctionInfoOffset));
      __ ldr(data,
             FieldMemOperand(data, SharedFunctionInfo::kFunctionDataOffset));
      __ ldr(data,
             FieldMemOperand(data, FunctionTemplateInfo::kCallCodeOffset));
    } else {
      __ ldr(data,
             FieldMemOperand(callee, FunctionTemplateInfo::kCallCodeOffset));
    }
    __ ldr(data, FieldMemOperand(data, CallHandlerInfo::kDataOffset));
  }

  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
  ExternalReference ref = ExternalReference(&fun, type, masm->isolate());
  __ mov(api_function_address, Operand(ref));

  // Jump to stub.
  CallApiCallbackStub stub(isolate, is_store, !optimization.is_constant_call());
  __ TailCallStub(&stub);
}

#undef __
#define __ ACCESS_MASM(masm())


void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label,
                                                    Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ mov(this->name(), Operand(name));
  }
}

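// Checks that the current native context may access the handler's native
// context: the contexts must be identical or, unless
// compare_native_contexts_only is set, share the same security token.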
void PropertyHandlerCompiler::GenerateAccessCheck(
    Handle<WeakCell> native_context_cell, Register scratch1, Register scratch2,
    Label* miss, bool compare_native_contexts_only) {
  Label done;
  // Load current native context.
  __ ldr(scratch1, NativeContextMemOperand());
  // Load expected native context.
  __ LoadWeakValue(scratch2, native_context_cell, miss);
  __ cmp(scratch1, scratch2);

  if (!compare_native_contexts_only) {
    __ b(eq, &done);

    // Compare security tokens of current and expected native contexts.
    __ ldr(scratch1,
           ContextMemOperand(scratch1, Context::SECURITY_TOKEN_INDEX));
    __ ldr(scratch2,
           ContextMemOperand(scratch2, Context::SECURITY_TOKEN_INDEX));
    __ cmp(scratch1, scratch2);
  }
  __ b(ne, miss);

  __ bind(&done);
}

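// Emits the receiver/prototype chain checks for this handler: a prototype
// chain validity cell check when one exists, a global property cell check for
// each global object on the chain, and a negative dictionary lookup for each
// dictionary-mode prototype. Returns the register holding the holder when the
// walk finishes.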
Register PropertyHandlerCompiler::CheckPrototypes(
    Register object_reg, Register holder_reg, Register scratch1,
    Register scratch2, Handle<Name> name, Label* miss) {
  Handle<Map> receiver_map = map();

  // Make sure there's no overlap between holder and object registers.
  DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) &&
         !scratch2.is(scratch1));

  Handle<Cell> validity_cell =
      Map::GetOrCreatePrototypeChainValidityCell(receiver_map, isolate());
  if (!validity_cell.is_null()) {
    DCHECK_EQ(Smi::FromInt(Map::kPrototypeChainValid), validity_cell->value());
    __ mov(scratch1, Operand(validity_cell));
    __ ldr(scratch1, FieldMemOperand(scratch1, Cell::kValueOffset));
    __ cmp(scratch1, Operand(Smi::FromInt(Map::kPrototypeChainValid)));
    __ b(ne, miss);
  }

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (receiver_map->IsJSGlobalObjectMap()) {
    current = isolate()->global_object();
  }

  Handle<Map> current_map(receiver_map->GetPrototypeChainRootMap(isolate()),
                          isolate());
  Handle<Map> holder_map(holder()->map());
  // Traverse the prototype chain: check the maps of fast and global objects,
  // and do a negative lookup for normal (dictionary-mode) objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    if (current_map->IsJSGlobalObjectMap()) {
      GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current),
                                name, scratch2, miss);
    } else if (current_map->is_dictionary_map()) {
      DCHECK(!current_map->IsJSGlobalProxyMap());  // Proxy maps are fast.
      DCHECK(name->IsUniqueName());
      DCHECK(current.is_null() ||
             current->property_dictionary()->FindEntry(name) ==
                 NameDictionary::kNotFound);

      if (depth > 1) {
        Handle<WeakCell> weak_cell =
            Map::GetOrCreatePrototypeWeakCell(current, isolate());
        __ LoadWeakValue(reg, weak_cell, miss);
      }
      GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1,
                                       scratch2);
    }

    reg = holder_reg;  // From now on the object will be in holder_reg.
    // Go to the next object in the prototype chain.
    current = handle(JSObject::cast(current_map->prototype()));
    current_map = handle(current->map());
  }

  DCHECK(!current_map->IsJSGlobalProxyMap());

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (depth != 0) {
    Handle<WeakCell> weak_cell =
        Map::GetOrCreatePrototypeWeakCell(current, isolate());
    __ LoadWeakValue(reg, weak_cell, miss);
  }

  // Return the register containing the holder.
  return reg;
}


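// Shared miss handling for load handlers: on a miss the spilled vector and
// slot are popped and control tail-calls the LOAD_IC miss builtin.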
void NamedLoadHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ b(&success);
    __ bind(miss);
    DCHECK(kind() == Code::LOAD_IC);
    PopVectorAndSlot();
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


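// Shared miss handling for store handlers: restores the name register, pops
// the spilled vector and slot and tail-calls the miss builtin.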
void NamedStoreHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ b(&success);
    GenerateRestoreName(miss, name);
    PopVectorAndSlot();
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}

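// Nothing to zap on ARM: the store descriptor passes no arguments on the
// stack, which is what the STATIC_ASSERT below checks.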
void NamedStoreHandlerCompiler::ZapStackArgumentsRegisterAliases() {
  STATIC_ASSERT(!StoreWithVectorDescriptor::kPassLastArgsOnStack);
}

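// Compiles a store handler that defers to an AccessorInfo callback: pushes
// the receiver, holder, callback (wrapped in a weak cell if it can leak),
// name, value and language mode, then tail-calls
// Runtime::kStoreCallbackProperty.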
Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
    Handle<JSObject> object, Handle<Name> name, Handle<AccessorInfo> callback,
    LanguageMode language_mode) {
  Register holder_reg = Frontend(name);

  __ push(receiver());  // receiver
  __ push(holder_reg);

  {
    UseScratchRegisterScope temps(masm());
    Register scratch = temps.Acquire();

    // If the callback cannot leak, then push the callback directly,
    // otherwise wrap it in a weak cell.
    if (callback->data()->IsUndefined(isolate()) || callback->data()->IsSmi()) {
      __ mov(scratch, Operand(callback));
    } else {
      Handle<WeakCell> cell = isolate()->factory()->NewWeakCell(callback);
      __ mov(scratch, Operand(cell));
    }
    __ push(scratch);
    __ mov(scratch, Operand(name));
    __ Push(scratch, value());
  }
  __ Push(Smi::FromInt(language_mode));

  // Do tail-call to the runtime system.
  __ TailCallRuntime(Runtime::kStoreCallbackProperty);

  // Return the generated code.
  return GetCode(kind(), name);
}


Register NamedStoreHandlerCompiler::value() {
  return StoreDescriptor::ValueRegister();
}


#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM