// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/api.h"
#include "src/api-natives.h"
#include "src/arguments.h"
#include "src/base/once.h"
#include "src/bootstrapper.h"
#include "src/builtins.h"
#include "src/cpu-profiler.h"
#include "src/gdb-jit.h"
#include "src/heap/mark-compact.h"
#include "src/heap-profiler.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/prototype.h"
#include "src/vm-state-inl.h"

namespace v8 {
namespace internal {

namespace {

// Arguments object passed to C++ builtins.
template <BuiltinExtraArguments extra_args>
class BuiltinArguments : public Arguments {
 public:
  BuiltinArguments(int length, Object** arguments)
      : Arguments(length, arguments) { }

  Object*& operator[] (int index) {
    DCHECK(index < length());
    return Arguments::operator[](index);
  }

  template <class S> Handle<S> at(int index) {
    DCHECK(index < length());
    return Arguments::at<S>(index);
  }

  Handle<Object> receiver() {
    return Arguments::at<Object>(0);
  }

  Handle<JSFunction> called_function() {
    STATIC_ASSERT(extra_args == NEEDS_CALLED_FUNCTION);
    return Arguments::at<JSFunction>(Arguments::length() - 1);
  }

  // Gets the total number of arguments including the receiver (but
  // excluding extra arguments).
  int length() const {
    STATIC_ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
    return Arguments::length();
  }

#ifdef DEBUG
  void Verify() {
    // Check we have at least the receiver.
    DCHECK(Arguments::length() >= 1);
  }
#endif
};


// Specialize BuiltinArguments for the called function extra argument.

template <>
int BuiltinArguments<NEEDS_CALLED_FUNCTION>::length() const {
  return Arguments::length() - 1;
}

#ifdef DEBUG
template <>
void BuiltinArguments<NEEDS_CALLED_FUNCTION>::Verify() {
  // Check we have at least the receiver and the called function.
  DCHECK(Arguments::length() >= 2);
  // Make sure cast to JSFunction succeeds.
  called_function();
}
#endif


#define DEF_ARG_TYPE(name, spec)                      \
  typedef BuiltinArguments<spec> name##ArgumentsType;
BUILTIN_LIST_C(DEF_ARG_TYPE)
#undef DEF_ARG_TYPE
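// As an illustration (assuming the Illegal builtin is registered in
// BUILTIN_LIST_C with NO_EXTRA_ARGUMENTS), an entry V(Illegal,
// NO_EXTRA_ARGUMENTS) expands here to:
//
//   typedef BuiltinArguments<NO_EXTRA_ARGUMENTS> IllegalArgumentsType;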

}  // namespace

// ----------------------------------------------------------------------------
// Support macro for defining builtins in C++.
// ----------------------------------------------------------------------------
//
// A builtin function is defined by writing:
//
//   BUILTIN(name) {
//     ...
//   }
//
// In the body of the builtin function the arguments can be accessed
// through the BuiltinArguments object args.
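//
// As an illustrative sketch only (this builtin does not exist; a real one
// must also be declared in BUILTIN_LIST_C in builtins.h so that its
// arguments type and adaptor are generated), a builtin returning its own
// argument count could be written as:
//
//   BUILTIN(HypotheticalArgumentCount) {
//     HandleScope scope(isolate);
//     // args.length() includes the receiver, so subtract it out.
//     return Smi::FromInt(args.length() - 1);
//   }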

#ifdef DEBUG

#define BUILTIN(name)                                            \
  MUST_USE_RESULT static Object* Builtin_Impl_##name(            \
      name##ArgumentsType args, Isolate* isolate);               \
  MUST_USE_RESULT static Object* Builtin_##name(                 \
      int args_length, Object** args_object, Isolate* isolate) { \
    name##ArgumentsType args(args_length, args_object);          \
    args.Verify();                                               \
    return Builtin_Impl_##name(args, isolate);                   \
  }                                                              \
  MUST_USE_RESULT static Object* Builtin_Impl_##name(            \
      name##ArgumentsType args, Isolate* isolate)

#else  // For release mode.

#define BUILTIN(name)                                            \
  static Object* Builtin_impl##name(                             \
      name##ArgumentsType args, Isolate* isolate);               \
  static Object* Builtin_##name(                                 \
      int args_length, Object** args_object, Isolate* isolate) { \
    name##ArgumentsType args(args_length, args_object);          \
    return Builtin_impl##name(args, isolate);                    \
  }                                                              \
  static Object* Builtin_impl##name(                             \
      name##ArgumentsType args, Isolate* isolate)
#endif


#ifdef DEBUG
static inline bool CalledAsConstructor(Isolate* isolate) {
  // Calculate the result using a full stack frame iterator and check
  // that the state of the stack is as we assume it to be in the
  // code below.
  StackFrameIterator it(isolate);
  DCHECK(it.frame()->is_exit());
  it.Advance();
  StackFrame* frame = it.frame();
  bool reference_result = frame->is_construct();
  Address fp = Isolate::c_entry_fp(isolate->thread_local_top());
  // Because we know fp points to an exit frame we can use the relevant
  // part of ExitFrame::ComputeCallerState directly.
  const int kCallerOffset = ExitFrameConstants::kCallerFPOffset;
  Address caller_fp = Memory::Address_at(fp + kCallerOffset);
  // This inlines the part of StackFrame::ComputeType that grabs the
  // type of the current frame.  Note that StackFrame::ComputeType
  // has been specialized for each architecture so if any one of them
  // changes this code has to be changed as well.
  const int kMarkerOffset = StandardFrameConstants::kMarkerOffset;
  const Smi* kConstructMarker = Smi::FromInt(StackFrame::CONSTRUCT);
  Object* marker = Memory::Object_at(caller_fp + kMarkerOffset);
  bool result = (marker == kConstructMarker);
  DCHECK_EQ(result, reference_result);
  return result;
}
#endif


// ----------------------------------------------------------------------------

BUILTIN(Illegal) {
  UNREACHABLE();
  return isolate->heap()->undefined_value();  // Make compiler happy.
}


BUILTIN(EmptyFunction) {
  return isolate->heap()->undefined_value();
}


static void MoveDoubleElements(FixedDoubleArray* dst, int dst_index,
                               FixedDoubleArray* src, int src_index, int len) {
  if (len == 0) return;
  MemMove(dst->data_start() + dst_index, src->data_start() + src_index,
          len * kDoubleSize);
}


static bool ArrayPrototypeHasNoElements(Heap* heap, PrototypeIterator* iter) {
  DisallowHeapAllocation no_gc;
  for (; !iter->IsAtEnd(); iter->Advance()) {
    if (iter->GetCurrent()->IsJSProxy()) return false;
    if (JSObject::cast(iter->GetCurrent())->elements() !=
        heap->empty_fixed_array()) {
      return false;
    }
  }
  return true;
}


static inline bool IsJSArrayFastElementMovingAllowed(Heap* heap,
                                                     JSArray* receiver) {
  DisallowHeapAllocation no_gc;
  PrototypeIterator iter(heap->isolate(), receiver);
  return ArrayPrototypeHasNoElements(heap, &iter);
}


// Returns empty handle if not applicable.
MUST_USE_RESULT
static inline MaybeHandle<FixedArrayBase> EnsureJSArrayWithWritableFastElements(
    Isolate* isolate,
    Handle<Object> receiver,
    Arguments* args,
    int first_added_arg) {
  if (!receiver->IsJSArray()) return MaybeHandle<FixedArrayBase>();
  Handle<JSArray> array = Handle<JSArray>::cast(receiver);
  // If there may be elements accessors in the prototype chain, the fast path
  // cannot be used if there are arguments to add to the array.
  Heap* heap = isolate->heap();
  if (args != NULL && !IsJSArrayFastElementMovingAllowed(heap, *array)) {
    return MaybeHandle<FixedArrayBase>();
  }
  if (array->map()->is_observed()) return MaybeHandle<FixedArrayBase>();
  if (!array->map()->is_extensible()) return MaybeHandle<FixedArrayBase>();
  Handle<FixedArrayBase> elms(array->elements(), isolate);
  Map* map = elms->map();
  if (map == heap->fixed_array_map()) {
    if (args == NULL || array->HasFastObjectElements()) return elms;
  } else if (map == heap->fixed_cow_array_map()) {
    elms = JSObject::EnsureWritableFastElements(array);
    if (args == NULL || array->HasFastObjectElements()) return elms;
  } else if (map == heap->fixed_double_array_map()) {
    if (args == NULL) return elms;
  } else {
    return MaybeHandle<FixedArrayBase>();
  }

  // Need to ensure that the arguments passed in args can be contained in
  // the array.
  int args_length = args->length();
  if (first_added_arg >= args_length) return handle(array->elements(), isolate);

  ElementsKind origin_kind = array->map()->elements_kind();
  DCHECK(!IsFastObjectElementsKind(origin_kind));
  ElementsKind target_kind = origin_kind;
  {
    DisallowHeapAllocation no_gc;
    int arg_count = args->length() - first_added_arg;
    Object** arguments = args->arguments() - first_added_arg - (arg_count - 1);
    for (int i = 0; i < arg_count; i++) {
      Object* arg = arguments[i];
      if (arg->IsHeapObject()) {
        if (arg->IsHeapNumber()) {
          target_kind = FAST_DOUBLE_ELEMENTS;
        } else {
          target_kind = FAST_ELEMENTS;
          break;
        }
      }
    }
  }
  if (target_kind != origin_kind) {
    JSObject::TransitionElementsKind(array, target_kind);
    return handle(array->elements(), isolate);
  }
  return elms;
}


MUST_USE_RESULT static Object* CallJsBuiltin(
    Isolate* isolate,
    const char* name,
    BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
  HandleScope handleScope(isolate);

  Handle<Object> js_builtin = Object::GetProperty(
      isolate,
      handle(isolate->native_context()->builtins(), isolate),
      name).ToHandleChecked();
  Handle<JSFunction> function = Handle<JSFunction>::cast(js_builtin);
  int argc = args.length() - 1;
  ScopedVector<Handle<Object> > argv(argc);
  for (int i = 0; i < argc; ++i) {
    argv[i] = args.at<Object>(i + 1);
  }
  Handle<Object> result;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, result,
      Execution::Call(isolate,
                      function,
                      args.receiver(),
                      argc,
                      argv.start()));
  return *result;
}


BUILTIN(ArrayPush) {
  HandleScope scope(isolate);
  Handle<Object> receiver = args.receiver();
  MaybeHandle<FixedArrayBase> maybe_elms_obj =
      EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 1);
  Handle<FixedArrayBase> elms_obj;
  if (!maybe_elms_obj.ToHandle(&elms_obj)) {
    return CallJsBuiltin(isolate, "ArrayPush", args);
  }

  Handle<JSArray> array = Handle<JSArray>::cast(receiver);
  int len = Smi::cast(array->length())->value();
  int to_add = args.length() - 1;
  if (to_add > 0 && JSArray::WouldChangeReadOnlyLength(array, len + to_add)) {
    return CallJsBuiltin(isolate, "ArrayPush", args);
  }
  DCHECK(!array->map()->is_observed());

  ElementsKind kind = array->GetElementsKind();

  if (IsFastSmiOrObjectElementsKind(kind)) {
    Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
    if (to_add == 0) {
      return Smi::FromInt(len);
    }
    // Currently fixed arrays cannot grow too big, so
    // we should never hit this case.
    DCHECK(to_add <= (Smi::kMaxValue - len));

    int new_length = len + to_add;

    if (new_length > elms->length()) {
      // New backing storage is needed.
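      // Grow to roughly 1.5 times the new length plus a small constant so
      // that a sequence of pushes has amortized cost.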
      int capacity = new_length + (new_length >> 1) + 16;
      Handle<FixedArray> new_elms =
          isolate->factory()->NewUninitializedFixedArray(capacity);

      ElementsAccessor* accessor = array->GetElementsAccessor();
      accessor->CopyElements(
          elms_obj, 0, kind, new_elms, 0,
          ElementsAccessor::kCopyToEndAndInitializeToHole);

      elms = new_elms;
    }

    // Add the provided values.
    DisallowHeapAllocation no_gc;
    WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
    for (int index = 0; index < to_add; index++) {
      elms->set(index + len, args[index + 1], mode);
    }

    if (*elms != array->elements()) {
      array->set_elements(*elms);
    }

    // Set the length.
    array->set_length(Smi::FromInt(new_length));
    return Smi::FromInt(new_length);
  } else {
    int elms_len = elms_obj->length();
    if (to_add == 0) {
      return Smi::FromInt(len);
    }
    // Currently fixed arrays cannot grow too big, so
    // we should never hit this case.
    DCHECK(to_add <= (Smi::kMaxValue - len));

    int new_length = len + to_add;

    Handle<FixedDoubleArray> new_elms;

    if (new_length > elms_len) {
      // New backing storage is needed.
      int capacity = new_length + (new_length >> 1) + 16;
      // Create new backing store; since capacity > 0, we can
      // safely cast to FixedDoubleArray.
      new_elms = Handle<FixedDoubleArray>::cast(
          isolate->factory()->NewFixedDoubleArray(capacity));

      ElementsAccessor* accessor = array->GetElementsAccessor();
      accessor->CopyElements(
          elms_obj, 0, kind, new_elms, 0,
          ElementsAccessor::kCopyToEndAndInitializeToHole);

    } else {
      // to_add is > 0 and new_length <= elms_len, so elms_obj cannot be the
      // empty_fixed_array.
      new_elms = Handle<FixedDoubleArray>::cast(elms_obj);
    }

    // Add the provided values.
    DisallowHeapAllocation no_gc;
    int index;
    for (index = 0; index < to_add; index++) {
      Object* arg = args[index + 1];
      new_elms->set(index + len, arg->Number());
    }

    if (*new_elms != array->elements()) {
      array->set_elements(*new_elms);
    }

    // Set the length.
    array->set_length(Smi::FromInt(new_length));
    return Smi::FromInt(new_length);
  }
}


BUILTIN(ArrayPop) {
  HandleScope scope(isolate);
  Handle<Object> receiver = args.receiver();
  MaybeHandle<FixedArrayBase> maybe_elms_obj =
      EnsureJSArrayWithWritableFastElements(isolate, receiver, NULL, 0);
  Handle<FixedArrayBase> elms_obj;
  if (!maybe_elms_obj.ToHandle(&elms_obj)) {
    return CallJsBuiltin(isolate, "ArrayPop", args);
  }

  Handle<JSArray> array = Handle<JSArray>::cast(receiver);
  DCHECK(!array->map()->is_observed());

  int len = Smi::cast(array->length())->value();
  if (len == 0) return isolate->heap()->undefined_value();

  if (JSArray::HasReadOnlyLength(array)) {
    return CallJsBuiltin(isolate, "ArrayPop", args);
  }

  ElementsAccessor* accessor = array->GetElementsAccessor();
  int new_length = len - 1;
  Handle<Object> element =
      accessor->Get(array, array, new_length, elms_obj).ToHandleChecked();
  if (element->IsTheHole()) {
    return CallJsBuiltin(isolate, "ArrayPop", args);
  }
  RETURN_FAILURE_ON_EXCEPTION(
      isolate,
      accessor->SetLength(array, handle(Smi::FromInt(new_length), isolate)));
  return *element;
}


BUILTIN(ArrayShift) {
  HandleScope scope(isolate);
  Heap* heap = isolate->heap();
  Handle<Object> receiver = args.receiver();
  MaybeHandle<FixedArrayBase> maybe_elms_obj =
      EnsureJSArrayWithWritableFastElements(isolate, receiver, NULL, 0);
  Handle<FixedArrayBase> elms_obj;
  if (!maybe_elms_obj.ToHandle(&elms_obj) ||
      !IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(*receiver))) {
    return CallJsBuiltin(isolate, "ArrayShift", args);
  }
  Handle<JSArray> array = Handle<JSArray>::cast(receiver);
  DCHECK(!array->map()->is_observed());

  int len = Smi::cast(array->length())->value();
  if (len == 0) return heap->undefined_value();

  if (JSArray::HasReadOnlyLength(array)) {
    return CallJsBuiltin(isolate, "ArrayShift", args);
  }

  // Get first element
  ElementsAccessor* accessor = array->GetElementsAccessor();
  Handle<Object> first =
    accessor->Get(array, array, 0, elms_obj).ToHandleChecked();
  if (first->IsTheHole()) {
    return CallJsBuiltin(isolate, "ArrayShift", args);
  }

  if (heap->CanMoveObjectStart(*elms_obj)) {
    array->set_elements(heap->LeftTrimFixedArray(*elms_obj, 1));
  } else {
    // Shift the elements.
    if (elms_obj->IsFixedArray()) {
      Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
      DisallowHeapAllocation no_gc;
      heap->MoveElements(*elms, 0, 1, len - 1);
      elms->set(len - 1, heap->the_hole_value());
    } else {
      Handle<FixedDoubleArray> elms = Handle<FixedDoubleArray>::cast(elms_obj);
      MoveDoubleElements(*elms, 0, *elms, 1, len - 1);
      elms->set_the_hole(len - 1);
    }
  }

  // Set the length.
  array->set_length(Smi::FromInt(len - 1));

  return *first;
}


BUILTIN(ArrayUnshift) {
  HandleScope scope(isolate);
  Heap* heap = isolate->heap();
  Handle<Object> receiver = args.receiver();
  MaybeHandle<FixedArrayBase> maybe_elms_obj =
      EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 1);
  Handle<FixedArrayBase> elms_obj;
  if (!maybe_elms_obj.ToHandle(&elms_obj)) {
    return CallJsBuiltin(isolate, "ArrayUnshift", args);
  }
  Handle<JSArray> array = Handle<JSArray>::cast(receiver);
  DCHECK(!array->map()->is_observed());
  if (!array->HasFastSmiOrObjectElements()) {
    return CallJsBuiltin(isolate, "ArrayUnshift", args);
  }
  int len = Smi::cast(array->length())->value();
  int to_add = args.length() - 1;
  int new_length = len + to_add;
  // Currently fixed arrays cannot grow too big, so
  // we should never hit this case.
  DCHECK(to_add <= (Smi::kMaxValue - len));

  if (to_add > 0 && JSArray::WouldChangeReadOnlyLength(array, len + to_add)) {
    return CallJsBuiltin(isolate, "ArrayUnshift", args);
  }

  Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);

  if (new_length > elms->length()) {
    // New backing storage is needed.
    int capacity = new_length + (new_length >> 1) + 16;
    Handle<FixedArray> new_elms =
        isolate->factory()->NewUninitializedFixedArray(capacity);

    ElementsKind kind = array->GetElementsKind();
    ElementsAccessor* accessor = array->GetElementsAccessor();
    accessor->CopyElements(
        elms, 0, kind, new_elms, to_add,
        ElementsAccessor::kCopyToEndAndInitializeToHole);

    elms = new_elms;
    array->set_elements(*elms);
  } else {
    DisallowHeapAllocation no_gc;
    heap->MoveElements(*elms, to_add, 0, len);
  }

  // Add the provided values.
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
  for (int i = 0; i < to_add; i++) {
    elms->set(i, args[i + 1], mode);
  }

  // Set the length.
  array->set_length(Smi::FromInt(new_length));
  return Smi::FromInt(new_length);
}


BUILTIN(ArraySlice) {
  HandleScope scope(isolate);
  Heap* heap = isolate->heap();
  Handle<Object> receiver = args.receiver();
  int len = -1;
  int relative_start = 0;
  int relative_end = 0;
  {
    DisallowHeapAllocation no_gc;
    if (receiver->IsJSArray()) {
      JSArray* array = JSArray::cast(*receiver);
      if (!IsJSArrayFastElementMovingAllowed(heap, array)) {
        AllowHeapAllocation allow_allocation;
        return CallJsBuiltin(isolate, "ArraySlice", args);
      }

      if (!array->HasFastElements()) {
        AllowHeapAllocation allow_allocation;
        return CallJsBuiltin(isolate, "ArraySlice", args);
      }

      len = Smi::cast(array->length())->value();
    } else {
      // Array.slice(arguments, ...) is quite a common idiom (notably more
      // than 50% of invocations in Web apps).  Treat it in C++ as well.
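      // (typically code of the form Array.prototype.slice.call(arguments)).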
      Map* arguments_map =
          isolate->context()->native_context()->sloppy_arguments_map();

      bool is_arguments_object_with_fast_elements =
          receiver->IsJSObject() &&
          JSObject::cast(*receiver)->map() == arguments_map;
      if (!is_arguments_object_with_fast_elements) {
        AllowHeapAllocation allow_allocation;
        return CallJsBuiltin(isolate, "ArraySlice", args);
      }
      JSObject* object = JSObject::cast(*receiver);

      if (!object->HasFastElements()) {
        AllowHeapAllocation allow_allocation;
        return CallJsBuiltin(isolate, "ArraySlice", args);
      }

      Object* len_obj = object->InObjectPropertyAt(Heap::kArgumentsLengthIndex);
      if (!len_obj->IsSmi()) {
        AllowHeapAllocation allow_allocation;
        return CallJsBuiltin(isolate, "ArraySlice", args);
      }
      len = Smi::cast(len_obj)->value();
      if (len > object->elements()->length()) {
        AllowHeapAllocation allow_allocation;
        return CallJsBuiltin(isolate, "ArraySlice", args);
      }
    }

    DCHECK(len >= 0);
    int n_arguments = args.length() - 1;

    // Note carefully chosen defaults---if argument is missing,
    // it's undefined which gets converted to 0 for relative_start
    // and to len for relative_end.
    relative_start = 0;
    relative_end = len;
    if (n_arguments > 0) {
      Object* arg1 = args[1];
      if (arg1->IsSmi()) {
        relative_start = Smi::cast(arg1)->value();
      } else if (arg1->IsHeapNumber()) {
        double start = HeapNumber::cast(arg1)->value();
        if (start < kMinInt || start > kMaxInt) {
          AllowHeapAllocation allow_allocation;
          return CallJsBuiltin(isolate, "ArraySlice", args);
        }
        relative_start = std::isnan(start) ? 0 : static_cast<int>(start);
      } else if (!arg1->IsUndefined()) {
        AllowHeapAllocation allow_allocation;
        return CallJsBuiltin(isolate, "ArraySlice", args);
      }
      if (n_arguments > 1) {
        Object* arg2 = args[2];
        if (arg2->IsSmi()) {
          relative_end = Smi::cast(arg2)->value();
        } else if (arg2->IsHeapNumber()) {
          double end = HeapNumber::cast(arg2)->value();
          if (end < kMinInt || end > kMaxInt) {
            AllowHeapAllocation allow_allocation;
            return CallJsBuiltin(isolate, "ArraySlice", args);
          }
          relative_end = std::isnan(end) ? 0 : static_cast<int>(end);
        } else if (!arg2->IsUndefined()) {
          AllowHeapAllocation allow_allocation;
          return CallJsBuiltin(isolate, "ArraySlice", args);
        }
      }
    }
  }

  // ECMA-262, 3rd Edition, Section 15.4.4.10, step 6.
  int k = (relative_start < 0) ? Max(len + relative_start, 0)
                               : Min(relative_start, len);

  // ECMA-262, 3rd Edition, Section 15.4.4.10, step 8.
  int final = (relative_end < 0) ? Max(len + relative_end, 0)
                                 : Min(relative_end, len);

  // Calculate the length of result array.
  int result_len = Max(final - k, 0);
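  // For example, with len == 5 a call like slice(-2) yields k == 3,
  // final == 5 and result_len == 2.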

  Handle<JSObject> object = Handle<JSObject>::cast(receiver);
  Handle<FixedArrayBase> elms(object->elements(), isolate);

  ElementsKind kind = object->GetElementsKind();
  if (IsHoleyElementsKind(kind)) {
    DisallowHeapAllocation no_gc;
    bool packed = true;
    ElementsAccessor* accessor = ElementsAccessor::ForKind(kind);
    for (int i = k; i < final; i++) {
      if (!accessor->HasElement(object, i, elms)) {
        packed = false;
        break;
      }
    }
    if (packed) {
      kind = GetPackedElementsKind(kind);
    } else if (!receiver->IsJSArray()) {
      AllowHeapAllocation allow_allocation;
      return CallJsBuiltin(isolate, "ArraySlice", args);
    }
  }

  Handle<JSArray> result_array =
      isolate->factory()->NewJSArray(kind, result_len, result_len);

  DisallowHeapAllocation no_gc;
  if (result_len == 0) return *result_array;

  ElementsAccessor* accessor = object->GetElementsAccessor();
  accessor->CopyElements(
      elms, k, kind, handle(result_array->elements(), isolate), 0, result_len);
  return *result_array;
}


BUILTIN(ArraySplice) {
  HandleScope scope(isolate);
  Heap* heap = isolate->heap();
  Handle<Object> receiver = args.receiver();
  MaybeHandle<FixedArrayBase> maybe_elms_obj =
      EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 3);
  Handle<FixedArrayBase> elms_obj;
  if (!maybe_elms_obj.ToHandle(&elms_obj)) {
    return CallJsBuiltin(isolate, "ArraySplice", args);
  }
  Handle<JSArray> array = Handle<JSArray>::cast(receiver);
  DCHECK(!array->map()->is_observed());

  int len = Smi::cast(array->length())->value();

  int n_arguments = args.length() - 1;

  int relative_start = 0;
  if (n_arguments > 0) {
    DisallowHeapAllocation no_gc;
    Object* arg1 = args[1];
    if (arg1->IsSmi()) {
      relative_start = Smi::cast(arg1)->value();
    } else if (arg1->IsHeapNumber()) {
      double start = HeapNumber::cast(arg1)->value();
      if (start < kMinInt || start > kMaxInt) {
        AllowHeapAllocation allow_allocation;
        return CallJsBuiltin(isolate, "ArraySplice", args);
      }
      relative_start = std::isnan(start) ? 0 : static_cast<int>(start);
    } else if (!arg1->IsUndefined()) {
      AllowHeapAllocation allow_allocation;
      return CallJsBuiltin(isolate, "ArraySplice", args);
    }
  }
  int actual_start = (relative_start < 0) ? Max(len + relative_start, 0)
                                          : Min(relative_start, len);

  // SpiderMonkey, TraceMonkey and JSC treat the case where no delete count is
  // given as a request to delete all the elements from the start.
  // And it differs from the case of undefined delete count.
  // This does not follow ECMA-262, but we do the same for
  // compatibility.
  int actual_delete_count;
  if (n_arguments == 1) {
    DCHECK(len - actual_start >= 0);
    actual_delete_count = len - actual_start;
  } else {
    int value = 0;  // ToInteger(undefined) == 0
    if (n_arguments > 1) {
      DisallowHeapAllocation no_gc;
      Object* arg2 = args[2];
      if (arg2->IsSmi()) {
        value = Smi::cast(arg2)->value();
      } else {
        AllowHeapAllocation allow_allocation;
        return CallJsBuiltin(isolate, "ArraySplice", args);
      }
    }
    actual_delete_count = Min(Max(value, 0), len - actual_start);
  }
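  // For example, with len == 5 a call like splice(1, 2) yields
  // actual_start == 1 and actual_delete_count == 2.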

  ElementsKind elements_kind = array->GetElementsKind();

  int item_count = (n_arguments > 1) ? (n_arguments - 2) : 0;
  int new_length = len - actual_delete_count + item_count;

  // For double mode we do not support changing the length.
  if (new_length > len && IsFastDoubleElementsKind(elements_kind)) {
    return CallJsBuiltin(isolate, "ArraySplice", args);
  }

  if (new_length != len && JSArray::HasReadOnlyLength(array)) {
    AllowHeapAllocation allow_allocation;
    return CallJsBuiltin(isolate, "ArraySplice", args);
  }

  if (new_length == 0) {
    Handle<JSArray> result = isolate->factory()->NewJSArrayWithElements(
        elms_obj, elements_kind, actual_delete_count);
    array->set_elements(heap->empty_fixed_array());
    array->set_length(Smi::FromInt(0));
    return *result;
  }

  Handle<JSArray> result_array =
      isolate->factory()->NewJSArray(elements_kind,
                                     actual_delete_count,
                                     actual_delete_count);

  if (actual_delete_count > 0) {
    DisallowHeapAllocation no_gc;
    ElementsAccessor* accessor = array->GetElementsAccessor();
    accessor->CopyElements(
        elms_obj, actual_start, elements_kind,
        handle(result_array->elements(), isolate), 0, actual_delete_count);
  }

  bool elms_changed = false;
  if (item_count < actual_delete_count) {
    // Shrink the array.
    const bool trim_array = !heap->lo_space()->Contains(*elms_obj) &&
      ((actual_start + item_count) <
          (len - actual_delete_count - actual_start));
    if (trim_array) {
      const int delta = actual_delete_count - item_count;

      if (elms_obj->IsFixedDoubleArray()) {
        Handle<FixedDoubleArray> elms =
            Handle<FixedDoubleArray>::cast(elms_obj);
        MoveDoubleElements(*elms, delta, *elms, 0, actual_start);
      } else {
        Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
        DisallowHeapAllocation no_gc;
        heap->MoveElements(*elms, delta, 0, actual_start);
      }

      if (heap->CanMoveObjectStart(*elms_obj)) {
        // On the fast path we move the start of the object in memory.
        elms_obj = handle(heap->LeftTrimFixedArray(*elms_obj, delta));
      } else {
        // This is the slow path. We are going to move the elements to the left
        // by copying them. For trimmed values we store the hole.
        if (elms_obj->IsFixedDoubleArray()) {
          Handle<FixedDoubleArray> elms =
              Handle<FixedDoubleArray>::cast(elms_obj);
          MoveDoubleElements(*elms, 0, *elms, delta, len - delta);
          elms->FillWithHoles(len - delta, len);
        } else {
          Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
          DisallowHeapAllocation no_gc;
          heap->MoveElements(*elms, 0, delta, len - delta);
          elms->FillWithHoles(len - delta, len);
        }
      }
      elms_changed = true;
    } else {
      if (elms_obj->IsFixedDoubleArray()) {
        Handle<FixedDoubleArray> elms =
            Handle<FixedDoubleArray>::cast(elms_obj);
        MoveDoubleElements(*elms, actual_start + item_count,
                           *elms, actual_start + actual_delete_count,
                           (len - actual_delete_count - actual_start));
        elms->FillWithHoles(new_length, len);
      } else {
        Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
        DisallowHeapAllocation no_gc;
        heap->MoveElements(*elms, actual_start + item_count,
                           actual_start + actual_delete_count,
                           (len - actual_delete_count - actual_start));
        elms->FillWithHoles(new_length, len);
      }
    }
  } else if (item_count > actual_delete_count) {
    Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
    // Currently fixed arrays cannot grow too big, so
    // we should never hit this case.
    DCHECK((item_count - actual_delete_count) <= (Smi::kMaxValue - len));

    // Check if array need to grow.
    if (new_length > elms->length()) {
      // New backing storage is needed.
      int capacity = new_length + (new_length >> 1) + 16;
      Handle<FixedArray> new_elms =
          isolate->factory()->NewUninitializedFixedArray(capacity);

      DisallowHeapAllocation no_gc;

      ElementsKind kind = array->GetElementsKind();
      ElementsAccessor* accessor = array->GetElementsAccessor();
      if (actual_start > 0) {
        // Copy the part before actual_start as is.
        accessor->CopyElements(
            elms, 0, kind, new_elms, 0, actual_start);
      }
      accessor->CopyElements(
          elms, actual_start + actual_delete_count, kind,
          new_elms, actual_start + item_count,
          ElementsAccessor::kCopyToEndAndInitializeToHole);

      elms_obj = new_elms;
      elms_changed = true;
    } else {
      DisallowHeapAllocation no_gc;
      heap->MoveElements(*elms, actual_start + item_count,
                         actual_start + actual_delete_count,
                         (len - actual_delete_count - actual_start));
    }
  }

  if (IsFastDoubleElementsKind(elements_kind)) {
    Handle<FixedDoubleArray> elms = Handle<FixedDoubleArray>::cast(elms_obj);
    for (int k = actual_start; k < actual_start + item_count; k++) {
      Object* arg = args[3 + k - actual_start];
      if (arg->IsSmi()) {
        elms->set(k, Smi::cast(arg)->value());
      } else {
        elms->set(k, HeapNumber::cast(arg)->value());
      }
    }
  } else {
    Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
    DisallowHeapAllocation no_gc;
    WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
    for (int k = actual_start; k < actual_start + item_count; k++) {
      elms->set(k, args[3 + k - actual_start], mode);
    }
  }

  if (elms_changed) {
    array->set_elements(*elms_obj);
  }
  // Set the length.
  array->set_length(Smi::FromInt(new_length));

  return *result_array;
}


BUILTIN(ArrayConcat) {
  HandleScope scope(isolate);

  int n_arguments = args.length();
  int result_len = 0;
  ElementsKind elements_kind = GetInitialFastElementsKind();
  bool has_double = false;
  {
    DisallowHeapAllocation no_gc;
    Heap* heap = isolate->heap();
    Context* native_context = isolate->context()->native_context();
    Object* array_proto = native_context->array_function()->prototype();
    PrototypeIterator iter(isolate, array_proto,
                           PrototypeIterator::START_AT_RECEIVER);
    if (!ArrayPrototypeHasNoElements(heap, &iter)) {
      AllowHeapAllocation allow_allocation;
      return CallJsBuiltin(isolate, "ArrayConcatJS", args);
    }

    // Iterate through all the arguments performing checks
    // and calculating total length.
    bool is_holey = false;
    for (int i = 0; i < n_arguments; i++) {
      Object* arg = args[i];
      PrototypeIterator iter(isolate, arg);
      if (!arg->IsJSArray() || !JSArray::cast(arg)->HasFastElements() ||
          iter.GetCurrent() != array_proto) {
        AllowHeapAllocation allow_allocation;
        return CallJsBuiltin(isolate, "ArrayConcatJS", args);
      }
      int len = Smi::cast(JSArray::cast(arg)->length())->value();

      // We shouldn't overflow when adding another len.
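      // Both the running result_len (bounded by the kMaxLength check below)
      // and len stay below kHalfOfMaxInt, so the addition cannot overflow.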
      const int kHalfOfMaxInt = 1 << (kBitsPerInt - 2);
      STATIC_ASSERT(FixedArray::kMaxLength < kHalfOfMaxInt);
      USE(kHalfOfMaxInt);
      result_len += len;
      DCHECK(result_len >= 0);

      if (result_len > FixedDoubleArray::kMaxLength) {
        AllowHeapAllocation allow_allocation;
        return CallJsBuiltin(isolate, "ArrayConcatJS", args);
      }

      ElementsKind arg_kind = JSArray::cast(arg)->map()->elements_kind();
      has_double = has_double || IsFastDoubleElementsKind(arg_kind);
      is_holey = is_holey || IsFastHoleyElementsKind(arg_kind);
      if (IsMoreGeneralElementsKindTransition(elements_kind, arg_kind)) {
        elements_kind = arg_kind;
      }
    }
    if (is_holey) elements_kind = GetHoleyElementsKind(elements_kind);
  }

  // If a double array is concatenated into a fast elements array, the fast
  // elements array needs to be initialized to contain proper holes, since
  // boxing doubles may cause incremental marking.
  ArrayStorageAllocationMode mode =
      has_double && IsFastObjectElementsKind(elements_kind)
      ? INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE : DONT_INITIALIZE_ARRAY_ELEMENTS;
  Handle<JSArray> result_array =
      isolate->factory()->NewJSArray(elements_kind,
                                     result_len,
                                     result_len,
                                     mode);
  if (result_len == 0) return *result_array;

  int j = 0;
  Handle<FixedArrayBase> storage(result_array->elements(), isolate);
  ElementsAccessor* accessor = ElementsAccessor::ForKind(elements_kind);
  for (int i = 0; i < n_arguments; i++) {
    // It is crucial to keep |array| in a raw pointer form to avoid performance
    // degradation.
    JSArray* array = JSArray::cast(args[i]);
    int len = Smi::cast(array->length())->value();
    if (len > 0) {
      ElementsKind from_kind = array->GetElementsKind();
      accessor->CopyElements(array, 0, from_kind, storage, j, len);
      j += len;
    }
  }

  DCHECK(j == result_len);

  return *result_array;
}


// -----------------------------------------------------------------------------
// Throwers for restricted function properties and strict arguments object
// properties


BUILTIN(RestrictedFunctionPropertiesThrower) {
  HandleScope scope(isolate);
  THROW_NEW_ERROR_RETURN_FAILURE(isolate,
                                 NewTypeError("restricted_function_properties",
                                              HandleVector<Object>(NULL, 0)));
}


BUILTIN(RestrictedStrictArgumentsPropertiesThrower) {
  HandleScope scope(isolate);
  THROW_NEW_ERROR_RETURN_FAILURE(
      isolate,
      NewTypeError("strict_poison_pill", HandleVector<Object>(NULL, 0)));
}


// -----------------------------------------------------------------------------
//


template <bool is_construct>
MUST_USE_RESULT static MaybeHandle<Object> HandleApiCallHelper(
    Isolate* isolate, BuiltinArguments<NEEDS_CALLED_FUNCTION>& args) {
  HandleScope scope(isolate);
  Handle<JSFunction> function = args.called_function();
  // TODO(ishell): turn this back to a DCHECK.
  CHECK(function->shared()->IsApiFunction());

  Handle<FunctionTemplateInfo> fun_data(
      function->shared()->get_api_func_data(), isolate);
  if (is_construct) {
    ASSIGN_RETURN_ON_EXCEPTION(
        isolate, fun_data,
        ApiNatives::ConfigureInstance(isolate, fun_data,
                                      Handle<JSObject>::cast(args.receiver())),
        Object);
  }

  DCHECK(!args[0]->IsNull());
  if (args[0]->IsUndefined()) args[0] = function->global_proxy();

  if (!is_construct && !fun_data->accept_any_receiver()) {
    Handle<Object> receiver(&args[0]);
    if (receiver->IsJSObject() && receiver->IsAccessCheckNeeded()) {
      Handle<JSObject> js_receiver = Handle<JSObject>::cast(receiver);
      if (!isolate->MayAccess(js_receiver)) {
        isolate->ReportFailedAccessCheck(js_receiver);
        RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
      }
    }
  }

  Object* raw_holder = fun_data->GetCompatibleReceiver(isolate, args[0]);

  if (raw_holder->IsNull()) {
    // This function cannot be called with the given receiver.  Abort!
    THROW_NEW_ERROR(
        isolate, NewTypeError("illegal_invocation", HandleVector(&function, 1)),
        Object);
  }

  Object* raw_call_data = fun_data->call_code();
  if (!raw_call_data->IsUndefined()) {
    // TODO(ishell): remove this debugging code.
    CHECK(raw_call_data->IsCallHandlerInfo());
    CallHandlerInfo* call_data = CallHandlerInfo::cast(raw_call_data);
    Object* callback_obj = call_data->callback();
    v8::FunctionCallback callback =
        v8::ToCData<v8::FunctionCallback>(callback_obj);
    Object* data_obj = call_data->data();

    LOG(isolate, ApiObjectAccess("call", JSObject::cast(*args.receiver())));
    DCHECK(raw_holder->IsJSObject());

    FunctionCallbackArguments custom(isolate,
                                     data_obj,
                                     *function,
                                     raw_holder,
                                     &args[0] - 1,
                                     args.length() - 1,
                                     is_construct);

    v8::Handle<v8::Value> value = custom.Call(callback);
    Handle<Object> result;
    if (value.IsEmpty()) {
      result = isolate->factory()->undefined_value();
    } else {
      result = v8::Utils::OpenHandle(*value);
      result->VerifyApiCallResultType();
    }

    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    if (!is_construct || result->IsJSObject()) {
      return scope.CloseAndEscape(result);
    }
  }

  return scope.CloseAndEscape(args.receiver());
}


BUILTIN(HandleApiCall) {
  HandleScope scope(isolate);
  DCHECK(!CalledAsConstructor(isolate));
  Handle<Object> result;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, result,
                                     HandleApiCallHelper<false>(isolate, args));
  return *result;
}


BUILTIN(HandleApiCallConstruct) {
  HandleScope scope(isolate);
  DCHECK(CalledAsConstructor(isolate));
  Handle<Object> result;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, result,
                                     HandleApiCallHelper<true>(isolate, args));
  return *result;
}


namespace {

class RelocatableArguments : public BuiltinArguments<NEEDS_CALLED_FUNCTION>,
                             public Relocatable {
 public:
  RelocatableArguments(Isolate* isolate, int length, Object** arguments)
      : BuiltinArguments<NEEDS_CALLED_FUNCTION>(length, arguments),
        Relocatable(isolate) {}

  virtual inline void IterateInstance(ObjectVisitor* v) {
    if (length() == 0) return;
    v->VisitPointers(lowest_address(), highest_address() + 1);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(RelocatableArguments);
};

}  // namespace


MaybeHandle<Object> Builtins::InvokeApiFunction(Handle<JSFunction> function,
                                                Handle<Object> receiver,
                                                int argc,
                                                Handle<Object> args[]) {
  // Construct BuiltinArguments object: function, arguments reversed, receiver.
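  // For argc == 2, for instance, the layout is
  //   argv = { function, args[1], args[0], receiver }
  // and the arguments view below is anchored at &argv[argc + 1], so index 0
  // is the receiver and the highest index is the called function.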
  const int kBufferSize = 32;
  Object* small_argv[kBufferSize];
  Object** argv;
  if (argc + 2 <= kBufferSize) {
    argv = small_argv;
  } else {
    argv = new Object* [argc + 2];
  }
  argv[argc + 1] = *receiver;
  for (int i = 0; i < argc; ++i) {
    argv[argc - i] = *args[i];
  }
  argv[0] = *function;
  MaybeHandle<Object> result;
  {
    auto isolate = function->GetIsolate();
    RelocatableArguments arguments(isolate, argc + 2, &argv[argc + 1]);
    result = HandleApiCallHelper<false>(isolate, arguments);
  }
  if (argv != small_argv) {
    delete[] argv;
  }
  return result;
}


// Helper function to handle calls to non-function objects created through the
// API. The object can be called as either a constructor (using new) or just as
// a function (without new).
MUST_USE_RESULT static Object* HandleApiCallAsFunctionOrConstructor(
    Isolate* isolate,
    bool is_construct_call,
    BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
  // Non-functions are never called as constructors. Even if this is an object
  // called as a constructor the delegate call is not a construct call.
  DCHECK(!CalledAsConstructor(isolate));
  Heap* heap = isolate->heap();

  Handle<Object> receiver = args.receiver();

  // Get the object called.
  JSObject* obj = JSObject::cast(*receiver);

  // Get the invocation callback from the function descriptor that was
  // used to create the called object.
  DCHECK(obj->map()->has_instance_call_handler());
  JSFunction* constructor = JSFunction::cast(obj->map()->GetConstructor());
  // TODO(ishell): turn this back to a DCHECK.
  CHECK(constructor->shared()->IsApiFunction());
  Object* handler =
      constructor->shared()->get_api_func_data()->instance_call_handler();
  DCHECK(!handler->IsUndefined());
  // TODO(ishell): remove this debugging code.
  CHECK(handler->IsCallHandlerInfo());
  CallHandlerInfo* call_data = CallHandlerInfo::cast(handler);
  Object* callback_obj = call_data->callback();
  v8::FunctionCallback callback =
      v8::ToCData<v8::FunctionCallback>(callback_obj);

  // Get the data for the call and perform the callback.
  Object* result;
  {
    HandleScope scope(isolate);
    LOG(isolate, ApiObjectAccess("call non-function", obj));

    FunctionCallbackArguments custom(isolate,
                                     call_data->data(),
                                     constructor,
                                     obj,
                                     &args[0] - 1,
                                     args.length() - 1,
                                     is_construct_call);
    v8::Handle<v8::Value> value = custom.Call(callback);
    if (value.IsEmpty()) {
      result = heap->undefined_value();
    } else {
      result = *reinterpret_cast<Object**>(*value);
      result->VerifyApiCallResultType();
    }
  }
  // Check for exceptions and return result.
  RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate);
  return result;
}


// Handle calls to non-function objects created through the API. This delegate
// function is used when the call is a normal function call.
BUILTIN(HandleApiCallAsFunction) {
  return HandleApiCallAsFunctionOrConstructor(isolate, false, args);
}


// Handle calls to non-function objects created through the API. This delegate
// function is used when the call is a construct call.
BUILTIN(HandleApiCallAsConstructor) {
  return HandleApiCallAsFunctionOrConstructor(isolate, true, args);
}


static void Generate_LoadIC_Miss(MacroAssembler* masm) {
  LoadIC::GenerateMiss(masm);
}


static void Generate_LoadIC_Normal(MacroAssembler* masm) {
  LoadIC::GenerateNormal(masm);
}


static void Generate_LoadIC_Getter_ForDeopt(MacroAssembler* masm) {
  NamedLoadHandlerCompiler::GenerateLoadViaGetterForDeopt(masm);
}


static void Generate_LoadIC_Slow(MacroAssembler* masm) {
  LoadIC::GenerateRuntimeGetProperty(masm);
}


static void Generate_KeyedLoadIC_Initialize(MacroAssembler* masm) {
  KeyedLoadIC::GenerateInitialize(masm);
}


static void Generate_KeyedLoadIC_Slow(MacroAssembler* masm) {
  KeyedLoadIC::GenerateRuntimeGetProperty(masm);
}


static void Generate_KeyedLoadIC_Miss(MacroAssembler* masm) {
  KeyedLoadIC::GenerateMiss(masm);
}


static void Generate_KeyedLoadIC_Megamorphic(MacroAssembler* masm) {
  KeyedLoadIC::GenerateMegamorphic(masm);
}


static void Generate_KeyedLoadIC_PreMonomorphic(MacroAssembler* masm) {
  KeyedLoadIC::GeneratePreMonomorphic(masm);
}


static void Generate_StoreIC_Miss(MacroAssembler* masm) {
  StoreIC::GenerateMiss(masm);
}


static void Generate_StoreIC_Normal(MacroAssembler* masm) {
  StoreIC::GenerateNormal(masm);
}


static void Generate_StoreIC_Slow(MacroAssembler* masm) {
  NamedStoreHandlerCompiler::GenerateSlow(masm);
}


static void Generate_KeyedStoreIC_Slow(MacroAssembler* masm) {
  ElementHandlerCompiler::GenerateStoreSlow(masm);
}


static void Generate_StoreIC_Setter_ForDeopt(MacroAssembler* masm) {
  NamedStoreHandlerCompiler::GenerateStoreViaSetterForDeopt(masm);
}


static void Generate_KeyedStoreIC_Megamorphic(MacroAssembler* masm) {
  KeyedStoreIC::GenerateMegamorphic(masm, SLOPPY);
}


static void Generate_KeyedStoreIC_Megamorphic_Strict(MacroAssembler* masm) {
  KeyedStoreIC::GenerateMegamorphic(masm, STRICT);
}


static void Generate_KeyedStoreIC_Miss(MacroAssembler* masm) {
  KeyedStoreIC::GenerateMiss(masm);
}


static void Generate_KeyedStoreIC_Initialize(MacroAssembler* masm) {
  KeyedStoreIC::GenerateInitialize(masm);
}


static void Generate_KeyedStoreIC_Initialize_Strict(MacroAssembler* masm) {
  KeyedStoreIC::GenerateInitialize(masm);
}


static void Generate_KeyedStoreIC_PreMonomorphic(MacroAssembler* masm) {
  KeyedStoreIC::GeneratePreMonomorphic(masm);
}


static void Generate_KeyedStoreIC_PreMonomorphic_Strict(MacroAssembler* masm) {
  KeyedStoreIC::GeneratePreMonomorphic(masm);
}


static void Generate_KeyedStoreIC_SloppyArguments(MacroAssembler* masm) {
  KeyedStoreIC::GenerateSloppyArguments(masm);
}


static void Generate_CallICStub_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateCallICStubDebugBreak(masm);
}


static void Generate_LoadIC_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateLoadICDebugBreak(masm);
}


static void Generate_StoreIC_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateStoreICDebugBreak(masm);
}


static void Generate_KeyedLoadIC_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateKeyedLoadICDebugBreak(masm);
}


static void Generate_KeyedStoreIC_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateKeyedStoreICDebugBreak(masm);
}


static void Generate_CompareNilIC_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateCompareNilICDebugBreak(masm);
}


static void Generate_Return_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateReturnDebugBreak(masm);
}


static void Generate_CallFunctionStub_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateCallFunctionStubDebugBreak(masm);
}


static void Generate_CallConstructStub_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateCallConstructStubDebugBreak(masm);
}


static void Generate_CallConstructStub_Recording_DebugBreak(
    MacroAssembler* masm) {
  DebugCodegen::GenerateCallConstructStubRecordDebugBreak(masm);
}


static void Generate_Slot_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateSlotDebugBreak(masm);
}


static void Generate_PlainReturn_LiveEdit(MacroAssembler* masm) {
  DebugCodegen::GeneratePlainReturnLiveEdit(masm);
}


static void Generate_FrameDropper_LiveEdit(MacroAssembler* masm) {
  DebugCodegen::GenerateFrameDropperLiveEdit(masm);
}


Builtins::Builtins() : initialized_(false) {
  memset(builtins_, 0, sizeof(builtins_[0]) * builtin_count);
  memset(names_, 0, sizeof(names_[0]) * builtin_count);
}


Builtins::~Builtins() {
}


#define DEF_ENUM_C(name, ignore) FUNCTION_ADDR(Builtin_##name),
Address const Builtins::c_functions_[cfunction_count] = {
  BUILTIN_LIST_C(DEF_ENUM_C)
};
#undef DEF_ENUM_C

#define DEF_JS_NAME(name, ignore) #name,
#define DEF_JS_ARGC(ignore, argc) argc,
const char* const Builtins::javascript_names_[id_count] = {
  BUILTINS_LIST_JS(DEF_JS_NAME)
};

int const Builtins::javascript_argc_[id_count] = {
  BUILTINS_LIST_JS(DEF_JS_ARGC)
};
#undef DEF_JS_NAME
#undef DEF_JS_ARGC

struct BuiltinDesc {
  byte* generator;
  byte* c_code;
  const char* s_name;  // name is only used for generating log information.
  int name;
  Code::Flags flags;
  BuiltinExtraArguments extra_args;
};

#define BUILTIN_FUNCTION_TABLE_INIT { V8_ONCE_INIT, {} }

class BuiltinFunctionTable {
 public:
  BuiltinDesc* functions() {
    base::CallOnce(&once_, &Builtins::InitBuiltinFunctionTable);
    return functions_;
  }

  base::OnceType once_;
  BuiltinDesc functions_[Builtins::builtin_count + 1];

  friend class Builtins;
};

static BuiltinFunctionTable builtin_function_table =
    BUILTIN_FUNCTION_TABLE_INIT;
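// The table above is filled in lazily, exactly once, via base::CallOnce in
// BuiltinFunctionTable::functions(); InitBuiltinFunctionTable() below also
// writes a sentinel entry at index builtin_count.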

// Define array of pointers to generators and C builtin functions.
// We do this in a sort of roundabout way so that we can do the initialization
// within the lexical scope of Builtins:: and within a context where
// Code::Flags names a non-abstract type.
void Builtins::InitBuiltinFunctionTable() {
  BuiltinDesc* functions = builtin_function_table.functions_;
  functions[builtin_count].generator = NULL;
  functions[builtin_count].c_code = NULL;
  functions[builtin_count].s_name = NULL;
  functions[builtin_count].name = builtin_count;
  functions[builtin_count].flags = static_cast<Code::Flags>(0);
  functions[builtin_count].extra_args = NO_EXTRA_ARGUMENTS;

#define DEF_FUNCTION_PTR_C(aname, aextra_args)                         \
    functions->generator = FUNCTION_ADDR(Generate_Adaptor);            \
    functions->c_code = FUNCTION_ADDR(Builtin_##aname);                \
    functions->s_name = #aname;                                        \
    functions->name = c_##aname;                                       \
    functions->flags = Code::ComputeFlags(Code::BUILTIN);              \
    functions->extra_args = aextra_args;                               \
    ++functions;

#define DEF_FUNCTION_PTR_A(aname, kind, state, extra)                       \
    functions->generator = FUNCTION_ADDR(Generate_##aname);                 \
    functions->c_code = NULL;                                               \
    functions->s_name = #aname;                                             \
    functions->name = k##aname;                                             \
    functions->flags = Code::ComputeFlags(Code::kind,                       \
                                          state,                            \
                                          extra);                           \
    functions->extra_args = NO_EXTRA_ARGUMENTS;                             \
    ++functions;

#define DEF_FUNCTION_PTR_H(aname, kind)                                     \
    functions->generator = FUNCTION_ADDR(Generate_##aname);                 \
    functions->c_code = NULL;                                               \
    functions->s_name = #aname;                                             \
    functions->name = k##aname;                                             \
    functions->flags = Code::ComputeHandlerFlags(Code::kind);               \
    functions->extra_args = NO_EXTRA_ARGUMENTS;                             \
    ++functions;

  BUILTIN_LIST_C(DEF_FUNCTION_PTR_C)
  BUILTIN_LIST_A(DEF_FUNCTION_PTR_A)
  BUILTIN_LIST_H(DEF_FUNCTION_PTR_H)
  BUILTIN_LIST_DEBUG_A(DEF_FUNCTION_PTR_A)

#undef DEF_FUNCTION_PTR_C
#undef DEF_FUNCTION_PTR_A
}


void Builtins::SetUp(Isolate* isolate, bool create_heap_objects) {
  DCHECK(!initialized_);

  // Create a scope for the handles in the builtins.
  HandleScope scope(isolate);

  const BuiltinDesc* functions = builtin_function_table.functions();

  // For now we generate builtin adaptor code into a stack-allocated
  // buffer, before copying it into individual code objects. Be careful
  // with alignment, some platforms don't like unaligned code.
#ifdef DEBUG
  // We can generate a lot of debug code on Arm64.
  const size_t buffer_size = 32*KB;
#else
  const size_t buffer_size = 8*KB;
#endif
  union { int force_alignment; byte buffer[buffer_size]; } u;
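  // The otherwise unused int member forces the on-stack code buffer to at
  // least int alignment.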

  // Traverse the list of builtins and generate an adaptor in a
  // separate code object for each one.
  for (int i = 0; i < builtin_count; i++) {
    if (create_heap_objects) {
      MacroAssembler masm(isolate, u.buffer, sizeof u.buffer);
      // Generate the code/adaptor.
      typedef void (*Generator)(MacroAssembler*, int, BuiltinExtraArguments);
      Generator g = FUNCTION_CAST<Generator>(functions[i].generator);
      // We pass all arguments to the generator, but it may not use all of
      // them.  This works because the first arguments are on top of the
      // stack.
      DCHECK(!masm.has_frame());
      g(&masm, functions[i].name, functions[i].extra_args);
      // Move the code into the object heap.
      CodeDesc desc;
      masm.GetCode(&desc);
      Code::Flags flags = functions[i].flags;
      Handle<Code> code =
          isolate->factory()->NewCode(desc, flags, masm.CodeObject());
      // Log the event and add the code to the builtins array.
      PROFILE(isolate,
              CodeCreateEvent(Logger::BUILTIN_TAG, *code, functions[i].s_name));
      builtins_[i] = *code;
      code->set_builtin_index(i);
#ifdef ENABLE_DISASSEMBLER
      if (FLAG_print_builtin_code) {
        CodeTracer::Scope trace_scope(isolate->GetCodeTracer());
        OFStream os(trace_scope.file());
        os << "Builtin: " << functions[i].s_name << "\n";
        code->Disassemble(functions[i].s_name, os);
        os << "\n";
      }
#endif
    } else {
      // Deserializing. The values will be filled in during IterateBuiltins.
      builtins_[i] = NULL;
    }
    names_[i] = functions[i].s_name;
  }

  // Mark as initialized.
  initialized_ = true;
}


void Builtins::TearDown() {
  initialized_ = false;
}


void Builtins::IterateBuiltins(ObjectVisitor* v) {
  v->VisitPointers(&builtins_[0], &builtins_[0] + builtin_count);
}


const char* Builtins::Lookup(byte* pc) {
  // may be called during initialization (disassembler!)
  if (initialized_) {
    for (int i = 0; i < builtin_count; i++) {
      Code* entry = Code::cast(builtins_[i]);
      if (entry->contains(pc)) {
        return names_[i];
      }
    }
  }
  return NULL;
}


void Builtins::Generate_InterruptCheck(MacroAssembler* masm) {
  masm->TailCallRuntime(Runtime::kInterrupt, 0, 1);
}


void Builtins::Generate_StackCheck(MacroAssembler* masm) {
  masm->TailCallRuntime(Runtime::kStackGuard, 0, 1);
}


#define DEFINE_BUILTIN_ACCESSOR_C(name, ignore)               \
Handle<Code> Builtins::name() {                               \
  Code** code_address =                                       \
      reinterpret_cast<Code**>(builtin_address(k##name));     \
  return Handle<Code>(code_address);                          \
}
#define DEFINE_BUILTIN_ACCESSOR_A(name, kind, state, extra) \
Handle<Code> Builtins::name() {                             \
  Code** code_address =                                     \
      reinterpret_cast<Code**>(builtin_address(k##name));   \
  return Handle<Code>(code_address);                        \
}
#define DEFINE_BUILTIN_ACCESSOR_H(name, kind)               \
Handle<Code> Builtins::name() {                             \
  Code** code_address =                                     \
      reinterpret_cast<Code**>(builtin_address(k##name));   \
  return Handle<Code>(code_address);                        \
}
BUILTIN_LIST_C(DEFINE_BUILTIN_ACCESSOR_C)
BUILTIN_LIST_A(DEFINE_BUILTIN_ACCESSOR_A)
BUILTIN_LIST_H(DEFINE_BUILTIN_ACCESSOR_H)
BUILTIN_LIST_DEBUG_A(DEFINE_BUILTIN_ACCESSOR_A)
#undef DEFINE_BUILTIN_ACCESSOR_C
#undef DEFINE_BUILTIN_ACCESSOR_A


} }  // namespace v8::internal