// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_JS_OBJECTS_INL_H_
#define V8_OBJECTS_JS_OBJECTS_INL_H_

#include "src/objects/js-objects.h"

#include "src/heap/heap-write-barrier.h"
#include "src/objects/elements.h"
#include "src/objects/embedder-data-slot-inl.h"
#include "src/objects/feedback-cell-inl.h"
#include "src/objects/feedback-vector.h"
#include "src/objects/field-index-inl.h"
#include "src/objects/hash-table-inl.h"
#include "src/objects/heap-number-inl.h"
#include "src/objects/keys.h"
#include "src/objects/lookup-inl.h"
#include "src/objects/property-array-inl.h"
#include "src/objects/prototype-inl.h"
#include "src/objects/shared-function-info.h"
#include "src/objects/slots.h"
#include "src/objects/smi-inl.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

OBJECT_CONSTRUCTORS_IMPL(JSReceiver, HeapObject)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSObject)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSCustomElementsObject)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSSpecialObject)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSAsyncFromSyncIterator)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSFunctionOrBoundFunction)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSBoundFunction)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSDate)
OBJECT_CONSTRUCTORS_IMPL(JSFunction, JSFunctionOrBoundFunction)
OBJECT_CONSTRUCTORS_IMPL(JSGlobalObject, JSSpecialObject)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSGlobalProxy)
JSIteratorResult::JSIteratorResult(Address ptr) : JSObject(ptr) {}
OBJECT_CONSTRUCTORS_IMPL(JSMessageObject, JSObject)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSPrimitiveWrapper)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSStringIterator)

NEVER_READ_ONLY_SPACE_IMPL(JSReceiver)

CAST_ACCESSOR(JSFunction)
CAST_ACCESSOR(JSGlobalObject)
CAST_ACCESSOR(JSIteratorResult)
CAST_ACCESSOR(JSMessageObject)
CAST_ACCESSOR(JSReceiver)

MaybeHandle<Object> JSReceiver::GetProperty(Isolate* isolate,
                                            Handle<JSReceiver> receiver,
                                            Handle<Name> name) {
  LookupIterator it(isolate, receiver, name, receiver);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return Object::GetProperty(&it);
}

MaybeHandle<Object> JSReceiver::GetElement(Isolate* isolate,
                                           Handle<JSReceiver> receiver,
                                           uint32_t index) {
  LookupIterator it(isolate, receiver, index, receiver);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return Object::GetProperty(&it);
}

Handle<Object> JSReceiver::GetDataProperty(Handle<JSReceiver> object,
                                           Handle<Name> name) {
  LookupIterator it(object->GetIsolate(), object, name, object,
                    LookupIterator::PROTOTYPE_CHAIN_SKIP_INTERCEPTOR);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return GetDataProperty(&it);
}
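
// Usage sketch (illustrative caller code, not part of this header): the
// wrappers above run a named lookup through LookupIterator and return
// undefined when the property is absent, so a hypothetical caller might write
//
//   Handle<Name> name = isolate->factory()->InternalizeUtf8String("foo");
//   MaybeHandle<Object> result =
//       JSReceiver::GetProperty(isolate, receiver, name);
//
// where `receiver` is some Handle<JSReceiver> and "foo" is a placeholder.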

MaybeHandle<HeapObject> JSReceiver::GetPrototype(Isolate* isolate,
                                                 Handle<JSReceiver> receiver) {
  // We don't expect access checks to be needed on JSProxy objects.
  DCHECK(!receiver->IsAccessCheckNeeded() || receiver->IsJSObject());
  PrototypeIterator iter(isolate, receiver, kStartAtReceiver,
                         PrototypeIterator::END_AT_NON_HIDDEN);
  do {
    if (!iter.AdvanceFollowingProxies()) return MaybeHandle<HeapObject>();
  } while (!iter.IsAtEnd());
  return PrototypeIterator::GetCurrent(iter);
}

MaybeHandle<Object> JSReceiver::GetProperty(Isolate* isolate,
                                            Handle<JSReceiver> receiver,
                                            const char* name) {
  Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
  return GetProperty(isolate, receiver, str);
}

// static
V8_WARN_UNUSED_RESULT MaybeHandle<FixedArray> JSReceiver::OwnPropertyKeys(
    Handle<JSReceiver> object) {
  return KeyAccumulator::GetKeys(object, KeyCollectionMode::kOwnOnly,
                                 ALL_PROPERTIES,
                                 GetKeysConversion::kConvertToString);
}

bool JSObject::PrototypeHasNoElements(Isolate* isolate, JSObject object) {
  DisallowHeapAllocation no_gc;
  HeapObject prototype = HeapObject::cast(object.map().prototype());
  ReadOnlyRoots roots(isolate);
  HeapObject null = roots.null_value();
  FixedArrayBase empty_fixed_array = roots.empty_fixed_array();
  FixedArrayBase empty_slow_element_dictionary =
      roots.empty_slow_element_dictionary();
  while (prototype != null) {
    Map map = prototype.map();
    if (map.IsCustomElementsReceiverMap()) return false;
    FixedArrayBase elements = JSObject::cast(prototype).elements();
    if (elements != empty_fixed_array &&
        elements != empty_slow_element_dictionary) {
      return false;
    }
    prototype = HeapObject::cast(map.prototype());
  }
  return true;
}

ACCESSORS(JSReceiver, raw_properties_or_hash, Object, kPropertiesOrHashOffset)

void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
  JSObject::ValidateElements(*object);
  ElementsKind elements_kind = object->map().elements_kind();
  if (!IsObjectElementsKind(elements_kind)) {
    if (IsHoleyElementsKind(elements_kind)) {
      TransitionElementsKind(object, HOLEY_ELEMENTS);
    } else {
      TransitionElementsKind(object, PACKED_ELEMENTS);
    }
  }
}
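
// For example (reading the helper above): an object in HOLEY_SMI_ELEMENTS or
// HOLEY_DOUBLE_ELEMENTS is transitioned to HOLEY_ELEMENTS, and one in
// PACKED_SMI_ELEMENTS or PACKED_DOUBLE_ELEMENTS to PACKED_ELEMENTS, so that
// arbitrary heap objects can subsequently be stored as elements.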

template <typename TSlot>
void JSObject::EnsureCanContainElements(Handle<JSObject> object, TSlot objects,
                                        uint32_t count,
                                        EnsureElementsMode mode) {
  static_assert(std::is_same<TSlot, FullObjectSlot>::value ||
                    std::is_same<TSlot, ObjectSlot>::value,
                "Only ObjectSlot and FullObjectSlot are expected here");
  ElementsKind current_kind = object->GetElementsKind();
  ElementsKind target_kind = current_kind;
  {
    DisallowHeapAllocation no_allocation;
    DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
    bool is_holey = IsHoleyElementsKind(current_kind);
    if (current_kind == HOLEY_ELEMENTS) return;
    Object the_hole = object->GetReadOnlyRoots().the_hole_value();
    for (uint32_t i = 0; i < count; ++i, ++objects) {
      Object current = *objects;
      if (current == the_hole) {
        is_holey = true;
        target_kind = GetHoleyElementsKind(target_kind);
      } else if (!current.IsSmi()) {
        if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current.IsNumber()) {
          if (IsSmiElementsKind(target_kind)) {
            if (is_holey) {
              target_kind = HOLEY_DOUBLE_ELEMENTS;
            } else {
              target_kind = PACKED_DOUBLE_ELEMENTS;
            }
          }
        } else if (is_holey) {
          target_kind = HOLEY_ELEMENTS;
          break;
        } else {
          target_kind = PACKED_ELEMENTS;
        }
      }
    }
  }
  if (target_kind != current_kind) {
    TransitionElementsKind(object, target_kind);
  }
}
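
// Illustrative transitions implied by the loop above: a HeapNumber stored
// into a PACKED_SMI_ELEMENTS backing store with
// ALLOW_CONVERTED_DOUBLE_ELEMENTS moves the target kind to
// PACKED_DOUBLE_ELEMENTS (or HOLEY_DOUBLE_ELEMENTS if a hole was also seen),
// while any other non-Smi heap object escalates it to PACKED_ELEMENTS or
// HOLEY_ELEMENTS.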

void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Handle<FixedArrayBase> elements,
                                        uint32_t length,
                                        EnsureElementsMode mode) {
  ReadOnlyRoots roots = object->GetReadOnlyRoots();
  if (elements->map() != roots.fixed_double_array_map()) {
    DCHECK(elements->map() == roots.fixed_array_map() ||
           elements->map() == roots.fixed_cow_array_map());
    if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
      mode = DONT_ALLOW_DOUBLE_ELEMENTS;
    }
    ObjectSlot objects =
        Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
    EnsureCanContainElements(object, objects, length, mode);
    return;
  }

  DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (object->GetElementsKind() == HOLEY_SMI_ELEMENTS) {
    TransitionElementsKind(object, HOLEY_DOUBLE_ELEMENTS);
  } else if (object->GetElementsKind() == PACKED_SMI_ELEMENTS) {
    Handle<FixedDoubleArray> double_array =
        Handle<FixedDoubleArray>::cast(elements);
    for (uint32_t i = 0; i < length; ++i) {
      if (double_array->is_the_hole(i)) {
        TransitionElementsKind(object, HOLEY_DOUBLE_ELEMENTS);
        return;
      }
    }
    TransitionElementsKind(object, PACKED_DOUBLE_ELEMENTS);
  }
}

void JSObject::SetMapAndElements(Handle<JSObject> object, Handle<Map> new_map,
                                 Handle<FixedArrayBase> value) {
  Isolate* isolate = object->GetIsolate();
  JSObject::MigrateToMap(isolate, object, new_map);
  DCHECK((object->map().has_fast_smi_or_object_elements() ||
          (*value == ReadOnlyRoots(isolate).empty_fixed_array()) ||
          object->map().has_fast_string_wrapper_elements()) ==
         (value->map() == ReadOnlyRoots(isolate).fixed_array_map() ||
          value->map() == ReadOnlyRoots(isolate).fixed_cow_array_map()));
  DCHECK((*value == ReadOnlyRoots(isolate).empty_fixed_array()) ||
         (object->map().has_fast_double_elements() ==
          value->IsFixedDoubleArray()));
  object->set_elements(*value);
}

void JSObject::initialize_elements() {
  FixedArrayBase elements = map().GetInitialElements();
  set_elements(elements, SKIP_WRITE_BARRIER);
}

DEF_GETTER(JSObject, GetIndexedInterceptor, InterceptorInfo) {
  return map(isolate).GetIndexedInterceptor(isolate);
}

DEF_GETTER(JSObject, GetNamedInterceptor, InterceptorInfo) {
  return map(isolate).GetNamedInterceptor(isolate);
}

// static
int JSObject::GetHeaderSize(Map map) {
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  InstanceType instance_type = map.instance_type();
  return instance_type == JS_OBJECT_TYPE
             ? JSObject::kHeaderSize
             : GetHeaderSize(instance_type, map.has_prototype_slot());
}

// static
int JSObject::GetEmbedderFieldsStartOffset(Map map) {
  // Embedder fields are located after the object header.
  return GetHeaderSize(map);
}

int JSObject::GetEmbedderFieldsStartOffset() {
  return GetEmbedderFieldsStartOffset(map());
}

// static
int JSObject::GetEmbedderFieldCount(Map map) {
  int instance_size = map.instance_size();
  if (instance_size == kVariableSizeSentinel) return 0;
  // Embedder fields are located after the object header, whereas in-object
  // properties are located at the end of the object. We don't have to round up
  // the header size here because division by kEmbedderDataSlotSizeInTaggedSlots
  // will swallow potential padding in case of (kTaggedSize !=
  // kSystemPointerSize) anyway.
  return (((instance_size - GetEmbedderFieldsStartOffset(map)) >>
           kTaggedSizeLog2) -
          map.GetInObjectProperties()) /
         kEmbedderDataSlotSizeInTaggedSlots;
}
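
// Worked example (illustrative numbers, assuming kTaggedSize ==
// kSystemPointerSize == 8 so that kEmbedderDataSlotSizeInTaggedSlots == 1):
// for a map with instance_size 80, a 24-byte header and 3 in-object
// properties, the expression above yields ((80 - 24) >> 3) - 3 = 4 embedder
// fields.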

int JSObject::GetEmbedderFieldCount() const {
  return GetEmbedderFieldCount(map());
}

int JSObject::GetEmbedderFieldOffset(int index) {
  DCHECK_LT(static_cast<unsigned>(index),
            static_cast<unsigned>(GetEmbedderFieldCount()));
  return GetEmbedderFieldsStartOffset() + (kEmbedderDataSlotSize * index);
}

Object JSObject::GetEmbedderField(int index) {
  return EmbedderDataSlot(*this, index).load_tagged();
}

void JSObject::SetEmbedderField(int index, Object value) {
  EmbedderDataSlot::store_tagged(*this, index, value);
}

void JSObject::SetEmbedderField(int index, Smi value) {
  EmbedderDataSlot(*this, index).store_smi(value);
}

bool JSObject::IsUnboxedDoubleField(FieldIndex index) const {
  const Isolate* isolate = GetIsolateForPtrCompr(*this);
  return IsUnboxedDoubleField(isolate, index);
}

bool JSObject::IsUnboxedDoubleField(const Isolate* isolate,
                                    FieldIndex index) const {
  if (!FLAG_unbox_double_fields) return false;
  return map(isolate).IsUnboxedDoubleField(isolate, index);
}

// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
Object JSObject::RawFastPropertyAt(FieldIndex index) const {
  const Isolate* isolate = GetIsolateForPtrCompr(*this);
  return RawFastPropertyAt(isolate, index);
}

Object JSObject::RawFastPropertyAt(const Isolate* isolate,
                                   FieldIndex index) const {
  DCHECK(!IsUnboxedDoubleField(isolate, index));
  if (index.is_inobject()) {
    return TaggedField<Object>::load(isolate, *this, index.offset());
  } else {
    return property_array(isolate).get(isolate, index.outobject_array_index());
  }
}

double JSObject::RawFastDoublePropertyAt(FieldIndex index) const {
  DCHECK(IsUnboxedDoubleField(index));
  return ReadField<double>(index.offset());
}

uint64_t JSObject::RawFastDoublePropertyAsBitsAt(FieldIndex index) const {
  DCHECK(IsUnboxedDoubleField(index));
  return ReadField<uint64_t>(index.offset());
}

void JSObject::RawFastInobjectPropertyAtPut(FieldIndex index, Object value,
                                            WriteBarrierMode mode) {
  DCHECK(index.is_inobject());
  int offset = index.offset();
  WRITE_FIELD(*this, offset, value);
  CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode);
}

void JSObject::RawFastPropertyAtPut(FieldIndex index, Object value,
                                    WriteBarrierMode mode) {
  if (index.is_inobject()) {
    RawFastInobjectPropertyAtPut(index, value, mode);
  } else {
    DCHECK_EQ(UPDATE_WRITE_BARRIER, mode);
    property_array().set(index.outobject_array_index(), value);
  }
}

void JSObject::RawFastDoublePropertyAsBitsAtPut(FieldIndex index,
                                                uint64_t bits) {
  // Double unboxing is enabled only on 64-bit platforms without pointer
  // compression.
  DCHECK_EQ(kDoubleSize, kTaggedSize);
  Address field_addr = FIELD_ADDR(*this, index.offset());
  base::Relaxed_Store(reinterpret_cast<base::AtomicWord*>(field_addr),
                      static_cast<base::AtomicWord>(bits));
}

void JSObject::FastPropertyAtPut(FieldIndex index, Object value) {
  if (IsUnboxedDoubleField(index)) {
    DCHECK(value.IsHeapNumber());
    // Ensure that all bits of the double value are preserved.
    RawFastDoublePropertyAsBitsAtPut(index,
                                     HeapNumber::cast(value).value_as_bits());
  } else {
    RawFastPropertyAtPut(index, value);
  }
}

void JSObject::WriteToField(InternalIndex descriptor, PropertyDetails details,
                            Object value) {
  DCHECK_EQ(kField, details.location());
  DCHECK_EQ(kData, details.kind());
  DisallowHeapAllocation no_gc;
  FieldIndex index = FieldIndex::ForDescriptor(map(), descriptor);
  if (details.representation().IsDouble()) {
    // Manipulating the signaling NaN used for the hole and uninitialized
    // double field sentinel in C++, e.g. with bit_cast or value()/set_value(),
    // will change its value on ia32 (the x87 stack is used to return values
    // and stores to the stack silently clear the signalling bit).
    uint64_t bits;
    if (value.IsSmi()) {
      bits = bit_cast<uint64_t>(static_cast<double>(Smi::ToInt(value)));
    } else if (value.IsUninitialized()) {
      bits = kHoleNanInt64;
    } else {
      DCHECK(value.IsHeapNumber());
      bits = HeapNumber::cast(value).value_as_bits();
    }
    if (IsUnboxedDoubleField(index)) {
      RawFastDoublePropertyAsBitsAtPut(index, bits);
    } else {
      auto box = HeapNumber::cast(RawFastPropertyAt(index));
      box.set_value_as_bits(bits);
    }
  } else {
    RawFastPropertyAtPut(index, value);
  }
}
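
// Example of the double path above (sketch): storing Smi 1 into a field with
// Double representation writes bit_cast<uint64_t>(1.0), either into the
// unboxed field or into the HeapNumber box already sitting at that field
// index; an uninitialized sentinel is written as kHoleNanInt64.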

int JSObject::GetInObjectPropertyOffset(int index) {
  return map().GetInObjectPropertyOffset(index);
}

Object JSObject::InObjectPropertyAt(int index) {
  int offset = GetInObjectPropertyOffset(index);
  return TaggedField<Object>::load(*this, offset);
}

Object JSObject::InObjectPropertyAtPut(int index, Object value,
                                       WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  int offset = GetInObjectPropertyOffset(index);
  WRITE_FIELD(*this, offset, value);
  CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode);
  return value;
}

void JSObject::InitializeBody(Map map, int start_offset,
                              Object pre_allocated_value, Object filler_value) {
  DCHECK_IMPLIES(filler_value.IsHeapObject(),
                 !ObjectInYoungGeneration(filler_value));
  DCHECK_IMPLIES(pre_allocated_value.IsHeapObject(),
                 !ObjectInYoungGeneration(pre_allocated_value));
  int size = map.instance_size();
  int offset = start_offset;
  if (filler_value != pre_allocated_value) {
    int end_of_pre_allocated_offset =
        size - (map.UnusedPropertyFields() * kTaggedSize);
    DCHECK_LE(kHeaderSize, end_of_pre_allocated_offset);
    while (offset < end_of_pre_allocated_offset) {
      WRITE_FIELD(*this, offset, pre_allocated_value);
      offset += kTaggedSize;
    }
  }
  while (offset < size) {
    WRITE_FIELD(*this, offset, filler_value);
    offset += kTaggedSize;
  }
}
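
// Layout sketch for the loops above: with N unused property fields, tagged
// slots from start_offset up to instance_size - N * kTaggedSize receive
// pre_allocated_value and the remaining N slots receive filler_value; when
// the two values are equal, the single trailing loop fills the whole range.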

ACCESSORS(JSFunction, raw_feedback_cell, FeedbackCell, kFeedbackCellOffset)

ACCESSORS(JSGlobalObject, native_context, NativeContext, kNativeContextOffset)
ACCESSORS(JSGlobalObject, global_proxy, JSGlobalProxy, kGlobalProxyOffset)

DEF_GETTER(JSGlobalObject, native_context_unchecked, Object) {
  return TaggedField<Object, kNativeContextOffset>::load(isolate, *this);
}

FeedbackVector JSFunction::feedback_vector() const {
  DCHECK(has_feedback_vector());
  return FeedbackVector::cast(raw_feedback_cell().value());
}

ClosureFeedbackCellArray JSFunction::closure_feedback_cell_array() const {
  DCHECK(has_closure_feedback_cell_array());
  return ClosureFeedbackCellArray::cast(raw_feedback_cell().value());
}

// Code objects that are marked for deoptimization are not considered to be
// optimized. This is because the JSFunction might have been already
// deoptimized but its code() still needs to be unlinked, which will happen on
// its next activation.
// TODO(jupvfranco): rename this function. Maybe RunOptimizedCode,
// or IsValidOptimizedCode.
bool JSFunction::IsOptimized() {
  return is_compiled() && code().kind() == Code::OPTIMIZED_FUNCTION &&
         !code().marked_for_deoptimization();
}

bool JSFunction::HasOptimizedCode() {
  return IsOptimized() ||
         (has_feedback_vector() && feedback_vector().has_optimized_code() &&
          !feedback_vector().optimized_code().marked_for_deoptimization());
}

bool JSFunction::HasOptimizationMarker() {
  return has_feedback_vector() && feedback_vector().has_optimization_marker();
}

void JSFunction::ClearOptimizationMarker() {
  DCHECK(has_feedback_vector());
  feedback_vector().ClearOptimizationMarker();
}

// Optimized code marked for deoptimization will tier back down to running
// interpreted on its next activation, and already doesn't count as IsOptimized.
bool JSFunction::IsInterpreted() {
  return is_compiled() && (code().is_interpreter_trampoline_builtin() ||
                           (code().kind() == Code::OPTIMIZED_FUNCTION &&
                            code().marked_for_deoptimization()));
}

bool JSFunction::ChecksOptimizationMarker() {
  return code().checks_optimization_marker();
}

bool JSFunction::IsMarkedForOptimization() {
  return has_feedback_vector() && feedback_vector().optimization_marker() ==
                                      OptimizationMarker::kCompileOptimized;
}

bool JSFunction::IsMarkedForConcurrentOptimization() {
  return has_feedback_vector() &&
         feedback_vector().optimization_marker() ==
             OptimizationMarker::kCompileOptimizedConcurrent;
}

bool JSFunction::IsInOptimizationQueue() {
  return has_feedback_vector() && feedback_vector().optimization_marker() ==
                                      OptimizationMarker::kInOptimizationQueue;
}

void JSFunction::CompleteInobjectSlackTrackingIfActive() {
  if (!has_prototype_slot()) return;
  if (has_initial_map() && initial_map().IsInobjectSlackTrackingInProgress()) {
    initial_map().CompleteInobjectSlackTracking(GetIsolate());
  }
}

AbstractCode JSFunction::abstract_code() {
  if (IsInterpreted()) {
    return AbstractCode::cast(shared().GetBytecodeArray());
  } else {
    return AbstractCode::cast(code());
  }
}

int JSFunction::length() { return shared().length(); }

Code JSFunction::code() const {
  return Code::cast(RELAXED_READ_FIELD(*this, kCodeOffset));
}

void JSFunction::set_code(Code value) {
  DCHECK(!ObjectInYoungGeneration(value));
  RELAXED_WRITE_FIELD(*this, kCodeOffset, value);
#ifndef V8_DISABLE_WRITE_BARRIERS
  MarkingBarrier(*this, RawField(kCodeOffset), value);
#endif
}

void JSFunction::set_code_no_write_barrier(Code value) {
  DCHECK(!ObjectInYoungGeneration(value));
  RELAXED_WRITE_FIELD(*this, kCodeOffset, value);
}

// TODO(ishell): Why relaxed read but release store?
DEF_GETTER(JSFunction, shared, SharedFunctionInfo) {
  return SharedFunctionInfo::cast(
      RELAXED_READ_FIELD(*this, kSharedFunctionInfoOffset));
}

void JSFunction::set_shared(SharedFunctionInfo value, WriteBarrierMode mode) {
  // Release semantics to support acquire read in NeedsResetDueToFlushedBytecode.
  RELEASE_WRITE_FIELD(*this, kSharedFunctionInfoOffset, value);
  CONDITIONAL_WRITE_BARRIER(*this, kSharedFunctionInfoOffset, value, mode);
}

void JSFunction::ClearOptimizedCodeSlot(const char* reason) {
  if (has_feedback_vector() && feedback_vector().has_optimized_code()) {
    if (FLAG_trace_opt) {
      PrintF("[evicting entry from optimizing code feedback slot (%s) for ",
             reason);
      ShortPrint();
      PrintF("]\n");
    }
    feedback_vector().ClearOptimizedCode();
  }
}

void JSFunction::SetOptimizationMarker(OptimizationMarker marker) {
  DCHECK(has_feedback_vector());
  DCHECK(ChecksOptimizationMarker());
  DCHECK(!HasOptimizedCode());

  feedback_vector().SetOptimizationMarker(marker);
}

bool JSFunction::has_feedback_vector() const {
  return shared().is_compiled() &&
         raw_feedback_cell().value().IsFeedbackVector();
}

bool JSFunction::has_closure_feedback_cell_array() const {
  return shared().is_compiled() &&
         raw_feedback_cell().value().IsClosureFeedbackCellArray();
}
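
// Note: as the two predicates above show, the feedback cell of a compiled
// function holds either a FeedbackVector (once one has been allocated) or a
// ClosureFeedbackCellArray; feedback_vector() and
// closure_feedback_cell_array() above cast to whichever is present.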

Context JSFunction::context() {
  return TaggedField<Context, kContextOffset>::load(*this);
}

bool JSFunction::has_context() const {
  return TaggedField<HeapObject, kContextOffset>::load(*this).IsContext();
}

JSGlobalProxy JSFunction::global_proxy() { return context().global_proxy(); }

NativeContext JSFunction::native_context() {
  return context().native_context();
}

void JSFunction::set_context(HeapObject value) {
  DCHECK(value.IsUndefined() || value.IsContext());
  WRITE_FIELD(*this, kContextOffset, value);
  WRITE_BARRIER(*this, kContextOffset, value);
}

ACCESSORS_CHECKED(JSFunction, prototype_or_initial_map, HeapObject,
                  kPrototypeOrInitialMapOffset, map().has_prototype_slot())

DEF_GETTER(JSFunction, has_prototype_slot, bool) {
  return map(isolate).has_prototype_slot();
}

DEF_GETTER(JSFunction, initial_map, Map) {
  return Map::cast(prototype_or_initial_map(isolate));
}

DEF_GETTER(JSFunction, has_initial_map, bool) {
  DCHECK(has_prototype_slot(isolate));
  return prototype_or_initial_map(isolate).IsMap(isolate);
}

DEF_GETTER(JSFunction, has_instance_prototype, bool) {
  DCHECK(has_prototype_slot(isolate));
  // Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
  // i::GetIsolateForPtrCompr(HeapObject).
  return has_initial_map(isolate) ||
         !prototype_or_initial_map(isolate).IsTheHole(
             GetReadOnlyRoots(isolate));
}

DEF_GETTER(JSFunction, has_prototype, bool) {
  DCHECK(has_prototype_slot(isolate));
  return map(isolate).has_non_instance_prototype() ||
         has_instance_prototype(isolate);
}

DEF_GETTER(JSFunction, has_prototype_property, bool) {
  return (has_prototype_slot(isolate) && IsConstructor(isolate)) ||
         IsGeneratorFunction(shared(isolate).kind());
}

DEF_GETTER(JSFunction, PrototypeRequiresRuntimeLookup, bool) {
  return !has_prototype_property(isolate) ||
         map(isolate).has_non_instance_prototype();
}

DEF_GETTER(JSFunction, instance_prototype, HeapObject) {
  DCHECK(has_instance_prototype(isolate));
  if (has_initial_map(isolate)) return initial_map(isolate).prototype(isolate);
  // When there is no initial map and the prototype is a JSReceiver, the
  // initial map field is used for the prototype field.
  return HeapObject::cast(prototype_or_initial_map(isolate));
}

DEF_GETTER(JSFunction, prototype, Object) {
  DCHECK(has_prototype(isolate));
  // If the function's prototype property has been set to a non-JSReceiver
  // value, that value is stored in the constructor field of the map.
  if (map(isolate).has_non_instance_prototype()) {
    Object prototype = map(isolate).GetConstructor(isolate);
    // The map must have a prototype in that field, not a back pointer.
    DCHECK(!prototype.IsMap(isolate));
    DCHECK(!prototype.IsFunctionTemplateInfo(isolate));
    return prototype;
  }
  return instance_prototype(isolate);
}

bool JSFunction::is_compiled() const {
  return code().builtin_index() != Builtins::kCompileLazy &&
         shared().is_compiled();
}

bool JSFunction::NeedsResetDueToFlushedBytecode() {
  // Do a raw read for shared and code fields here since this function may be
  // called on a concurrent thread and the JSFunction might not be fully
  // initialized yet.
  Object maybe_shared = ACQUIRE_READ_FIELD(*this, kSharedFunctionInfoOffset);
  Object maybe_code = RELAXED_READ_FIELD(*this, kCodeOffset);

  if (!maybe_shared.IsSharedFunctionInfo() || !maybe_code.IsCode()) {
    return false;
  }

  SharedFunctionInfo shared = SharedFunctionInfo::cast(maybe_shared);
  Code code = Code::cast(maybe_code);
  return !shared.is_compiled() &&
         code.builtin_index() != Builtins::kCompileLazy;
}

void JSFunction::ResetIfBytecodeFlushed(
    base::Optional<std::function<void(HeapObject object, ObjectSlot slot,
                                      HeapObject target)>>
        gc_notify_updated_slot) {
  if (FLAG_flush_bytecode && NeedsResetDueToFlushedBytecode()) {
    // Bytecode was flushed and function is now uncompiled, reset JSFunction
    // by setting code to CompileLazy and clearing the feedback vector.
    set_code(GetIsolate()->builtins()->builtin(i::Builtins::kCompileLazy));
    raw_feedback_cell().reset_feedback_vector(gc_notify_updated_slot);
  }
}

bool JSMessageObject::DidEnsureSourcePositionsAvailable() const {
  return shared_info().IsUndefined();
}

int JSMessageObject::GetStartPosition() const {
  DCHECK(DidEnsureSourcePositionsAvailable());
  return start_position();
}

int JSMessageObject::GetEndPosition() const {
  DCHECK(DidEnsureSourcePositionsAvailable());
  return end_position();
}

MessageTemplate JSMessageObject::type() const {
  return MessageTemplateFromInt(raw_type());
}

void JSMessageObject::set_type(MessageTemplate value) {
  set_raw_type(static_cast<int>(value));
}

ACCESSORS(JSMessageObject, argument, Object, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Script, kScriptOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
ACCESSORS(JSMessageObject, shared_info, HeapObject, kSharedInfoOffset)
ACCESSORS(JSMessageObject, bytecode_offset, Smi, kBytecodeOffsetOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
SMI_ACCESSORS(JSMessageObject, error_level, kErrorLevelOffset)
SMI_ACCESSORS(JSMessageObject, raw_type, kMessageTypeOffset)

DEF_GETTER(JSObject, GetElementsKind, ElementsKind) {
  ElementsKind kind = map(isolate).elements_kind();
#if VERIFY_HEAP && DEBUG
  FixedArrayBase fixed_array = FixedArrayBase::unchecked_cast(
      TaggedField<HeapObject, kElementsOffset>::load(isolate, *this));

  // If a GC was caused while constructing this object, the elements
  // pointer may point to a one pointer filler map.
  if (ElementsAreSafeToExamine(isolate)) {
    Map map = fixed_array.map(isolate);
    if (IsSmiOrObjectElementsKind(kind)) {
      DCHECK(map == GetReadOnlyRoots(isolate).fixed_array_map() ||
             map == GetReadOnlyRoots(isolate).fixed_cow_array_map());
    } else if (IsDoubleElementsKind(kind)) {
      DCHECK(fixed_array.IsFixedDoubleArray(isolate) ||
             fixed_array == GetReadOnlyRoots(isolate).empty_fixed_array());
    } else if (kind == DICTIONARY_ELEMENTS) {
      DCHECK(fixed_array.IsFixedArray(isolate));
      DCHECK(fixed_array.IsNumberDictionary(isolate));
    } else {
      DCHECK(kind > DICTIONARY_ELEMENTS ||
             IsAnyNonextensibleElementsKind(kind));
    }
    DCHECK(
        !IsSloppyArgumentsElementsKind(kind) ||
        (elements(isolate).IsFixedArray() && elements(isolate).length() >= 2));
  }
#endif
  return kind;
}

DEF_GETTER(JSObject, GetElementsAccessor, ElementsAccessor*) {
  return ElementsAccessor::ForKind(GetElementsKind(isolate));
}

DEF_GETTER(JSObject, HasObjectElements, bool) {
  return IsObjectElementsKind(GetElementsKind(isolate));
}

DEF_GETTER(JSObject, HasSmiElements, bool) {
  return IsSmiElementsKind(GetElementsKind(isolate));
}

DEF_GETTER(JSObject, HasSmiOrObjectElements, bool) {
  return IsSmiOrObjectElementsKind(GetElementsKind(isolate));
}

DEF_GETTER(JSObject, HasDoubleElements, bool) {
  return IsDoubleElementsKind(GetElementsKind(isolate));
}

DEF_GETTER(JSObject, HasHoleyElements, bool) {
  return IsHoleyElementsKind(GetElementsKind(isolate));
}

DEF_GETTER(JSObject, HasFastElements, bool) {
  return IsFastElementsKind(GetElementsKind(isolate));
}

DEF_GETTER(JSObject, HasFastPackedElements, bool) {
  return IsFastPackedElementsKind(GetElementsKind(isolate));
}

DEF_GETTER(JSObject, HasDictionaryElements, bool) {
  return GetElementsKind(isolate) == DICTIONARY_ELEMENTS;
}

DEF_GETTER(JSObject, HasPackedElements, bool) {
  return GetElementsKind(isolate) == PACKED_ELEMENTS;
}

DEF_GETTER(JSObject, HasAnyNonextensibleElements, bool) {
  return IsAnyNonextensibleElementsKind(GetElementsKind(isolate));
}

DEF_GETTER(JSObject, HasSealedElements, bool) {
  return IsSealedElementsKind(GetElementsKind(isolate));
}

DEF_GETTER(JSObject, HasNonextensibleElements, bool) {
  return IsNonextensibleElementsKind(GetElementsKind(isolate));
}

DEF_GETTER(JSObject, HasFastArgumentsElements, bool) {
  return GetElementsKind(isolate) == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
}

DEF_GETTER(JSObject, HasSlowArgumentsElements, bool) {
  return GetElementsKind(isolate) == SLOW_SLOPPY_ARGUMENTS_ELEMENTS;
}

DEF_GETTER(JSObject, HasSloppyArgumentsElements, bool) {
  return IsSloppyArgumentsElementsKind(GetElementsKind(isolate));
}

DEF_GETTER(JSObject, HasStringWrapperElements, bool) {
  return IsStringWrapperElementsKind(GetElementsKind(isolate));
}

DEF_GETTER(JSObject, HasFastStringWrapperElements, bool) {
  return GetElementsKind(isolate) == FAST_STRING_WRAPPER_ELEMENTS;
}

DEF_GETTER(JSObject, HasSlowStringWrapperElements, bool) {
  return GetElementsKind(isolate) == SLOW_STRING_WRAPPER_ELEMENTS;
}

DEF_GETTER(JSObject, HasTypedArrayElements, bool) {
  DCHECK(!elements(isolate).is_null());
  return map(isolate).has_typed_array_elements();
}

#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype) \
  DEF_GETTER(JSObject, HasFixed##Type##Elements, bool) {    \
    return map(isolate).elements_kind() == TYPE##_ELEMENTS; \
  }

TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)

#undef FIXED_TYPED_ELEMENTS_CHECK
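
// For instance, the TYPED_ARRAYS expansion above defines getters such as
// HasFixedUint8Elements(), which simply compare the map's elements kind
// against the corresponding typed-array kind (UINT8_ELEMENTS and so on).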

DEF_GETTER(JSObject, HasNamedInterceptor, bool) {
  return map(isolate).has_named_interceptor();
}

DEF_GETTER(JSObject, HasIndexedInterceptor, bool) {
  return map(isolate).has_indexed_interceptor();
}

DEF_GETTER(JSGlobalObject, global_dictionary, GlobalDictionary) {
  DCHECK(!HasFastProperties(isolate));
  DCHECK(IsJSGlobalObject(isolate));
  return GlobalDictionary::cast(raw_properties_or_hash(isolate));
}

void JSGlobalObject::set_global_dictionary(GlobalDictionary dictionary) {
  DCHECK(IsJSGlobalObject());
  set_raw_properties_or_hash(dictionary);
}

DEF_GETTER(JSObject, element_dictionary, NumberDictionary) {
  DCHECK(HasDictionaryElements(isolate) ||
         HasSlowStringWrapperElements(isolate));
  return NumberDictionary::cast(elements(isolate));
}

void JSReceiver::initialize_properties(Isolate* isolate) {
  ReadOnlyRoots roots(isolate);
  DCHECK(!ObjectInYoungGeneration(roots.empty_fixed_array()));
  DCHECK(!ObjectInYoungGeneration(roots.empty_property_dictionary()));
  if (map(isolate).is_dictionary_map()) {
    WRITE_FIELD(*this, kPropertiesOrHashOffset,
                roots.empty_property_dictionary());
  } else {
    WRITE_FIELD(*this, kPropertiesOrHashOffset, roots.empty_fixed_array());
  }
}

DEF_GETTER(JSReceiver, HasFastProperties, bool) {
  DCHECK(raw_properties_or_hash(isolate).IsSmi() ||
         ((raw_properties_or_hash(isolate).IsGlobalDictionary(isolate) ||
           raw_properties_or_hash(isolate).IsNameDictionary(isolate)) ==
          map(isolate).is_dictionary_map()));
  return !map(isolate).is_dictionary_map();
}

DEF_GETTER(JSReceiver, property_dictionary, NameDictionary) {
  DCHECK(!IsJSGlobalObject(isolate));
  DCHECK(!HasFastProperties(isolate));
  // Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
  // i::GetIsolateForPtrCompr(HeapObject).
  Object prop = raw_properties_or_hash(isolate);
  if (prop.IsSmi()) {
    return GetReadOnlyRoots(isolate).empty_property_dictionary();
  }
  return NameDictionary::cast(prop);
}

// TODO(gsathya): Pass isolate directly to this function and access
// the heap from this.
DEF_GETTER(JSReceiver, property_array, PropertyArray) {
  DCHECK(HasFastProperties(isolate));
  // Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
  // i::GetIsolateForPtrCompr(HeapObject).
  Object prop = raw_properties_or_hash(isolate);
  if (prop.IsSmi() || prop == GetReadOnlyRoots(isolate).empty_fixed_array()) {
    return GetReadOnlyRoots(isolate).empty_property_array();
  }
  return PropertyArray::cast(prop);
}

Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object,
                                    Handle<Name> name) {
  Isolate* isolate = object->GetIsolate();
  LookupIterator::Key key(isolate, name);
  LookupIterator it(isolate, object, key, object);
  return HasProperty(&it);
}

Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
                                       uint32_t index) {
  if (object->IsJSModuleNamespace()) return Just(false);

  if (object->IsJSObject()) {  // Shortcut.
    LookupIterator it(object->GetIsolate(), object, index, object,
                      LookupIterator::OWN);
    return HasProperty(&it);
  }

  Maybe<PropertyAttributes> attributes =
      JSReceiver::GetOwnPropertyAttributes(object, index);
  MAYBE_RETURN(attributes, Nothing<bool>());
  return Just(attributes.FromJust() != ABSENT);
}

Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
    Handle<JSReceiver> object, Handle<Name> name) {
  Isolate* isolate = object->GetIsolate();
  LookupIterator::Key key(isolate, name);
  LookupIterator it(isolate, object, key, object);
  return GetPropertyAttributes(&it);
}

Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes(
    Handle<JSReceiver> object, Handle<Name> name) {
  Isolate* isolate = object->GetIsolate();
  LookupIterator::Key key(isolate, name);
  LookupIterator it(isolate, object, key, object, LookupIterator::OWN);
  return GetPropertyAttributes(&it);
}

Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes(
    Handle<JSReceiver> object, uint32_t index) {
  LookupIterator it(object->GetIsolate(), object, index, object,
                    LookupIterator::OWN);
  return GetPropertyAttributes(&it);
}

Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
  LookupIterator it(object->GetIsolate(), object, index, object);
  return HasProperty(&it);
}

Maybe<PropertyAttributes> JSReceiver::GetElementAttributes(
    Handle<JSReceiver> object, uint32_t index) {
  Isolate* isolate = object->GetIsolate();
  LookupIterator it(isolate, object, index, object);
  return GetPropertyAttributes(&it);
}

Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttributes(
    Handle<JSReceiver> object, uint32_t index) {
  Isolate* isolate = object->GetIsolate();
  LookupIterator it(isolate, object, index, object, LookupIterator::OWN);
  return GetPropertyAttributes(&it);
}

bool JSGlobalObject::IsDetached() {
  return global_proxy().IsDetachedFrom(*this);
}

bool JSGlobalProxy::IsDetachedFrom(JSGlobalObject global) const {
  const PrototypeIterator iter(this->GetIsolate(), *this);
  return iter.GetCurrent() != global;
}

inline int JSGlobalProxy::SizeWithEmbedderFields(int embedder_field_count) {
  DCHECK_GE(embedder_field_count, 0);
  return kHeaderSize + embedder_field_count * kEmbedderDataSlotSize;
}

ACCESSORS(JSIteratorResult, value, Object, kValueOffset)
ACCESSORS(JSIteratorResult, done, Object, kDoneOffset)

TQ_SMI_ACCESSORS(JSStringIterator, index)

// If the fast-case backing storage takes up much more memory than a dictionary
// backing storage would, the object should have slow elements.
// static
static inline bool ShouldConvertToSlowElements(uint32_t used_elements,
                                               uint32_t new_capacity) {
  uint32_t size_threshold = NumberDictionary::kPreferFastElementsSizeFactor *
                            NumberDictionary::ComputeCapacity(used_elements) *
                            NumberDictionary::kEntrySize;
  return size_threshold <= new_capacity;
}
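
// Rough reading of the heuristic above (the exact numbers depend on the
// NumberDictionary constants): the fast backing store is considered wasteful
// once the requested capacity reaches kPreferFastElementsSizeFactor times the
// number of slots a dictionary with the same used-element count would occupy.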

static inline bool ShouldConvertToSlowElements(JSObject object,
                                               uint32_t capacity,
                                               uint32_t index,
                                               uint32_t* new_capacity) {
  STATIC_ASSERT(JSObject::kMaxUncheckedOldFastElementsLength <=
                JSObject::kMaxUncheckedFastElementsLength);
  if (index < capacity) {
    *new_capacity = capacity;
    return false;
  }
  if (index - capacity >= JSObject::kMaxGap) return true;
  *new_capacity = JSObject::NewElementsCapacity(index + 1);
  DCHECK_LT(index, *new_capacity);
  // TODO(ulan): Check if it works with young large objects.
  if (*new_capacity <= JSObject::kMaxUncheckedOldFastElementsLength ||
      (*new_capacity <= JSObject::kMaxUncheckedFastElementsLength &&
       ObjectInYoungGeneration(object))) {
    return false;
  }
  return ShouldConvertToSlowElements(object.GetFastElementsUsage(),
                                     *new_capacity);
}
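
// Illustrative behaviour of the check above: a store within the current
// capacity never triggers a transition; a store leaving a gap of kMaxGap or
// more elements past the current capacity always does; in between, small
// grown capacities (with a larger allowance for young-generation objects)
// stay fast, and larger ones fall back to the size heuristic above.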

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_JS_OBJECTS_INL_H_