// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_MAP_INL_H_
#define V8_OBJECTS_MAP_INL_H_

#include "src/objects/map.h"
#include "src/field-type.h"
#include "src/objects-inl.h"
#include "src/objects/api-callbacks-inl.h"
#include "src/objects/descriptor-array.h"
#include "src/objects/prototype-info-inl.h"
#include "src/objects/shared-function-info.h"
#include "src/objects/templates-inl.h"
#include "src/property.h"
#include "src/transitions.h"

// For pulling in heap/incremental-marking.h which is needed by
// ACCESSORS_CHECKED.
#include "src/heap/heap-inl.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

CAST_ACCESSOR(Map)

ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
// A freshly allocated layout descriptor can be set on an existing map.
// We need to use release-store and acquire-load accessor pairs to ensure
// that the concurrent marking thread observes initializing stores of the
// layout descriptor.
SYNCHRONIZED_ACCESSORS_CHECKED(Map, layout_descriptor, LayoutDescriptor,
                               kLayoutDescriptorOffset,
                               FLAG_unbox_double_fields)
WEAK_ACCESSORS(Map, raw_transitions, kTransitionsOrPrototypeInfoOffset)

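// Each BIT_FIELD_ACCESSORS line below defines a trivial getter/setter pair
// over the named byte-sized field. A rough sketch of the expansion for one
// entry (see object-macros.h for the authoritative definition):
//
//   bool Map::is_callable() const {
//     return IsCallableBit::decode(bit_field());
//   }
//   void Map::set_is_callable(bool value) {
//     set_bit_field(IsCallableBit::update(bit_field(), value));
//   }
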
// |bit_field| fields.
BIT_FIELD_ACCESSORS(Map, bit_field, has_non_instance_prototype,
                    Map::HasNonInstancePrototypeBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_callable, Map::IsCallableBit)
BIT_FIELD_ACCESSORS(Map, bit_field, has_named_interceptor,
                    Map::HasNamedInterceptorBit)
BIT_FIELD_ACCESSORS(Map, bit_field, has_indexed_interceptor,
                    Map::HasIndexedInterceptorBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_undetectable, Map::IsUndetectableBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_access_check_needed,
                    Map::IsAccessCheckNeededBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_constructor, Map::IsConstructorBit)
BIT_FIELD_ACCESSORS(Map, bit_field, has_prototype_slot,
                    Map::HasPrototypeSlotBit)

// |bit_field2| fields.
BIT_FIELD_ACCESSORS(Map, bit_field2, is_extensible, Map::IsExtensibleBit)
BIT_FIELD_ACCESSORS(Map, bit_field2, is_prototype_map, Map::IsPrototypeMapBit)
BIT_FIELD_ACCESSORS(Map, bit_field2, is_in_retained_map_list,
                    Map::IsInRetainedMapListBit)

// |bit_field3| fields.
BIT_FIELD_ACCESSORS(Map, bit_field3, owns_descriptors, Map::OwnsDescriptorsBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, has_hidden_prototype,
                    Map::HasHiddenPrototypeBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_deprecated, Map::IsDeprecatedBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_migration_target,
                    Map::IsMigrationTargetBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_immutable_proto,
                    Map::IsImmutablePrototypeBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, new_target_is_base,
                    Map::NewTargetIsBaseBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, may_have_interesting_symbols,
                    Map::MayHaveInterestingSymbolsBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, construction_counter,
                    Map::ConstructionCounterBits)

InterceptorInfo* Map::GetNamedInterceptor() {
  DCHECK(has_named_interceptor());
  FunctionTemplateInfo* info = GetFunctionTemplateInfo();
  return InterceptorInfo::cast(info->named_property_handler());
}

InterceptorInfo* Map::GetIndexedInterceptor() {
  DCHECK(has_indexed_interceptor());
  FunctionTemplateInfo* info = GetFunctionTemplateInfo();
  return InterceptorInfo::cast(info->indexed_property_handler());
}

bool Map::IsInplaceGeneralizableField(PropertyConstness constness,
                                      Representation representation,
                                      FieldType* field_type) {
  if (FLAG_track_constant_fields && FLAG_modify_map_inplace &&
      (constness == PropertyConstness::kConst)) {
    // PropertyConstness::kConst -> PropertyConstness::kMutable field
    // generalization may happen in-place.
    return true;
  }
  if (representation.IsHeapObject() && !field_type->IsAny()) {
    return true;
  }
  return false;
}

bool Map::CanHaveFastTransitionableElementsKind(InstanceType instance_type) {
  return instance_type == JS_ARRAY_TYPE || instance_type == JS_VALUE_TYPE ||
         instance_type == JS_ARGUMENTS_TYPE;
}

bool Map::CanHaveFastTransitionableElementsKind() const {
  return CanHaveFastTransitionableElementsKind(instance_type());
}

// static
void Map::GeneralizeIfCanHaveTransitionableElementsKind(
    Isolate* isolate, InstanceType instance_type, PropertyConstness* constness,
    Representation* representation, Handle<FieldType>* field_type) {
  if (CanHaveFastTransitionableElementsKind(instance_type)) {
    // We don't support propagation of field generalization through elements
    // kind transitions because they are inserted into the transition tree
    // before field transitions. In order to avoid complexity of handling
    // such a case we ensure that all maps with transitionable elements kinds
    // do not have fields that can be generalized in-place (without creation
    // of a new map).
    if (FLAG_track_constant_fields && FLAG_modify_map_inplace) {
      // The constness is either already PropertyConstness::kMutable or should
      // become PropertyConstness::kMutable if it was PropertyConstness::kConst.
      *constness = PropertyConstness::kMutable;
    }
    if (representation->IsHeapObject()) {
      // The field type is either already Any or should become Any if it was
      // something else.
      *field_type = FieldType::Any(isolate);
    }
  }
}

bool Map::IsUnboxedDoubleField(FieldIndex index) const {
  if (!FLAG_unbox_double_fields) return false;
  if (index.is_hidden_field() || !index.is_inobject()) return false;
  return !layout_descriptor()->IsTagged(index.property_index());
}

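// Heuristic used when adding a property to a fast-mode object: once the
// preallocated fields are exhausted, tolerate only a bounded number of
// out-of-object fields before normalization looks worthwhile. For example
// (per the constants below), a named store allows up to
// max(128, GetInObjectProperties()) out-of-object fields.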
bool Map::TooManyFastProperties(StoreOrigin store_origin) const {
  if (UnusedPropertyFields() != 0) return false;
  if (is_prototype_map()) return false;
  int minimum = store_origin == StoreOrigin::kNamed ? 128 : 12;
  int limit = Max(minimum, GetInObjectProperties());
  int external = NumberOfFields() - GetInObjectProperties();
  return external > limit;
}

PropertyDetails Map::GetLastDescriptorDetails() const {
  return instance_descriptors()->GetDetails(LastAdded());
}

int Map::LastAdded() const {
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK_GT(number_of_own_descriptors, 0);
  return number_of_own_descriptors - 1;
}

int Map::NumberOfOwnDescriptors() const {
  return NumberOfOwnDescriptorsBits::decode(bit_field3());
}

void Map::SetNumberOfOwnDescriptors(int number) {
  DCHECK_LE(number, instance_descriptors()->number_of_descriptors());
  CHECK_LE(static_cast<unsigned>(number),
           static_cast<unsigned>(kMaxNumberOfDescriptors));
  set_bit_field3(NumberOfOwnDescriptorsBits::update(bit_field3(), number));
}

int Map::EnumLength() const { return EnumLengthBits::decode(bit_field3()); }

void Map::SetEnumLength(int length) {
  if (length != kInvalidEnumCacheSentinel) {
    DCHECK_LE(length, NumberOfOwnDescriptors());
    CHECK_LE(static_cast<unsigned>(length),
             static_cast<unsigned>(kMaxNumberOfDescriptors));
  }
  set_bit_field3(EnumLengthBits::update(bit_field3(), length));
}

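// Every elements kind has a canonical read-only empty backing store, so
// freshly allocated objects can share it instead of allocating one.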
FixedArrayBase* Map::GetInitialElements() const {
  FixedArrayBase* result = nullptr;
  if (has_fast_elements() || has_fast_string_wrapper_elements()) {
    result = GetReadOnlyRoots().empty_fixed_array();
  } else if (has_fast_sloppy_arguments_elements()) {
    result = GetReadOnlyRoots().empty_sloppy_arguments_elements();
  } else if (has_fixed_typed_array_elements()) {
    result = GetReadOnlyRoots().EmptyFixedTypedArrayForMap(this);
  } else if (has_dictionary_elements()) {
    result = GetReadOnlyRoots().empty_slow_element_dictionary();
  } else {
    UNREACHABLE();
  }
  DCHECK(!Heap::InNewSpace(result));
  return result;
}

VisitorId Map::visitor_id() const {
  return static_cast<VisitorId>(
      RELAXED_READ_BYTE_FIELD(this, kVisitorIdOffset));
}

void Map::set_visitor_id(VisitorId id) {
  CHECK_LT(static_cast<unsigned>(id), 256);
  RELAXED_WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}

int Map::instance_size_in_words() const {
  return RELAXED_READ_BYTE_FIELD(this, kInstanceSizeInWordsOffset);
}

void Map::set_instance_size_in_words(int value) {
  RELAXED_WRITE_BYTE_FIELD(this, kInstanceSizeInWordsOffset,
                           static_cast<byte>(value));
}

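// Instance size is stored in words; e.g. with 8-byte pointers
// (kPointerSizeLog2 == 3), 10 words decode to an 80-byte instance.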
int Map::instance_size() const {
  return instance_size_in_words() << kPointerSizeLog2;
}

void Map::set_instance_size(int value) {
  CHECK_EQ(0, value & (kPointerSize - 1));
  value >>= kPointerSizeLog2;
  CHECK_LT(static_cast<unsigned>(value), 256);
  set_instance_size_in_words(value);
}

int Map::inobject_properties_start_or_constructor_function_index() const {
  return RELAXED_READ_BYTE_FIELD(
      this, kInObjectPropertiesStartOrConstructorFunctionIndexOffset);
}

void Map::set_inobject_properties_start_or_constructor_function_index(
    int value) {
  CHECK_LT(static_cast<unsigned>(value), 256);
  RELAXED_WRITE_BYTE_FIELD(
      this, kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
      static_cast<byte>(value));
}

int Map::GetInObjectPropertiesStartInWords() const {
  DCHECK(IsJSObjectMap());
  return inobject_properties_start_or_constructor_function_index();
}

void Map::SetInObjectPropertiesStartInWords(int value) {
  CHECK(IsJSObjectMap());
  set_inobject_properties_start_or_constructor_function_index(value);
}

int Map::GetInObjectProperties() const {
  DCHECK(IsJSObjectMap());
  return instance_size_in_words() - GetInObjectPropertiesStartInWords();
}

int Map::GetConstructorFunctionIndex() const {
  DCHECK(IsPrimitiveMap());
  return inobject_properties_start_or_constructor_function_index();
}

void Map::SetConstructorFunctionIndex(int value) {
  CHECK(IsPrimitiveMap());
  set_inobject_properties_start_or_constructor_function_index(value);
}

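// Worked example: if in-object properties start at word 4, property index 1
// lives at byte offset (4 + 1) * kPointerSize.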
int Map::GetInObjectPropertyOffset(int index) const {
  return (GetInObjectPropertiesStartInWords() + index) * kPointerSize;
}

Handle<Map> Map::AddMissingTransitionsForTesting(
    Isolate* isolate, Handle<Map> split_map,
    Handle<DescriptorArray> descriptors,
    Handle<LayoutDescriptor> full_layout_descriptor) {
  return AddMissingTransitions(isolate, split_map, descriptors,
                               full_layout_descriptor);
}

InstanceType Map::instance_type() const {
  return static_cast<InstanceType>(
      READ_UINT16_FIELD(this, kInstanceTypeOffset));
}

void Map::set_instance_type(InstanceType value) {
  WRITE_UINT16_FIELD(this, kInstanceTypeOffset, value);
}

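// The used_or_unused_instance_size_in_words byte is overloaded. Values of at
// least JSObject::kFieldsAdded record the used in-object size in words;
// smaller values record the slack in the out-of-object property array.
// Decoding sketch (mirroring the code below):
//   instance_size_in_words() == 10, value == 8  -> 2 unused in-object fields
//   value == 2 (< JSObject::kFieldsAdded)       -> 2 unused backing-store slots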
int Map::UnusedPropertyFields() const {
  int value = used_or_unused_instance_size_in_words();
  DCHECK_IMPLIES(!IsJSObjectMap(), value == 0);
  int unused;
  if (value >= JSObject::kFieldsAdded) {
    unused = instance_size_in_words() - value;
  } else {
    // For out-of-object properties the "used_or_unused_instance_size_in_words"
    // byte encodes the slack in the property array.
    unused = value;
  }
  return unused;
}

int Map::UnusedInObjectProperties() const {
  // Like Map::UnusedPropertyFields(), but returns 0 for out of object
  // properties.
  int value = used_or_unused_instance_size_in_words();
  DCHECK_IMPLIES(!IsJSObjectMap(), value == 0);
  if (value >= JSObject::kFieldsAdded) {
    return instance_size_in_words() - value;
  }
  return 0;
}

int Map::used_or_unused_instance_size_in_words() const {
  return RELAXED_READ_BYTE_FIELD(this, kUsedOrUnusedInstanceSizeInWordsOffset);
}

void Map::set_used_or_unused_instance_size_in_words(int value) {
  CHECK_LE(static_cast<unsigned>(value), 255);
  RELAXED_WRITE_BYTE_FIELD(this, kUsedOrUnusedInstanceSizeInWordsOffset,
                           static_cast<byte>(value));
}

int Map::UsedInstanceSize() const {
  int words = used_or_unused_instance_size_in_words();
  if (words < JSObject::kFieldsAdded) {
    // All in-object properties are used and this value tracks the slack
    // in the property array.
    return instance_size();
  }
  return words * kPointerSize;
}

void Map::SetInObjectUnusedPropertyFields(int value) {
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kPointerSize);
  if (!IsJSObjectMap()) {
    CHECK_EQ(0, value);
    set_used_or_unused_instance_size_in_words(0);
    DCHECK_EQ(0, UnusedPropertyFields());
    return;
  }
  CHECK_LE(0, value);
  DCHECK_LE(value, GetInObjectProperties());
  int used_inobject_properties = GetInObjectProperties() - value;
  set_used_or_unused_instance_size_in_words(
      GetInObjectPropertyOffset(used_inobject_properties) / kPointerSize);
  DCHECK_EQ(value, UnusedPropertyFields());
}

void Map::SetOutOfObjectUnusedPropertyFields(int value) {
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kPointerSize);
  CHECK_LT(static_cast<unsigned>(value), JSObject::kFieldsAdded);
  // For out-of-object properties the "used_or_unused_instance_size_in_words"
  // byte encodes the slack in the property array.
  set_used_or_unused_instance_size_in_words(value);
  DCHECK_EQ(value, UnusedPropertyFields());
}

void Map::CopyUnusedPropertyFields(Map* map) {
  set_used_or_unused_instance_size_in_words(
      map->used_or_unused_instance_size_in_words());
  DCHECK_EQ(UnusedPropertyFields(), map->UnusedPropertyFields());
}

void Map::CopyUnusedPropertyFieldsAdjustedForInstanceSize(Map* map) {
  int value = map->used_or_unused_instance_size_in_words();
  if (value >= JSObject::kFieldsAdded) {
    // Unused in-object fields. Adjust the offset from the object's start
    // so it matches the distance to the object's end.
    value += instance_size_in_words() - map->instance_size_in_words();
  }
  set_used_or_unused_instance_size_in_words(value);
  DCHECK_EQ(UnusedPropertyFields(), map->UnusedPropertyFields());
}

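// Adding a field either consumes one in-object slot (the used-size counter
// moves toward instance_size_in_words()) or, once the object is full, shifts
// the accounting to the property-array slack, which is replenished in chunks
// of JSObject::kFieldsAdded whenever it runs out.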
void Map::AccountAddedPropertyField() {
  // Update used instance size and unused property fields number.
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kPointerSize);
#ifdef DEBUG
  int new_unused = UnusedPropertyFields() - 1;
  if (new_unused < 0) new_unused += JSObject::kFieldsAdded;
#endif
  int value = used_or_unused_instance_size_in_words();
  if (value >= JSObject::kFieldsAdded) {
    if (value == instance_size_in_words()) {
      AccountAddedOutOfObjectPropertyField(0);
    } else {
      // The property is added in-object, so simply increment the counter.
      set_used_or_unused_instance_size_in_words(value + 1);
    }
  } else {
    AccountAddedOutOfObjectPropertyField(value);
  }
  DCHECK_EQ(new_unused, UnusedPropertyFields());
}

void Map::AccountAddedOutOfObjectPropertyField(int unused_in_property_array) {
  unused_in_property_array--;
  if (unused_in_property_array < 0) {
    unused_in_property_array += JSObject::kFieldsAdded;
  }
  CHECK_LT(static_cast<unsigned>(unused_in_property_array),
           JSObject::kFieldsAdded);
  set_used_or_unused_instance_size_in_words(unused_in_property_array);
  DCHECK_EQ(unused_in_property_array, UnusedPropertyFields());
}

byte Map::bit_field() const { return READ_BYTE_FIELD(this, kBitFieldOffset); }

void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}

byte Map::bit_field2() const { return READ_BYTE_FIELD(this, kBitField2Offset); }

void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}

bool Map::is_abandoned_prototype_map() const {
  return is_prototype_map() && !owns_descriptors();
}

bool Map::should_be_fast_prototype_map() const {
  if (!prototype_info()->IsPrototypeInfo()) return false;
  return PrototypeInfo::cast(prototype_info())->should_be_fast_map();
}

void Map::set_elements_kind(ElementsKind elements_kind) {
  CHECK_LT(static_cast<int>(elements_kind), kElementsKindCount);
  set_bit_field2(Map::ElementsKindBits::update(bit_field2(), elements_kind));
}

ElementsKind Map::elements_kind() const {
  return Map::ElementsKindBits::decode(bit_field2());
}

bool Map::has_fast_smi_elements() const {
  return IsSmiElementsKind(elements_kind());
}

bool Map::has_fast_object_elements() const {
  return IsObjectElementsKind(elements_kind());
}

bool Map::has_fast_smi_or_object_elements() const {
  return IsSmiOrObjectElementsKind(elements_kind());
}

bool Map::has_fast_double_elements() const {
  return IsDoubleElementsKind(elements_kind());
}

bool Map::has_fast_elements() const {
  return IsFastElementsKind(elements_kind());
}

bool Map::has_sloppy_arguments_elements() const {
  return IsSloppyArgumentsElementsKind(elements_kind());
}

bool Map::has_fast_sloppy_arguments_elements() const {
  return elements_kind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
}

bool Map::has_fast_string_wrapper_elements() const {
  return elements_kind() == FAST_STRING_WRAPPER_ELEMENTS;
}

bool Map::has_fixed_typed_array_elements() const {
  return IsFixedTypedArrayElementsKind(elements_kind());
}

bool Map::has_dictionary_elements() const {
  return IsDictionaryElementsKind(elements_kind());
}

void Map::set_is_dictionary_map(bool value) {
  uint32_t new_bit_field3 = IsDictionaryMapBit::update(bit_field3(), value);
  new_bit_field3 = IsUnstableBit::update(new_bit_field3, value);
  set_bit_field3(new_bit_field3);
}

bool Map::is_dictionary_map() const {
  return IsDictionaryMapBit::decode(bit_field3());
}

void Map::mark_unstable() {
  set_bit_field3(IsUnstableBit::update(bit_field3(), true));
}

bool Map::is_stable() const { return !IsUnstableBit::decode(bit_field3()); }

bool Map::CanBeDeprecated() const {
  int descriptor = LastAdded();
  for (int i = 0; i <= descriptor; i++) {
    PropertyDetails details = instance_descriptors()->GetDetails(i);
    if (details.representation().IsNone()) return true;
    if (details.representation().IsSmi()) return true;
    if (details.representation().IsDouble()) return true;
    if (details.representation().IsHeapObject()) return true;
    if (details.kind() == kData && details.location() == kDescriptor) {
      return true;
    }
  }
  return false;
}

void Map::NotifyLeafMapLayoutChange(Isolate* isolate) {
  if (is_stable()) {
    mark_unstable();
    dependent_code()->DeoptimizeDependentCodeGroup(
        isolate, DependentCode::kPrototypeCheckGroup);
  }
}

bool Map::CanTransition() const {
  // Only JSObject and subtypes have map transitions and back pointers.
  return InstanceTypeChecker::IsJSObject(instance_type());
}

#define DEF_TESTER(Type, ...)                              \
  bool Map::Is##Type##Map() const {                        \
    return InstanceTypeChecker::Is##Type(instance_type()); \
  }
INSTANCE_TYPE_CHECKERS(DEF_TESTER)
#undef DEF_TESTER

bool Map::IsBooleanMap() const {
  return this == GetReadOnlyRoots().boolean_map();
}

bool Map::IsNullOrUndefinedMap() const {
  return this == GetReadOnlyRoots().null_map() ||
         this == GetReadOnlyRoots().undefined_map();
}

bool Map::IsPrimitiveMap() const {
  return instance_type() <= LAST_PRIMITIVE_TYPE;
}

Object* Map::prototype() const { return READ_FIELD(this, kPrototypeOffset); }

void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  DCHECK(value->IsNull() || value->IsJSReceiver());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(this, kPrototypeOffset, value, mode);
}

LayoutDescriptor* Map::layout_descriptor_gc_safe() const {
  DCHECK(FLAG_unbox_double_fields);
  // The loaded value can be dereferenced on a background thread to load the
  // bitmap. We need an acquire load in order to ensure that the bitmap
  // initializing stores are also visible to the background thread.
  Object* layout_desc = ACQUIRE_READ_FIELD(this, kLayoutDescriptorOffset);
  return LayoutDescriptor::cast_gc_safe(layout_desc);
}

bool Map::HasFastPointerLayout() const {
  DCHECK(FLAG_unbox_double_fields);
  // The loaded value is only used for a Smi check and is not dereferenced,
  // so a relaxed load is safe.
  Object* layout_desc = RELAXED_READ_FIELD(this, kLayoutDescriptorOffset);
  return LayoutDescriptor::IsFastPointerLayout(layout_desc);
}

void Map::UpdateDescriptors(DescriptorArray* descriptors,
                            LayoutDescriptor* layout_desc) {
  set_instance_descriptors(descriptors);
  if (FLAG_unbox_double_fields) {
    if (layout_descriptor()->IsSlowLayout()) {
      set_layout_descriptor(layout_desc);
    }
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(this));
      CHECK_EQ(Map::GetVisitorId(this), visitor_id());
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
    DCHECK(visitor_id() == Map::GetVisitorId(this));
#endif
  }
}

void Map::InitializeDescriptors(DescriptorArray* descriptors,
                                LayoutDescriptor* layout_desc) {
  int len = descriptors->number_of_descriptors();
  set_instance_descriptors(descriptors);
  SetNumberOfOwnDescriptors(len);

  if (FLAG_unbox_double_fields) {
    set_layout_descriptor(layout_desc);
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(this));
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
#endif
    set_visitor_id(Map::GetVisitorId(this));
  }
}

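// bit_field3 occupies the low 32 bits of a pointer-aligned slot; on 64-bit
// targets the upper half is explicitly zeroed so the whole word has a
// deterministic value.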
void Map::set_bit_field3(uint32_t bits) {
  if (kInt32Size != kPointerSize) {
    WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
  }
  WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
}

uint32_t Map::bit_field3() const {
  return READ_UINT32_FIELD(this, kBitField3Offset);
}

LayoutDescriptor* Map::GetLayoutDescriptor() const {
  return FLAG_unbox_double_fields ? layout_descriptor()
                                  : LayoutDescriptor::FastPointerLayout();
}

void Map::AppendDescriptor(Descriptor* desc) {
  DescriptorArray* descriptors = instance_descriptors();
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
  descriptors->Append(desc);
  SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);

  // Properly mark the map if the {desc} is an "interesting symbol".
  if (desc->GetKey()->IsInterestingSymbol()) {
    set_may_have_interesting_symbols(true);
  }
  PropertyDetails details = desc->GetDetails();
  if (details.location() == kField) {
    DCHECK_GT(UnusedPropertyFields(), 0);
    AccountAddedPropertyField();
  }

// This function does not support appending double field descriptors and
// it should never try to (otherwise, the layout descriptor must be updated
// too).
#ifdef DEBUG
  DCHECK(details.location() != kField || !details.representation().IsDouble());
#endif
}

Object* Map::GetBackPointer() const {
  Object* object = constructor_or_backpointer();
  if (object->IsMap()) {
    return object;
  }
  return GetReadOnlyRoots().undefined_value();
}

Map* Map::ElementsTransitionMap() {
  DisallowHeapAllocation no_gc;
  // TODO(delphick): While it's safe to pass nullptr for Isolate* here as
  // SearchSpecial doesn't need it, this is really ugly. Perhaps factor out a
  // base class for methods not requiring an Isolate?
  return TransitionsAccessor(nullptr, this, &no_gc)
      .SearchSpecial(GetReadOnlyRoots().elements_transition_symbol());
}

Object* Map::prototype_info() const {
  DCHECK(is_prototype_map());
  return READ_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset);
}

void Map::set_prototype_info(Object* value, WriteBarrierMode mode) {
  CHECK(is_prototype_map());
  WRITE_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset, value);
  CONDITIONAL_WRITE_BARRIER(this, Map::kTransitionsOrPrototypeInfoOffset, value,
                            mode);
}

void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
  CHECK_GE(instance_type(), FIRST_JS_RECEIVER_TYPE);
  CHECK(value->IsMap());
  CHECK(GetBackPointer()->IsUndefined());
  CHECK_IMPLIES(value->IsMap(), Map::cast(value)->GetConstructor() ==
                                    constructor_or_backpointer());
  set_constructor_or_backpointer(value, mode);
}

ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, prototype_validity_cell, Object, kPrototypeValidityCellOffset)
ACCESSORS(Map, constructor_or_backpointer, Object,
          kConstructorOrBackPointerOffset)

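// The validity cell is stored either directly as a Smi sentinel or as a Cell
// whose value is invalidated when the prototype chain changes; both cases
// compare against kPrototypeChainValid.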
bool Map::IsPrototypeValidityCellValid() const {
  Object* validity_cell = prototype_validity_cell();
  Object* value = validity_cell->IsSmi() ? Smi::cast(validity_cell)
                                         : Cell::cast(validity_cell)->value();
  return value == Smi::FromInt(Map::kPrototypeChainValid);
}

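// constructor_or_backpointer is overloaded: a transitioned map stores its
// parent map in this slot, and only the root of the back pointer chain holds
// the actual constructor. A lookup therefore walks the chain, e.g.:
//   map3 -> map2 -> map1 -> JSFunction  (the constructor returned here)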
Object* Map::GetConstructor() const {
  Object* maybe_constructor = constructor_or_backpointer();
  // Follow any back pointers.
  while (maybe_constructor->IsMap()) {
    maybe_constructor =
        Map::cast(maybe_constructor)->constructor_or_backpointer();
  }
  return maybe_constructor;
}

FunctionTemplateInfo* Map::GetFunctionTemplateInfo() const {
  Object* constructor = GetConstructor();
  if (constructor->IsJSFunction()) {
    DCHECK(JSFunction::cast(constructor)->shared()->IsApiFunction());
    return JSFunction::cast(constructor)->shared()->get_api_func_data();
  }
  DCHECK(constructor->IsFunctionTemplateInfo());
  return FunctionTemplateInfo::cast(constructor);
}

void Map::SetConstructor(Object* constructor, WriteBarrierMode mode) {
  // Never overwrite a back pointer with a constructor.
  CHECK(!constructor_or_backpointer()->IsMap());
  set_constructor_or_backpointer(constructor, mode);
}

Handle<Map> Map::CopyInitialMap(Isolate* isolate, Handle<Map> map) {
  return CopyInitialMap(isolate, map, map->instance_size(),
                        map->GetInObjectProperties(),
                        map->UnusedPropertyFields());
}

bool Map::IsInobjectSlackTrackingInProgress() const {
  return construction_counter() != Map::kNoSlackTracking;
}

void Map::InobjectSlackTrackingStep(Isolate* isolate) {
  // Slack tracking should only be performed on an initial map.
  DCHECK(GetBackPointer()->IsUndefined());
  if (!IsInobjectSlackTrackingInProgress()) return;
  int counter = construction_counter();
  set_construction_counter(counter - 1);
  if (counter == kSlackTrackingCounterEnd) {
    CompleteInobjectSlackTracking(isolate);
  }
}

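// Grow by roughly a quarter of the old size, clamped to the remaining
// headroom; tiny arrays always get exactly one extra slot. For example,
// old_size == 8 with size_limit == 10 yields min(10 - 8, 8 / 4) == 2.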
int Map::SlackForArraySize(int old_size, int size_limit) {
  const int max_slack = size_limit - old_size;
  CHECK_LE(0, max_slack);
  if (old_size < 4) {
    DCHECK_LE(1, max_slack);
    return 1;
  }
  return Min(max_slack, old_size / 4);
}

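// The cache is direct-mapped: a map's hash selects exactly one of the
// kEntries slots, so storing into an occupied slot overwrites it.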
int NormalizedMapCache::GetIndex(Handle<Map> map) {
  return map->Hash() % NormalizedMapCache::kEntries;
}

bool NormalizedMapCache::IsNormalizedMapCache(const HeapObject* obj) {
  if (!obj->IsWeakFixedArray()) return false;
  if (WeakFixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    NormalizedMapCache* cache =
        reinterpret_cast<NormalizedMapCache*>(const_cast<HeapObject*>(obj));
    cache->NormalizedMapCacheVerify(cache->GetIsolate());
  }
#endif
  return true;
}

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_MAP_INL_H_