// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/objects/map.h"

#include "src/execution/frames.h"
#include "src/execution/isolate.h"
#include "src/handles/handles-inl.h"
#include "src/handles/maybe-handles.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/init/bootstrapper.h"
#include "src/logging/log.h"
#include "src/objects/arguments-inl.h"
#include "src/objects/descriptor-array.h"
#include "src/objects/elements-kind.h"
#include "src/objects/field-type.h"
#include "src/objects/js-objects.h"
#include "src/objects/map-updater.h"
#include "src/objects/maybe-object.h"
#include "src/objects/oddball.h"
#include "src/objects/property.h"
#include "src/objects/transitions-inl.h"
#include "src/roots/roots.h"
#include "src/utils/ostreams.h"
#include "src/zone/zone-containers.h"
#include "torque-generated/field-offsets.h"

namespace v8 {
namespace internal {

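// Returns the root of this map's prototype chain: the map itself for
// JSReceivers, the initial map of the primitive's constructor function if it
// has one, and the null value's map otherwise.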
Map Map::GetPrototypeChainRootMap(Isolate* isolate) const {
  DisallowGarbageCollection no_alloc;
  if (IsJSReceiverMap()) {
    return *this;
  }
  int constructor_function_index = GetConstructorFunctionIndex();
  if (constructor_function_index != Map::kNoConstructorFunctionIndex) {
    Context native_context = isolate->context().native_context();
    JSFunction constructor_function =
        JSFunction::cast(native_context.get(constructor_function_index));
    return constructor_function.initial_map();
  }
  return ReadOnlyRoots(isolate).null_value().map();
}

// static
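// Returns the native-context constructor function for a primitive |map|, or
// an empty optional if |map| is not a primitive map or its primitive has no
// constructor function index (e.g. null and undefined).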
base::Optional<JSFunction> Map::GetConstructorFunction(Map map,
                                                       Context native_context) {
  DisallowGarbageCollection no_gc;
  if (map.IsPrimitiveMap()) {
    int const constructor_function_index = map.GetConstructorFunctionIndex();
    if (constructor_function_index != kNoConstructorFunctionIndex) {
      return JSFunction::cast(native_context.get(constructor_function_index));
    }
  }
  return {};
}

Map Map::GetInstanceTypeMap(ReadOnlyRoots roots, InstanceType type) {
  Map map;
  switch (type) {
#define MAKE_CASE(TYPE, Name, name) \
  case TYPE:                        \
    map = roots.name##_map();       \
    break;
    STRUCT_LIST(MAKE_CASE)
#undef MAKE_CASE
#define MAKE_CASE(TYPE, Name, name) \
  case TYPE:                        \
    map = roots.name##_map();       \
    break;
    TORQUE_DEFINED_INSTANCE_TYPE_LIST(MAKE_CASE)
#undef MAKE_CASE
    default:
      UNREACHABLE();
  }
  return map;
}

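// Maps an instance type to the VisitorId that the GC uses to pick the
// object's body descriptor. Strings are dispatched on their representation
// and encoding bits; all other objects are dispatched on the instance type.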
VisitorId Map::GetVisitorId(Map map) {
  STATIC_ASSERT(kVisitorIdCount <= 256);

  const int instance_type = map.instance_type();

  if (instance_type < FIRST_NONSTRING_TYPE) {
    switch (instance_type & kStringRepresentationMask) {
      case kSeqStringTag:
        if ((instance_type & kStringEncodingMask) == kOneByteStringTag) {
          return kVisitSeqOneByteString;
        } else {
          return kVisitSeqTwoByteString;
        }

      case kConsStringTag:
        if (IsShortcutCandidate(instance_type)) {
          return kVisitShortcutCandidate;
        } else {
          return kVisitConsString;
        }

      case kSlicedStringTag:
        return kVisitSlicedString;

      case kExternalStringTag:
        return kVisitDataObject;

      case kThinStringTag:
        return kVisitThinString;
    }
    UNREACHABLE();
  }

  switch (instance_type) {
    case BYTE_ARRAY_TYPE:
      return kVisitByteArray;

    case BYTECODE_ARRAY_TYPE:
      return kVisitBytecodeArray;

    case FREE_SPACE_TYPE:
      return kVisitFreeSpace;

    case EMBEDDER_DATA_ARRAY_TYPE:
      return kVisitEmbedderDataArray;

    case OBJECT_BOILERPLATE_DESCRIPTION_TYPE:
    case CLOSURE_FEEDBACK_CELL_ARRAY_TYPE:
    case HASH_TABLE_TYPE:
    case ORDERED_HASH_MAP_TYPE:
    case ORDERED_HASH_SET_TYPE:
    case ORDERED_NAME_DICTIONARY_TYPE:
    case NAME_DICTIONARY_TYPE:
    case GLOBAL_DICTIONARY_TYPE:
    case NUMBER_DICTIONARY_TYPE:
    case SIMPLE_NUMBER_DICTIONARY_TYPE:
    case SCRIPT_CONTEXT_TABLE_TYPE:
      return kVisitFixedArray;

    case AWAIT_CONTEXT_TYPE:
    case BLOCK_CONTEXT_TYPE:
    case CATCH_CONTEXT_TYPE:
    case DEBUG_EVALUATE_CONTEXT_TYPE:
    case EVAL_CONTEXT_TYPE:
    case FUNCTION_CONTEXT_TYPE:
    case MODULE_CONTEXT_TYPE:
    case SCRIPT_CONTEXT_TYPE:
    case WITH_CONTEXT_TYPE:
      return kVisitContext;

    case NATIVE_CONTEXT_TYPE:
      return kVisitNativeContext;

    case EPHEMERON_HASH_TABLE_TYPE:
      return kVisitEphemeronHashTable;

    case FIXED_DOUBLE_ARRAY_TYPE:
      return kVisitFixedDoubleArray;

    case PROPERTY_ARRAY_TYPE:
      return kVisitPropertyArray;

    case FEEDBACK_CELL_TYPE:
      return kVisitFeedbackCell;

    case FEEDBACK_METADATA_TYPE:
      return kVisitFeedbackMetadata;

    case MAP_TYPE:
      return kVisitMap;

    case CODE_TYPE:
      return kVisitCode;

    case CELL_TYPE:
      return kVisitCell;

    case PROPERTY_CELL_TYPE:
      return kVisitPropertyCell;

    case TRANSITION_ARRAY_TYPE:
      return kVisitTransitionArray;

    case JS_WEAK_MAP_TYPE:
    case JS_WEAK_SET_TYPE:
      return kVisitJSWeakCollection;

    case CALL_HANDLER_INFO_TYPE:
      return kVisitStruct;

    case JS_PROXY_TYPE:
      return kVisitStruct;

    case SYMBOL_TYPE:
      return kVisitSymbol;

    case JS_ARRAY_BUFFER_TYPE:
      return kVisitJSArrayBuffer;

    case JS_DATA_VIEW_TYPE:
      return kVisitJSDataView;

    case JS_FUNCTION_TYPE:
    case JS_PROMISE_CONSTRUCTOR_TYPE:
    case JS_REG_EXP_CONSTRUCTOR_TYPE:
    case JS_ARRAY_CONSTRUCTOR_TYPE:
#define TYPED_ARRAY_CONSTRUCTORS_SWITCH(Type, type, TYPE, Ctype) \
  case TYPE##_TYPED_ARRAY_CONSTRUCTOR_TYPE:
      TYPED_ARRAYS(TYPED_ARRAY_CONSTRUCTORS_SWITCH)
#undef TYPED_ARRAY_CONSTRUCTORS_SWITCH
      return kVisitJSFunction;

    case JS_TYPED_ARRAY_TYPE:
      return kVisitJSTypedArray;

    case SMALL_ORDERED_HASH_MAP_TYPE:
      return kVisitSmallOrderedHashMap;

    case SMALL_ORDERED_HASH_SET_TYPE:
      return kVisitSmallOrderedHashSet;

    case SMALL_ORDERED_NAME_DICTIONARY_TYPE:
      return kVisitSmallOrderedNameDictionary;

    case SWISS_NAME_DICTIONARY_TYPE:
      return kVisitSwissNameDictionary;

    case CODE_DATA_CONTAINER_TYPE:
      return kVisitCodeDataContainer;

    case PREPARSE_DATA_TYPE:
      return kVisitPreparseData;

    case COVERAGE_INFO_TYPE:
      return kVisitCoverageInfo;

    case JS_ARGUMENTS_OBJECT_TYPE:
    case JS_ARRAY_ITERATOR_PROTOTYPE_TYPE:
    case JS_ARRAY_ITERATOR_TYPE:
    case JS_ARRAY_TYPE:
    case JS_ASYNC_FROM_SYNC_ITERATOR_TYPE:
    case JS_ASYNC_FUNCTION_OBJECT_TYPE:
    case JS_ASYNC_GENERATOR_OBJECT_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_DATE_TYPE:
    case JS_ERROR_TYPE:
    case JS_FINALIZATION_REGISTRY_TYPE:
    case JS_GENERATOR_OBJECT_TYPE:
    case JS_ITERATOR_PROTOTYPE_TYPE:
    case JS_MAP_ITERATOR_PROTOTYPE_TYPE:
    case JS_MAP_KEY_ITERATOR_TYPE:
    case JS_MAP_KEY_VALUE_ITERATOR_TYPE:
    case JS_MAP_TYPE:
    case JS_MAP_VALUE_ITERATOR_TYPE:
    case JS_MESSAGE_OBJECT_TYPE:
    case JS_MODULE_NAMESPACE_TYPE:
    case JS_OBJECT_PROTOTYPE_TYPE:
    case JS_OBJECT_TYPE:
    case JS_PRIMITIVE_WRAPPER_TYPE:
    case JS_PROMISE_PROTOTYPE_TYPE:
    case JS_PROMISE_TYPE:
    case JS_REG_EXP_PROTOTYPE_TYPE:
    case JS_REG_EXP_STRING_ITERATOR_TYPE:
    case JS_REG_EXP_TYPE:
    case JS_SET_ITERATOR_PROTOTYPE_TYPE:
    case JS_SET_KEY_VALUE_ITERATOR_TYPE:
    case JS_SET_PROTOTYPE_TYPE:
    case JS_SET_TYPE:
    case JS_SET_VALUE_ITERATOR_TYPE:
    case JS_STRING_ITERATOR_PROTOTYPE_TYPE:
    case JS_STRING_ITERATOR_TYPE:
    case JS_TYPED_ARRAY_PROTOTYPE_TYPE:
#ifdef V8_INTL_SUPPORT
    case JS_V8_BREAK_ITERATOR_TYPE:
    case JS_COLLATOR_TYPE:
    case JS_DATE_TIME_FORMAT_TYPE:
    case JS_DISPLAY_NAMES_TYPE:
    case JS_LIST_FORMAT_TYPE:
    case JS_LOCALE_TYPE:
    case JS_NUMBER_FORMAT_TYPE:
    case JS_PLURAL_RULES_TYPE:
    case JS_RELATIVE_TIME_FORMAT_TYPE:
    case JS_SEGMENT_ITERATOR_TYPE:
    case JS_SEGMENTER_TYPE:
    case JS_SEGMENTS_TYPE:
#endif  // V8_INTL_SUPPORT
#if V8_ENABLE_WEBASSEMBLY
    case WASM_EXCEPTION_OBJECT_TYPE:
    case WASM_GLOBAL_OBJECT_TYPE:
    case WASM_MEMORY_OBJECT_TYPE:
    case WASM_MODULE_OBJECT_TYPE:
    case WASM_TABLE_OBJECT_TYPE:
    case WASM_VALUE_OBJECT_TYPE:
#endif  // V8_ENABLE_WEBASSEMBLY
    case JS_BOUND_FUNCTION_TYPE: {
      const bool has_raw_data_fields =
          COMPRESS_POINTERS_BOOL && JSObject::GetEmbedderFieldCount(map) > 0;
      return has_raw_data_fields ? kVisitJSObject : kVisitJSObjectFast;
    }
    case JS_API_OBJECT_TYPE:
    case JS_GLOBAL_PROXY_TYPE:
    case JS_GLOBAL_OBJECT_TYPE:
    case JS_SPECIAL_API_OBJECT_TYPE:
      return kVisitJSApiObject;

    case JS_WEAK_REF_TYPE:
      return kVisitJSWeakRef;

    case WEAK_CELL_TYPE:
      return kVisitWeakCell;

    case FILLER_TYPE:
    case FOREIGN_TYPE:
    case HEAP_NUMBER_TYPE:
      return kVisitDataObject;

    case BIGINT_TYPE:
      return kVisitBigInt;

    case ALLOCATION_SITE_TYPE:
      return kVisitAllocationSite;

#define MAKE_STRUCT_CASE(TYPE, Name, name) case TYPE:
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      if (instance_type == PROTOTYPE_INFO_TYPE) {
        return kVisitPrototypeInfo;
      }
#if V8_ENABLE_WEBASSEMBLY
      if (instance_type == WASM_CAPI_FUNCTION_DATA_TYPE) {
        return kVisitWasmCapiFunctionData;
      }
      if (instance_type == WASM_INDIRECT_FUNCTION_TABLE_TYPE) {
        return kVisitWasmIndirectFunctionTable;
      }
#endif  // V8_ENABLE_WEBASSEMBLY
      return kVisitStruct;

    case LOAD_HANDLER_TYPE:
    case STORE_HANDLER_TYPE:
      return kVisitDataHandler;

    case SOURCE_TEXT_MODULE_TYPE:
      return kVisitSourceTextModule;
    case SYNTHETIC_MODULE_TYPE:
      return kVisitSyntheticModule;

#if V8_ENABLE_WEBASSEMBLY
    case WASM_INSTANCE_OBJECT_TYPE:
      return kVisitWasmInstanceObject;
    case WASM_ARRAY_TYPE:
      return kVisitWasmArray;
    case WASM_STRUCT_TYPE:
      return kVisitWasmStruct;
    case WASM_TYPE_INFO_TYPE:
      return kVisitWasmTypeInfo;
#endif  // V8_ENABLE_WEBASSEMBLY

#define MAKE_TQ_CASE(TYPE, Name) \
  case TYPE:                     \
    return kVisit##Name;
      TORQUE_INSTANCE_TYPE_TO_BODY_DESCRIPTOR_LIST(MAKE_TQ_CASE)
#undef MAKE_TQ_CASE

    default:
      UNREACHABLE();
  }
}

// static
MaybeObjectHandle Map::WrapFieldType(Isolate* isolate, Handle<FieldType> type) {
  if (type->IsClass()) {
    return MaybeObjectHandle::Weak(type->AsClass(), isolate);
  }
  return MaybeObjectHandle(type);
}

// static
FieldType Map::UnwrapFieldType(MaybeObject wrapped_type) {
  if (wrapped_type->IsCleared()) {
    return FieldType::None();
  }
  HeapObject heap_object;
  if (wrapped_type->GetHeapObjectIfWeak(&heap_object)) {
    return FieldType::cast(heap_object);
  }
  return wrapped_type->cast<FieldType>();
}

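// Copies |map| and appends a new data-field descriptor for |name|. Returns an
// empty handle if the descriptor array would grow beyond
// kMaxNumberOfDescriptors. Fields of context extension objects are always
// kept mutable, tagged and typed as Any.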
MaybeHandle<Map> Map::CopyWithField(Isolate* isolate, Handle<Map> map,
                                    Handle<Name> name, Handle<FieldType> type,
                                    PropertyAttributes attributes,
                                    PropertyConstness constness,
                                    Representation representation,
                                    TransitionFlag flag) {
  DCHECK(map->instance_descriptors(isolate)
             .Search(*name, map->NumberOfOwnDescriptors())
             .is_not_found());

  // Ensure the descriptor array does not get too big.
  if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors) {
    return MaybeHandle<Map>();
  }

  // Compute the new index for new field.
  int index = map->NextFreePropertyIndex();

  if (map->instance_type() == JS_CONTEXT_EXTENSION_OBJECT_TYPE) {
    constness = PropertyConstness::kMutable;
    representation = Representation::Tagged();
    type = FieldType::Any(isolate);
  } else {
    Map::GeneralizeIfCanHaveTransitionableFastElementsKind(
        isolate, map->instance_type(), &representation, &type);
  }

  MaybeObjectHandle wrapped_type = WrapFieldType(isolate, type);

  Descriptor d = Descriptor::DataField(name, index, attributes, constness,
                                       representation, wrapped_type);
  Handle<Map> new_map = Map::CopyAddDescriptor(isolate, map, &d, flag);
  new_map->AccountAddedPropertyField();
  return new_map;
}

MaybeHandle<Map> Map::CopyWithConstant(Isolate* isolate, Handle<Map> map,
                                       Handle<Name> name,
                                       Handle<Object> constant,
                                       PropertyAttributes attributes,
                                       TransitionFlag flag) {
  // Ensure the descriptor array does not get too big.
  if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors) {
    return MaybeHandle<Map>();
  }

  Representation representation = constant->OptimalRepresentation(isolate);
  Handle<FieldType> type = constant->OptimalType(isolate, representation);
  return CopyWithField(isolate, map, name, type, attributes,
                       PropertyConstness::kConst, representation, flag);
}

bool Map::InstancesNeedRewriting(Map target) const {
  int target_number_of_fields = target.NumberOfFields();
  int target_inobject = target.GetInObjectProperties();
  int target_unused = target.UnusedPropertyFields();
  int old_number_of_fields;

  return InstancesNeedRewriting(target, target_number_of_fields,
                                target_inobject, target_unused,
                                &old_number_of_fields);
}

bool Map::InstancesNeedRewriting(Map target, int target_number_of_fields,
                                 int target_inobject, int target_unused,
                                 int* old_number_of_fields) const {
  // If fields were added (or removed), rewrite the instance.
  *old_number_of_fields = NumberOfFields();
  DCHECK(target_number_of_fields >= *old_number_of_fields);
  if (target_number_of_fields != *old_number_of_fields) return true;

  // If smi descriptors were replaced by double descriptors, rewrite.
  DescriptorArray old_desc = instance_descriptors();
  DescriptorArray new_desc = target.instance_descriptors();
  for (InternalIndex i : IterateOwnDescriptors()) {
    if (new_desc.GetDetails(i).representation().IsDouble() !=
        old_desc.GetDetails(i).representation().IsDouble()) {
      return true;
    }
  }

  // If no fields were added, and no inobject properties were removed, setting
  // the map is sufficient.
  if (target_inobject == GetInObjectProperties()) return false;
  // In-object slack tracking may have reduced the object size of the new map.
  // In that case, succeed if all existing fields were inobject, and they still
  // fit within the new inobject size.
  DCHECK(target_inobject < GetInObjectProperties());
  if (target_number_of_fields <= target_inobject) {
    DCHECK(target_number_of_fields + target_unused == target_inobject);
    return false;
  }
  // Otherwise, properties will need to be moved to the backing store.
  return true;
}

int Map::NumberOfFields() const {
  DescriptorArray descriptors = instance_descriptors();
  int result = 0;
  for (InternalIndex i : IterateOwnDescriptors()) {
    if (descriptors.GetDetails(i).location() == kField) result++;
  }
  return result;
}

Map::FieldCounts Map::GetFieldCounts() const {
  DescriptorArray descriptors = instance_descriptors();
  int mutable_count = 0;
  int const_count = 0;
  for (InternalIndex i : IterateOwnDescriptors()) {
    PropertyDetails details = descriptors.GetDetails(i);
    if (details.location() == kField) {
      switch (details.constness()) {
        case PropertyConstness::kMutable:
          mutable_count++;
          break;
        case PropertyConstness::kConst:
          const_count++;
          break;
      }
    }
  }
  return FieldCounts(mutable_count, const_count);
}

bool Map::HasOutOfObjectProperties() const {
  return GetInObjectProperties() < NumberOfFields();
}

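// Marks this map and every map in its transition tree as deprecated and
// deoptimizes code that depends on the transition group, so that live
// instances can later be migrated to an up-to-date map.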
void Map::DeprecateTransitionTree(Isolate* isolate) {
  if (is_deprecated()) return;
  DisallowGarbageCollection no_gc;
  TransitionsAccessor transitions(isolate, *this, &no_gc);
  int num_transitions = transitions.NumberOfTransitions();
  for (int i = 0; i < num_transitions; ++i) {
    transitions.GetTarget(i).DeprecateTransitionTree(isolate);
  }
  DCHECK(!constructor_or_back_pointer().IsFunctionTemplateInfo());
  DCHECK(CanBeDeprecated());
  set_is_deprecated(true);
  if (FLAG_log_maps) {
    LOG(isolate, MapEvent("Deprecate", handle(*this, isolate), Handle<Map>()));
  }
  dependent_code().DeoptimizeDependentCodeGroup(
      DependentCode::kTransitionGroup);
  NotifyLeafMapLayoutChange(isolate);
}

// Installs |new_descriptors| over the current instance_descriptors to ensure
// proper sharing of descriptor arrays.
void Map::ReplaceDescriptors(Isolate* isolate,
                             DescriptorArray new_descriptors) {
  // Don't overwrite the empty descriptor array or initial map's descriptors.
  if (NumberOfOwnDescriptors() == 0 ||
      GetBackPointer(isolate).IsUndefined(isolate)) {
    return;
  }

  DescriptorArray to_replace = instance_descriptors(isolate);
  // Replace descriptors by new_descriptors in all maps that share it. The old
  // descriptors will not be trimmed by the mark-compactor, so we need to mark
  // all of their elements.
  Map current = *this;
#ifndef V8_DISABLE_WRITE_BARRIERS
  WriteBarrier::Marking(to_replace, to_replace.number_of_descriptors());
#endif
  while (current.instance_descriptors(isolate) == to_replace) {
    Object next = current.GetBackPointer(isolate);
    if (next.IsUndefined(isolate)) break;  // Stop overwriting at initial map.
    current.SetEnumLength(kInvalidEnumCacheSentinel);
    current.UpdateDescriptors(isolate, new_descriptors,
                              current.NumberOfOwnDescriptors());
    current = Map::cast(next);
  }
  set_owns_descriptors(false);
}

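// Follows the back pointer chain to the root (initial) map of this map's
// transition tree.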
Map Map::FindRootMap(Isolate* isolate) const {
  DisallowGarbageCollection no_gc;
  Map result = *this;
  while (true) {
    Object back = result.GetBackPointer(isolate);
    if (back.IsUndefined(isolate)) {
      // Initial map must not contain descriptors in the descriptors array
      // that do not belong to the map.
      DCHECK_LE(result.NumberOfOwnDescriptors(),
                result.instance_descriptors(isolate, kRelaxedLoad)
                    .number_of_descriptors());
      return result;
    }
    result = Map::cast(back);
  }
}

Map Map::FindFieldOwner(Isolate* isolate, InternalIndex descriptor) const {
  DisallowGarbageCollection no_gc;
  DCHECK_EQ(kField, instance_descriptors(isolate, kRelaxedLoad)
                        .GetDetails(descriptor)
                        .location());
  Map result = *this;
  while (true) {
    Object back = result.GetBackPointer(isolate);
    if (back.IsUndefined(isolate)) break;
    const Map parent = Map::cast(back);
    if (parent.NumberOfOwnDescriptors() <= descriptor.as_int()) break;
    result = parent;
  }
  return result;
}

namespace {

Map SearchMigrationTarget(Isolate* isolate, Map old_map) {
  DisallowGarbageCollection no_gc;
  DisallowDeoptimization no_deoptimization(isolate);

  Map target = old_map;
  do {
    target = TransitionsAccessor(isolate, target, &no_gc).GetMigrationTarget();
  } while (!target.is_null() && target.is_deprecated());
  if (target.is_null()) return Map();

  // TODO(ishell): if this validation ever become a bottleneck consider adding a
  // bit to the Map telling whether it contains fields whose field types may be
  // cleared.
  // TODO(ishell): revisit handling of cleared field types in
  // TryReplayPropertyTransitions() and consider checking the target map's field
  // types instead of old_map's types.
  // Go to slow map updating if the old_map has fast properties with cleared
  // field types.
  DescriptorArray old_descriptors = old_map.instance_descriptors(isolate);
  for (InternalIndex i : old_map.IterateOwnDescriptors()) {
    PropertyDetails old_details = old_descriptors.GetDetails(i);
    if (old_details.location() == kField && old_details.kind() == kData) {
      FieldType old_type = old_descriptors.GetFieldType(i);
      if (Map::FieldTypeIsCleared(old_details.representation(), old_type)) {
        return Map();
      }
    }
  }

  SLOW_DCHECK(Map::TryUpdateSlow(isolate, old_map) == target);
  return target;
}
}  // namespace

// TODO(ishell): Move TryUpdate() and friends to MapUpdater
// static
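// Tries to map a deprecated |old_map| onto an existing up-to-date map by
// replaying its transitions, optionally consulting the cached migration
// target. Never creates new maps; returns an empty handle if no suitable
// existing map is found.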
MaybeHandle<Map> Map::TryUpdate(Isolate* isolate, Handle<Map> old_map) {
  DisallowGarbageCollection no_gc;
  DisallowDeoptimization no_deoptimization(isolate);

  if (!old_map->is_deprecated()) return old_map;

  if (FLAG_fast_map_update) {
    Map target_map = SearchMigrationTarget(isolate, *old_map);
    if (!target_map.is_null()) {
      return handle(target_map, isolate);
    }
  }

  Map new_map = TryUpdateSlow(isolate, *old_map);
  if (new_map.is_null()) return MaybeHandle<Map>();
  if (FLAG_fast_map_update) {
    TransitionsAccessor(isolate, *old_map, &no_gc).SetMigrationTarget(new_map);
  }
  return handle(new_map, isolate);
}

namespace {

struct IntegrityLevelTransitionInfo {
  explicit IntegrityLevelTransitionInfo(Map map)
      : integrity_level_source_map(map) {}

  bool has_integrity_level_transition = false;
  PropertyAttributes integrity_level = NONE;
  Map integrity_level_source_map;
  Symbol integrity_level_symbol;
};

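// Walks the back pointer chain of a non-extensible |map| and collects the
// most restrictive integrity level transition (the last one in the transition
// tree), bailing out if any other kind of transition is interleaved with it.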
IntegrityLevelTransitionInfo DetectIntegrityLevelTransitions(
    Map map, Isolate* isolate, DisallowGarbageCollection* no_gc) {
  IntegrityLevelTransitionInfo info(map);

  // Figure out the most restrictive integrity level transition (it should
  // be the last one in the transition tree).
  DCHECK(!map.is_extensible());
  Map previous = Map::cast(map.GetBackPointer(isolate));
  TransitionsAccessor last_transitions(isolate, previous, no_gc);
  if (!last_transitions.HasIntegrityLevelTransitionTo(
          map, &(info.integrity_level_symbol), &(info.integrity_level))) {
    // The last transition was not an integrity level transition - just bail
    // out. This can happen in the following cases:
    // - there are private symbol transitions following the integrity level
    //   transitions (see crbug.com/v8/8854).
    // - there is a getter added in addition to an existing setter (or a setter
    //   in addition to an existing getter).
    return info;
  }

  Map source_map = previous;
  // Now walk up the back pointer chain and skip all integrity level
  // transitions. If we encounter any non-integrity level transition interleaved
  // with integrity level transitions, just bail out.
  while (!source_map.is_extensible()) {
    previous = Map::cast(source_map.GetBackPointer(isolate));
    TransitionsAccessor transitions(isolate, previous, no_gc);
    if (!transitions.HasIntegrityLevelTransitionTo(source_map)) {
      return info;
    }
    source_map = previous;
  }

  // Integrity-level transitions never change number of descriptors.
  CHECK_EQ(map.NumberOfOwnDescriptors(), source_map.NumberOfOwnDescriptors());

  info.has_integrity_level_transition = true;
  info.integrity_level_source_map = source_map;
  return info;
}

}  // namespace

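// Slow path of TryUpdate(): if the root map itself is deprecated, falls back
// to the constructor's initial map; otherwise replays |old_map|'s elements
// kind transition, its recorded property transitions and, finally, any
// integrity level transition. Returns an empty Map on any mismatch.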
Map Map::TryUpdateSlow(Isolate* isolate, Map old_map) {
  DisallowGarbageCollection no_gc;
  DisallowDeoptimization no_deoptimization(isolate);

  // Check the state of the root map.
  Map root_map = old_map.FindRootMap(isolate);
  if (root_map.is_deprecated()) {
    JSFunction constructor = JSFunction::cast(root_map.GetConstructor());
    DCHECK(constructor.has_initial_map());
    DCHECK(constructor.initial_map().is_dictionary_map());
    if (constructor.initial_map().elements_kind() != old_map.elements_kind()) {
      return Map();
    }
    return constructor.initial_map();
  }
  if (!old_map.EquivalentToForTransition(root_map)) return Map();

  ElementsKind from_kind = root_map.elements_kind();
  ElementsKind to_kind = old_map.elements_kind();

  IntegrityLevelTransitionInfo info(old_map);
  if (root_map.is_extensible() != old_map.is_extensible()) {
    DCHECK(!old_map.is_extensible());
    DCHECK(root_map.is_extensible());
    info = DetectIntegrityLevelTransitions(old_map, isolate, &no_gc);
    // Bail out if there were some private symbol transitions mixed up
    // with the integrity level transitions.
    if (!info.has_integrity_level_transition) return Map();
    // Make sure to replay the original elements kind transitions, before
    // the integrity level transition sets the elements to dictionary mode.
    DCHECK(to_kind == DICTIONARY_ELEMENTS ||
           to_kind == SLOW_STRING_WRAPPER_ELEMENTS ||
           IsTypedArrayElementsKind(to_kind) ||
           IsAnyHoleyNonextensibleElementsKind(to_kind));
    to_kind = info.integrity_level_source_map.elements_kind();
  }
  if (from_kind != to_kind) {
    // Try to follow existing elements kind transitions.
    root_map = root_map.LookupElementsTransitionMap(isolate, to_kind);
    if (root_map.is_null()) return Map();
    // From here on, use the map with correct elements kind as root map.
  }

  // Replay the transitions as they were before the integrity level transition.
  Map result = root_map.TryReplayPropertyTransitions(
      isolate, info.integrity_level_source_map);
  if (result.is_null()) return Map();

  if (info.has_integrity_level_transition) {
    // Now replay the integrity level transition.
    result = TransitionsAccessor(isolate, result, &no_gc)
                 .SearchSpecial(info.integrity_level_symbol);
  }

  DCHECK_IMPLIES(!result.is_null(),
                 old_map.elements_kind() == result.elements_kind());
  DCHECK_IMPLIES(!result.is_null(),
                 old_map.instance_type() == result.instance_type());
  return result;
}

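// Starting from this map (which must share its root with |old_map|), follows
// |old_map|'s property transitions descriptor by descriptor. Returns an empty
// Map if a transition is missing or the descriptor details and field types
// are incompatible.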
Map Map::TryReplayPropertyTransitions(Isolate* isolate, Map old_map) {
  DisallowGarbageCollection no_gc;
  DisallowDeoptimization no_deoptimization(isolate);

  int root_nof = NumberOfOwnDescriptors();

  int old_nof = old_map.NumberOfOwnDescriptors();
  DescriptorArray old_descriptors = old_map.instance_descriptors(isolate);

  Map new_map = *this;
  for (InternalIndex i : InternalIndex::Range(root_nof, old_nof)) {
    PropertyDetails old_details = old_descriptors.GetDetails(i);
    Map transition =
        TransitionsAccessor(isolate, new_map, &no_gc)
            .SearchTransition(old_descriptors.GetKey(i), old_details.kind(),
                              old_details.attributes());
    if (transition.is_null()) return Map();
    new_map = transition;
    DescriptorArray new_descriptors = new_map.instance_descriptors(isolate);

    PropertyDetails new_details = new_descriptors.GetDetails(i);
    DCHECK_EQ(old_details.kind(), new_details.kind());
    DCHECK_EQ(old_details.attributes(), new_details.attributes());
    if (!IsGeneralizableTo(old_details.constness(), new_details.constness())) {
      return Map();
    }
    DCHECK(IsGeneralizableTo(old_details.location(), new_details.location()));
    if (!old_details.representation().fits_into(new_details.representation())) {
      return Map();
    }
    if (new_details.location() == kField) {
      if (new_details.kind() == kData) {
        FieldType new_type = new_descriptors.GetFieldType(i);
        // Cleared field types need special treatment. They represent lost
        // knowledge, so we must first generalize the new_type to "Any".
        if (FieldTypeIsCleared(new_details.representation(), new_type)) {
          return Map();
        }
        DCHECK_EQ(kData, old_details.kind());
        DCHECK_EQ(kField, old_details.location());
        FieldType old_type = old_descriptors.GetFieldType(i);
        if (FieldTypeIsCleared(old_details.representation(), old_type) ||
            !old_type.NowIs(new_type)) {
          return Map();
        }
      } else {
        DCHECK_EQ(kAccessor, new_details.kind());
#ifdef DEBUG
        FieldType new_type = new_descriptors.GetFieldType(i);
        DCHECK(new_type.IsAny());
#endif
        UNREACHABLE();
      }
    } else {
      DCHECK_EQ(kDescriptor, new_details.location());
      if (old_details.location() == kField ||
          old_descriptors.GetStrongValue(i) !=
              new_descriptors.GetStrongValue(i)) {
        return Map();
      }
    }
  }
  if (new_map.NumberOfOwnDescriptors() != old_nof) return Map();
  return new_map;
}

// static
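// Returns an up-to-date version of |map|. Unlike TryUpdate(), this may create
// new maps via MapUpdater. Illustrative call site (not from this file,
// assuming |object| is a Handle<JSObject>):
//   Handle<Map> updated = Map::Update(isolate, handle(object->map(), isolate));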
Handle<Map> Map::Update(Isolate* isolate, Handle<Map> map) {
  if (!map->is_deprecated()) return map;
  if (FLAG_fast_map_update) {
    Map target_map = SearchMigrationTarget(isolate, *map);
    if (!target_map.is_null()) {
      return handle(target_map, isolate);
    }
  }
  MapUpdater mu(isolate, map);
  return mu.Update();
}

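// Ensures the descriptor array owned by |map| has room for at least |slack|
// additional descriptors, copying it if necessary and installing the copy in
// every map that currently shares it.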
void Map::EnsureDescriptorSlack(Isolate* isolate, Handle<Map> map, int slack) {
  // Only supports adding slack to owned descriptors.
  DCHECK(map->owns_descriptors());

  Handle<DescriptorArray> descriptors(map->instance_descriptors(isolate),
                                      isolate);
  int old_size = map->NumberOfOwnDescriptors();
  if (slack <= descriptors->number_of_slack_descriptors()) return;

  Handle<DescriptorArray> new_descriptors =
      DescriptorArray::CopyUpTo(isolate, descriptors, old_size, slack);

  DisallowGarbageCollection no_gc;
  if (old_size == 0) {
    map->UpdateDescriptors(isolate, *new_descriptors,
                           map->NumberOfOwnDescriptors());
    return;
  }

  // If the source descriptors had an enum cache we copy it. This ensures
  // that the maps to which we push the new descriptor array back can rely
  // on a cache always being available once it is set. If the map has more
  // enumerated descriptors than available in the original cache, the cache
  // will be lazily replaced by the extended cache when needed.
  new_descriptors->CopyEnumCacheFrom(*descriptors);

  // Replace descriptors by new_descriptors in all maps that share it. The old
  // descriptors will not be trimmed by the mark-compactor, so we need to mark
  // all of their elements.
#ifndef V8_DISABLE_WRITE_BARRIERS
  WriteBarrier::Marking(*descriptors, descriptors->number_of_descriptors());
#endif

  // Update the descriptors from {map} (inclusive) until the initial map
  // (exclusive). In the case that {map} is the initial map, update it.
  map->UpdateDescriptors(isolate, *new_descriptors,
                         map->NumberOfOwnDescriptors());
  Object next = map->GetBackPointer();
  if (next.IsUndefined(isolate)) return;

  Map current = Map::cast(next);
  while (current.instance_descriptors(isolate) == *descriptors) {
    next = current.GetBackPointer();
    if (next.IsUndefined(isolate)) break;
    current.UpdateDescriptors(isolate, *new_descriptors,
                              current.NumberOfOwnDescriptors());
    current = Map::cast(next);
  }
}

// static
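// Computes the map used by Object.create(prototype): the Object function's
// initial map if the prototype already matches, a slow map for a null
// prototype, a map cached on the prototype's PrototypeInfo for JSObject
// prototypes, and a prototype transition otherwise.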
Handle<Map> Map::GetObjectCreateMap(Isolate* isolate,
                                    Handle<HeapObject> prototype) {
  Handle<Map> map(isolate->native_context()->object_function().initial_map(),
                  isolate);
  if (map->prototype() == *prototype) return map;
  if (prototype->IsNull(isolate)) {
    return isolate->slow_object_with_null_prototype_map();
  }
  if (prototype->IsJSObject()) {
    Handle<JSObject> js_prototype = Handle<JSObject>::cast(prototype);
    if (!js_prototype->map().is_prototype_map()) {
      JSObject::OptimizeAsPrototype(js_prototype);
    }
    Handle<PrototypeInfo> info =
        Map::GetOrCreatePrototypeInfo(js_prototype, isolate);
    // TODO(verwaest): Use inobject slack tracking for this map.
    if (info->HasObjectCreateMap()) {
      map = handle(info->ObjectCreateMap(), isolate);
    } else {
      map = Map::CopyInitialMap(isolate, map);
      Map::SetPrototype(isolate, map, prototype);
      PrototypeInfo::SetObjectCreateMap(info, map);
    }
    return map;
  }

  return Map::TransitionToPrototype(isolate, map, prototype);
}

// static
MaybeHandle<Map> Map::TryGetObjectCreateMap(Isolate* isolate,
                                            Handle<HeapObject> prototype) {
  Handle<Map> map(isolate->native_context()->object_function().initial_map(),
                  isolate);
  if (map->prototype() == *prototype) return map;
  if (prototype->IsNull(isolate)) {
    return isolate->slow_object_with_null_prototype_map();
  }
  if (!prototype->IsJSObject()) return MaybeHandle<Map>();
  Handle<JSObject> js_prototype = Handle<JSObject>::cast(prototype);
  if (!js_prototype->map().is_prototype_map()) return MaybeHandle<Map>();
  Handle<PrototypeInfo> info =
      Map::GetOrCreatePrototypeInfo(js_prototype, isolate);
  if (!info->HasObjectCreateMap()) return MaybeHandle<Map>();
  return handle(info->ObjectCreateMap(), isolate);
}

static bool ContainsMap(MapHandles const& maps, Map map) {
  DCHECK(!map.is_null());
  for (Handle<Map> current : maps) {
    if (!current.is_null() && *current == map) return true;
  }
  return false;
}

static bool HasElementsKind(MapHandles const& maps,
                            ElementsKind elements_kind) {
  for (Handle<Map> current : maps) {
    if (!current.is_null() && current->elements_kind() == elements_kind)
      return true;
  }
  return false;
}

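// Searches |candidates| for a map that this map could transition to by
// changing the elements kind and replaying property transitions without
// rewriting instances; returns an empty Map if no candidate qualifies.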
Map Map::FindElementsKindTransitionedMap(Isolate* isolate,
                                         MapHandles const& candidates) {
  DisallowGarbageCollection no_gc;
  DisallowDeoptimization no_deoptimization(isolate);

  if (IsDetached(isolate)) return Map();

  ElementsKind kind = elements_kind();
  bool packed = IsFastPackedElementsKind(kind);

  Map transition;
  if (IsTransitionableFastElementsKind(kind)) {
    // Check the state of the root map.
    Map root_map = FindRootMap(isolate);
    if (!EquivalentToForElementsKindTransition(root_map)) return Map();
    root_map = root_map.LookupElementsTransitionMap(isolate, kind);
    DCHECK(!root_map.is_null());
    // Starting from the next existing elements kind transition try to
    // replay the property transitions that do not involve instance rewriting
    // (ElementsTransitionAndStoreStub does not support that).
    for (root_map = root_map.ElementsTransitionMap(isolate);
         !root_map.is_null() && root_map.has_fast_elements();
         root_map = root_map.ElementsTransitionMap(isolate)) {
      // If root_map's elements kind doesn't match any of the elements kinds
      // in the candidates, there is no need to do any additional work.
      if (!HasElementsKind(candidates, root_map.elements_kind())) continue;
      Map current = root_map.TryReplayPropertyTransitions(isolate, *this);
      if (current.is_null()) continue;
      if (InstancesNeedRewriting(current)) continue;

      if (ContainsMap(candidates, current) &&
          (packed || !IsFastPackedElementsKind(current.elements_kind()))) {
        transition = current;
        packed = packed && IsFastPackedElementsKind(current.elements_kind());
      }
    }
  }
  return transition;
}

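// Follows the elements kind transition chain from |map| towards |to_kind| and
// returns the closest map reached; its elements kind may still differ from
// |to_kind| if the chain ends early.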
static Map FindClosestElementsTransition(Isolate* isolate, Map map,
                                         ElementsKind to_kind) {
  // Ensure we are requested to search for an elements kind transition
  // "near the root".
  DCHECK_EQ(map.FindRootMap(isolate).NumberOfOwnDescriptors(),
            map.NumberOfOwnDescriptors());
  Map current_map = map;

  ElementsKind kind = map.elements_kind();
  while (kind != to_kind) {
    Map next_map = current_map.ElementsTransitionMap(isolate);
    if (next_map.is_null()) return current_map;
    kind = next_map.elements_kind();
    current_map = next_map;
  }

  DCHECK_EQ(to_kind, current_map.elements_kind());
  return current_map;
}

Map Map::LookupElementsTransitionMap(Isolate* isolate, ElementsKind to_kind) {
  Map to_map = FindClosestElementsTransition(isolate, *this, to_kind);
  if (to_map.elements_kind() == to_kind) return to_map;
  return Map();
}

bool Map::IsMapInArrayPrototypeChain(Isolate* isolate) const {
  if (isolate->initial_array_prototype()->map() == *this) {
    return true;
  }

  if (isolate->initial_object_prototype()->map() == *this) {
    return true;
  }

  return false;
}

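// Returns a map with |to_kind| elements for objects currently using |map|,
// preferring existing maps (aliased-arguments maps, initial JSArray maps, the
// back pointer) before reconfiguring via MapUpdater or copying the map.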
Handle<Map> Map::TransitionElementsTo(Isolate* isolate, Handle<Map> map,
                                      ElementsKind to_kind) {
  ElementsKind from_kind = map->elements_kind();
  if (from_kind == to_kind) return map;

  Context native_context = isolate->context().native_context();
  if (from_kind == FAST_SLOPPY_ARGUMENTS_ELEMENTS) {
    if (*map == native_context.fast_aliased_arguments_map()) {
      DCHECK_EQ(SLOW_SLOPPY_ARGUMENTS_ELEMENTS, to_kind);
      return handle(native_context.slow_aliased_arguments_map(), isolate);
    }
  } else if (from_kind == SLOW_SLOPPY_ARGUMENTS_ELEMENTS) {
    if (*map == native_context.slow_aliased_arguments_map()) {
      DCHECK_EQ(FAST_SLOPPY_ARGUMENTS_ELEMENTS, to_kind);
      return handle(native_context.fast_aliased_arguments_map(), isolate);
    }
  } else if (IsFastElementsKind(from_kind) && IsFastElementsKind(to_kind)) {
    // Reuse map transitions for JSArrays.
    DisallowGarbageCollection no_gc;
    if (native_context.GetInitialJSArrayMap(from_kind) == *map) {
      Object maybe_transitioned_map =
          native_context.get(Context::ArrayMapIndex(to_kind));
      if (maybe_transitioned_map.IsMap()) {
        return handle(Map::cast(maybe_transitioned_map), isolate);
      }
    }
  }

  DCHECK(!map->IsUndefined(isolate));
  // Check if we can go back in the elements kind transition chain.
  if (IsHoleyElementsKind(from_kind) &&
      to_kind == GetPackedElementsKind(from_kind) &&
      map->GetBackPointer().IsMap() &&
      Map::cast(map->GetBackPointer()).elements_kind() == to_kind) {
    return handle(Map::cast(map->GetBackPointer()), isolate);
  }

  bool allow_store_transition = IsTransitionElementsKind(from_kind);
  // Only store fast element maps in ascending generality.
  if (IsFastElementsKind(to_kind)) {
    allow_store_transition =
        allow_store_transition && IsTransitionableFastElementsKind(from_kind) &&
        IsMoreGeneralElementsKindTransition(from_kind, to_kind);
  }

  if (!allow_store_transition) {
    return Map::CopyAsElementsKind(isolate, map, to_kind, OMIT_TRANSITION);
  }

  return MapUpdater{isolate, map}.ReconfigureElementsKind(to_kind);
}

static Handle<Map> AddMissingElementsTransitions(Isolate* isolate,
                                                 Handle<Map> map,
                                                 ElementsKind to_kind) {
  DCHECK(IsTransitionElementsKind(map->elements_kind()));

  Handle<Map> current_map = map;

  ElementsKind kind = map->elements_kind();
  TransitionFlag flag;
  if (map->IsDetached(isolate)) {
    flag = OMIT_TRANSITION;
  } else {
    flag = INSERT_TRANSITION;
    if (IsFastElementsKind(kind)) {
      while (kind != to_kind && !IsTerminalElementsKind(kind)) {
        kind = GetNextTransitionElementsKind(kind);
        current_map = Map::CopyAsElementsKind(isolate, current_map, kind, flag);
      }
    }
  }

  // In case we are exiting the fast elements kind system, just add the map in
  // the end.
  if (kind != to_kind) {
    current_map = Map::CopyAsElementsKind(isolate, current_map, to_kind, flag);
  }

  DCHECK(current_map->elements_kind() == to_kind);
  return current_map;
}

// static
Handle<Map> Map::AsElementsKind(Isolate* isolate, Handle<Map> map,
                                ElementsKind kind) {
  Handle<Map> closest_map(FindClosestElementsTransition(isolate, *map, kind),
                          isolate);

  if (closest_map->elements_kind() == kind) {
    return closest_map;
  }

  return AddMissingElementsTransitions(isolate, closest_map, kind);
}

int Map::NumberOfEnumerableProperties() const {
  int result = 0;
  DescriptorArray descs = instance_descriptors(kRelaxedLoad);
  for (InternalIndex i : IterateOwnDescriptors()) {
    if ((descs.GetDetails(i).attributes() & ONLY_ENUMERABLE) == 0 &&
        !descs.GetKey(i).FilterKey(ENUMERABLE_STRINGS)) {
      result++;
    }
  }
  return result;
}

int Map::NextFreePropertyIndex() const {
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DescriptorArray descs = instance_descriptors(kRelaxedLoad);
  // Search properties backwards to find the last field.
  for (int i = number_of_own_descriptors - 1; i >= 0; --i) {
    PropertyDetails details = descs.GetDetails(InternalIndex(i));
    if (details.location() == kField) {
      return details.field_index() + details.field_width_in_words();
    }
  }
  return 0;
}

bool Map::OnlyHasSimpleProperties() const {
  // Wrapped string elements aren't explicitly stored in the elements backing
  // store, but are loaded indirectly from the underlying string.
  return !IsStringWrapperElementsKind(elements_kind()) &&
         !IsSpecialReceiverMap() && !is_dictionary_map();
}

bool Map::MayHaveReadOnlyElementsInPrototypeChain(Isolate* isolate) {
  for (PrototypeIterator iter(isolate, *this); !iter.IsAtEnd();
       iter.Advance()) {
    // Be conservative, don't look into any JSReceivers that may have custom
    // elements. For example, into JSProxies, String wrappers (which have
    // non-configurable, non-writable elements), API objects, etc.
    if (iter.GetCurrent().map().IsCustomElementsReceiverMap()) return true;

    JSObject current = iter.GetCurrent<JSObject>();
    ElementsKind elements_kind = current.GetElementsKind(isolate);
    if (IsFrozenElementsKind(elements_kind)) return true;

    if (IsDictionaryElementsKind(elements_kind) &&
        current.element_dictionary(isolate).requires_slow_elements()) {
      return true;
    }

    if (IsSlowArgumentsElementsKind(elements_kind)) {
      SloppyArgumentsElements elements =
          SloppyArgumentsElements::cast(current.elements(isolate));
      Object arguments = elements.arguments();
      if (NumberDictionary::cast(arguments).requires_slow_elements()) {
        return true;
      }
    }
  }

  return false;
}

Handle<Map> Map::RawCopy(Isolate* isolate, Handle<Map> map, int instance_size,
                         int inobject_properties) {
  Handle<Map> result = isolate->factory()->NewMap(
      map->instance_type(), instance_size, TERMINAL_FAST_ELEMENTS_KIND,
      inobject_properties);
  Handle<HeapObject> prototype(map->prototype(), isolate);
  Map::SetPrototype(isolate, result, prototype);
  result->set_constructor_or_back_pointer(map->GetConstructor());
  result->set_bit_field(map->bit_field());
  result->set_bit_field2(map->bit_field2());
  int new_bit_field3 = map->bit_field3();
  new_bit_field3 = Bits3::OwnsDescriptorsBit::update(new_bit_field3, true);
  new_bit_field3 = Bits3::NumberOfOwnDescriptorsBits::update(new_bit_field3, 0);
  new_bit_field3 =
      Bits3::EnumLengthBits::update(new_bit_field3, kInvalidEnumCacheSentinel);
  new_bit_field3 = Bits3::IsDeprecatedBit::update(new_bit_field3, false);
  new_bit_field3 = Bits3::IsInRetainedMapListBit::update(new_bit_field3, false);
  if (!map->is_dictionary_map()) {
    new_bit_field3 = Bits3::IsUnstableBit::update(new_bit_field3, false);
  }
  // Same as bit_field comment above.
  result->set_bit_field3(new_bit_field3);
  result->clear_padding();
  return result;
}

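// Returns a dictionary-mode (normalized) copy of |fast_map| with
// |new_elements_kind|, reusing the per-native-context NormalizedMapCache
// where possible; in slow-DCHECK builds a cached map is verified to match a
// freshly normalized one except for a few expected bit_field3 bits.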
Handle<Map> Map::Normalize(Isolate* isolate, Handle<Map> fast_map,
                           ElementsKind new_elements_kind,
                           PropertyNormalizationMode mode, const char* reason) {
  DCHECK(!fast_map->is_dictionary_map());

  Handle<Object> maybe_cache(isolate->native_context()->normalized_map_cache(),
                             isolate);
  bool use_cache =
      !fast_map->is_prototype_map() && !maybe_cache->IsUndefined(isolate);
  Handle<NormalizedMapCache> cache;
  if (use_cache) cache = Handle<NormalizedMapCache>::cast(maybe_cache);

  Handle<Map> new_map;
  if (use_cache &&
      cache->Get(fast_map, new_elements_kind, mode).ToHandle(&new_map)) {
#ifdef VERIFY_HEAP
    if (FLAG_verify_heap) new_map->DictionaryMapVerify(isolate);
#endif
#ifdef ENABLE_SLOW_DCHECKS
    if (FLAG_enable_slow_asserts) {
      // The cached map should match newly created normalized map bit-by-bit,
      // except for the code cache, which can contain some ICs which can be
      // applied to the shared map, dependent code and weak cell cache.
      Handle<Map> fresh = Map::CopyNormalized(isolate, fast_map, mode);
      fresh->set_elements_kind(new_elements_kind);

      STATIC_ASSERT(Map::kPrototypeValidityCellOffset ==
                    Map::kDependentCodeOffset + kTaggedSize);
      DCHECK_EQ(0, memcmp(reinterpret_cast<void*>(fresh->address()),
                          reinterpret_cast<void*>(new_map->address()),
                          Map::kBitField3Offset));
      // The IsInRetainedMapListBit might be different if the {new_map}
      // that we got from the {cache} was already embedded into optimized
      // code somewhere.
      // The IsMigrationTargetBit might be different if the {new_map} from
      // {cache} has already been marked as a migration target.
      constexpr int ignored_bit_field3_bits =
          Bits3::IsInRetainedMapListBit::kMask |
          Bits3::IsMigrationTargetBit::kMask;
      DCHECK_EQ(fresh->bit_field3() & ~ignored_bit_field3_bits,
                new_map->bit_field3() & ~ignored_bit_field3_bits);
      int offset = Map::kBitField3Offset + kInt32Size;
      DCHECK_EQ(0, memcmp(reinterpret_cast<void*>(fresh->address() + offset),
                          reinterpret_cast<void*>(new_map->address() + offset),
                          Map::kDependentCodeOffset - offset));
      offset = Map::kPrototypeValidityCellOffset + kTaggedSize;
      if (new_map->is_prototype_map()) {
        // For prototype maps, the PrototypeInfo is not copied.
        STATIC_ASSERT(Map::kTransitionsOrPrototypeInfoOffset ==
                      Map::kPrototypeValidityCellOffset + kTaggedSize);
        offset = kTransitionsOrPrototypeInfoOffset + kTaggedSize;
        DCHECK_EQ(fresh->raw_transitions(),
                  MaybeObject::FromObject(Smi::zero()));
      }
      DCHECK_EQ(0, memcmp(reinterpret_cast<void*>(fresh->address() + offset),
                          reinterpret_cast<void*>(new_map->address() + offset),
                          Map::kSize - offset));
    }
#endif
    if (FLAG_log_maps) {
      LOG(isolate, MapEvent("NormalizeCached", fast_map, new_map, reason));
    }
  } else {
    new_map = Map::CopyNormalized(isolate, fast_map, mode);
    new_map->set_elements_kind(new_elements_kind);
    if (use_cache) {
      cache->Set(fast_map, new_map);
      isolate->counters()->maps_normalized()->Increment();
    }
    if (FLAG_log_maps) {
      LOG(isolate, MapEvent("Normalize", fast_map, new_map, reason));
    }
  }
  fast_map->NotifyLeafMapLayoutChange(isolate);
  return new_map;
}

Handle<Map> Map::CopyNormalized(Isolate* isolate, Handle<Map> map,
                                PropertyNormalizationMode mode) {
  int new_instance_size = map->instance_size();
  if (mode == CLEAR_INOBJECT_PROPERTIES) {
    new_instance_size -= map->GetInObjectProperties() * kTaggedSize;
  }

  Handle<Map> result = RawCopy(
      isolate, map, new_instance_size,
      mode == CLEAR_INOBJECT_PROPERTIES ? 0 : map->GetInObjectProperties());
  // Clear the unused_property_fields explicitly as this field should not
  // be accessed for normalized maps.
  result->SetInObjectUnusedPropertyFields(0);
  result->set_is_dictionary_map(true);
  result->set_is_migration_target(false);
  result->set_may_have_interesting_symbols(true);
  result->set_construction_counter(kNoSlackTracking);

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) result->DictionaryMapVerify(isolate);
#endif

  return result;
}

// Return an immutable prototype exotic object version of the input map.
// Never even try to cache it in the transition tree, as it is intended
// for the global object and its prototype chain, and excluding it saves
// memory on the map transition tree.

// static
Handle<Map> Map::TransitionToImmutableProto(Isolate* isolate, Handle<Map> map) {
  Handle<Map> new_map = Map::Copy(isolate, map, "ImmutablePrototype");
  new_map->set_is_immutable_proto(true);
  return new_map;
}

namespace {
void EnsureInitialMap(Isolate* isolate, Handle<Map> map) {
#ifdef DEBUG
  Object maybe_constructor = map->GetConstructor();
  DCHECK((maybe_constructor.IsJSFunction() &&
          *map == JSFunction::cast(maybe_constructor).initial_map()) ||
         // Below are the exceptions to the check above.
         // Strict function maps have Function as a constructor but the
         // Function's initial map is a sloppy function map.
         *map == *isolate->strict_function_map() ||
         *map == *isolate->strict_function_with_name_map() ||
         // Same holds for GeneratorFunction and its initial map.
         *map == *isolate->generator_function_map() ||
         *map == *isolate->generator_function_with_name_map() ||
         // AsyncFunction has Null as a constructor.
         *map == *isolate->async_function_map() ||
         *map == *isolate->async_function_with_name_map());
#endif
  // Initial maps must not contain descriptors in the descriptors array
  // that do not belong to the map.
  DCHECK_EQ(map->NumberOfOwnDescriptors(),
            map->instance_descriptors(isolate).number_of_descriptors());
}
}  // namespace

// static
Handle<Map> Map::CopyInitialMapNormalized(Isolate* isolate, Handle<Map> map,
                                          PropertyNormalizationMode mode) {
  EnsureInitialMap(isolate, map);
  return CopyNormalized(isolate, map, mode);
}

// static
Handle<Map> Map::CopyInitialMap(Isolate* isolate, Handle<Map> map,
                                int instance_size, int inobject_properties,
                                int unused_property_fields) {
  EnsureInitialMap(isolate, map);

  Handle<Map> result =
      RawCopy(isolate, map, instance_size, inobject_properties);

  // Please note instance_type and instance_size are set when allocated.
  result->SetInObjectUnusedPropertyFields(unused_property_fields);

  int number_of_own_descriptors = map->NumberOfOwnDescriptors();
  if (number_of_own_descriptors > 0) {
    // The copy will use the same descriptors array without ownership.
    DescriptorArray descriptors = map->instance_descriptors(isolate);
    result->set_owns_descriptors(false);
    result->UpdateDescriptors(isolate, descriptors, number_of_own_descriptors);

    DCHECK_EQ(result->NumberOfFields(),
              result->GetInObjectProperties() - result->UnusedPropertyFields());
  }

  return result;
}

Handle<Map> Map::CopyDropDescriptors(Isolate* isolate, Handle<Map> map) {
  Handle<Map> result =
      RawCopy(isolate, map, map->instance_size(),
              map->IsJSObjectMap() ? map->GetInObjectProperties() : 0);

  // Please note instance_type and instance_size are set when allocated.
  if (map->IsJSObjectMap()) {
    result->CopyUnusedPropertyFields(*map);
  }
  map->NotifyLeafMapLayoutChange(isolate);
  return result;
}

Handle<Map> Map::ShareDescriptor(Isolate* isolate, Handle<Map> map,
                                 Handle<DescriptorArray> descriptors,
                                 Descriptor* descriptor) {
  // Sanity check. This path is only to be taken if the map owns its descriptor
  // array, implying that its NumberOfOwnDescriptors equals the number of
  // descriptors in the descriptor array.
  DCHECK_EQ(map->NumberOfOwnDescriptors(),
            map->instance_descriptors(isolate).number_of_descriptors());

  Handle<Map> result = CopyDropDescriptors(isolate, map);
  Handle<Name> name = descriptor->GetKey();

  // Properly mark the {result} if the {name} is an "interesting symbol".
  if (name->IsInterestingSymbol()) {
    result->set_may_have_interesting_symbols(true);
  }

  // Ensure there's space for the new descriptor in the shared descriptor array.
  if (descriptors->number_of_slack_descriptors() == 0) {
    int old_size = descriptors->number_of_descriptors();
    if (old_size == 0) {
      descriptors = DescriptorArray::Allocate(isolate, 0, 1);
    } else {
      int slack = SlackForArraySize(old_size, kMaxNumberOfDescriptors);
      EnsureDescriptorSlack(isolate, map, slack);
      descriptors = handle(map->instance_descriptors(isolate), isolate);
    }
  }

  {
    DisallowGarbageCollection no_gc;
    descriptors->Append(descriptor);
    result->InitializeDescriptors(isolate, *descriptors);
  }

  DCHECK(result->NumberOfOwnDescriptors() == map->NumberOfOwnDescriptors() + 1);
  ConnectTransition(isolate, map, result, name, SIMPLE_PROPERTY_TRANSITION);

  return result;
}

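// Connects |child| to |parent| as a transition keyed on |name|. Detached
// (prototype) maps are not inserted into the transition tree; for them the
// transition is only logged when map logging is enabled.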
void Map::ConnectTransition(Isolate* isolate, Handle<Map> parent,
                            Handle<Map> child, Handle<Name> name,
                            SimpleTransitionFlag flag) {
  DCHECK_IMPLIES(name->IsInterestingSymbol(),
                 child->may_have_interesting_symbols());
  DCHECK_IMPLIES(parent->may_have_interesting_symbols(),
                 child->may_have_interesting_symbols());
  if (!parent->GetBackPointer().IsUndefined(isolate)) {
    parent->set_owns_descriptors(false);
  } else if (!parent->IsDetached(isolate)) {
    // |parent| is an initial map and it must not contain descriptors in the
    // descriptors array that do not belong to the map.
    DCHECK_EQ(parent->NumberOfOwnDescriptors(),
              parent->instance_descriptors(isolate).number_of_descriptors());
  }
  if (parent->IsDetached(isolate)) {
    DCHECK(child->IsDetached(isolate));
    if (FLAG_log_maps) {
      LOG(isolate, MapEvent("Transition", parent, child, "prototype", name));
    }
  } else {
    TransitionsAccessor(isolate, parent).Insert(name, child, flag);
    if (FLAG_log_maps) {
      LOG(isolate, MapEvent("Transition", parent, child, "", name));
    }
  }
}

Handle<Map> Map::CopyReplaceDescriptors(Isolate* isolate, Handle<Map> map,
                                        Handle<DescriptorArray> descriptors,
                                        TransitionFlag flag,
                                        MaybeHandle<Name> maybe_name,
                                        const char* reason,
                                        SimpleTransitionFlag simple_flag) {
  DCHECK(descriptors->IsSortedNoDuplicates());

  Handle<Map> result = CopyDropDescriptors(isolate, map);
  bool is_connected = false;

  // Properly mark the {result} if the {name} is an "interesting symbol".
  Handle<Name> name;
  if (maybe_name.ToHandle(&name) && name->IsInterestingSymbol()) {
    result->set_may_have_interesting_symbols(true);
  }

  if (map->is_prototype_map()) {
    result->InitializeDescriptors(isolate, *descriptors);
  } else {
    if (flag == INSERT_TRANSITION &&
        TransitionsAccessor(isolate, map).CanHaveMoreTransitions()) {
      result->InitializeDescriptors(isolate, *descriptors);

      DCHECK(!maybe_name.is_null());
      ConnectTransition(isolate, map, result, name, simple_flag);
      is_connected = true;
    } else {
      descriptors->GeneralizeAllFields();
      result->InitializeDescriptors(isolate, *descriptors);
    }
  }
  if (FLAG_log_maps && !is_connected) {
    LOG(isolate, MapEvent("ReplaceDescriptors", map, result, reason,
                          maybe_name.is_null() ? Handle<HeapObject>() : name));
  }
  return result;
}

// Creates a transition tree starting from |split_map| and adding all
// descriptors starting from the descriptor with index
// |split_map|.NumberOfOwnDescriptors(). The way it is done is tricky because
// of GC and the special descriptor-marking logic.
Handle<Map> Map::AddMissingTransitions(Isolate* isolate, Handle<Map> split_map,
                                       Handle<DescriptorArray> descriptors) {
  DCHECK(descriptors->IsSortedNoDuplicates());
  int split_nof = split_map->NumberOfOwnDescriptors();
  int nof_descriptors = descriptors->number_of_descriptors();
  DCHECK_LT(split_nof, nof_descriptors);

  // Start with creating the last map, which will own the full descriptors
  // array. This is necessary to guarantee that the GC will mark the whole
  // descriptor array if any of the allocations happening below fail.
  // The number of unused properties is temporarily incorrect and the layout
  // descriptor could unnecessarily be in slow mode, but we will fix that after
  // all the other intermediate maps are created.
  // Also the last map might have interesting symbols; we temporarily set
  // the flag and clear it right before the descriptors are installed. This
  // makes heap verification happy and ensures the flag ends up accurate.
  Handle<Map> last_map = CopyDropDescriptors(isolate, split_map);
  last_map->InitializeDescriptors(isolate, *descriptors);
  last_map->SetInObjectUnusedPropertyFields(0);
  last_map->set_may_have_interesting_symbols(true);

  // During creation of intermediate maps we violate the descriptor-sharing
  // invariant, since the last map is not yet connected to the transition tree
  // we create here. But this is safe because the GC never trims a map's
  // descriptors if there are no dead transitions from that map, which is
  // exactly the case for all the intermediate maps we create here.
  Handle<Map> map = split_map;
  for (InternalIndex i : InternalIndex::Range(split_nof, nof_descriptors - 1)) {
    Handle<Map> new_map = CopyDropDescriptors(isolate, map);
    InstallDescriptors(isolate, map, new_map, i, descriptors);

    map = new_map;
  }
  map->NotifyLeafMapLayoutChange(isolate);
  last_map->set_may_have_interesting_symbols(false);
  InstallDescriptors(isolate, map, last_map, InternalIndex(nof_descriptors - 1),
                     descriptors);
  return last_map;
}

// Since this method is used to rewrite an existing transition tree, it can
// always insert transitions without checking.
void Map::InstallDescriptors(Isolate* isolate, Handle<Map> parent,
                             Handle<Map> child, InternalIndex new_descriptor,
                             Handle<DescriptorArray> descriptors) {
  DCHECK(descriptors->IsSortedNoDuplicates());

  child->SetInstanceDescriptors(isolate, *descriptors,
                                new_descriptor.as_int() + 1);
  child->CopyUnusedPropertyFields(*parent);
  PropertyDetails details = descriptors->GetDetails(new_descriptor);
  if (details.location() == kField) {
    child->AccountAddedPropertyField();
  }

  Handle<Name> name = handle(descriptors->GetKey(new_descriptor), isolate);
  if (parent->may_have_interesting_symbols() || name->IsInterestingSymbol()) {
    child->set_may_have_interesting_symbols(true);
  }
  ConnectTransition(isolate, parent, child, name, SIMPLE_PROPERTY_TRANSITION);
}

Handle<Map> Map::CopyAsElementsKind(Isolate* isolate, Handle<Map> map,
                                    ElementsKind kind, TransitionFlag flag) {
  // Only certain objects are allowed to have non-terminal fast transitional
  // elements kinds.
  DCHECK(map->IsJSObjectMap());
  DCHECK_IMPLIES(
      !map->CanHaveFastTransitionableElementsKind(),
      IsDictionaryElementsKind(kind) || IsTerminalElementsKind(kind));

  Map maybe_elements_transition_map;
  if (flag == INSERT_TRANSITION) {
    // Ensure we are requested to add elements kind transition "near the root".
    DCHECK_EQ(map->FindRootMap(isolate).NumberOfOwnDescriptors(),
              map->NumberOfOwnDescriptors());

    maybe_elements_transition_map = map->ElementsTransitionMap(isolate);
    DCHECK(
        maybe_elements_transition_map.is_null() ||
        (maybe_elements_transition_map.elements_kind() == DICTIONARY_ELEMENTS &&
         kind == DICTIONARY_ELEMENTS));
    DCHECK(!IsFastElementsKind(kind) ||
           IsMoreGeneralElementsKindTransition(map->elements_kind(), kind));
    DCHECK(kind != map->elements_kind());
  }

  bool insert_transition =
      flag == INSERT_TRANSITION &&
      TransitionsAccessor(isolate, map).CanHaveMoreTransitions() &&
      maybe_elements_transition_map.is_null();

  if (insert_transition) {
    Handle<Map> new_map = CopyForElementsTransition(isolate, map);
    new_map->set_elements_kind(kind);

    Handle<Name> name = isolate->factory()->elements_transition_symbol();
    ConnectTransition(isolate, map, new_map, name, SPECIAL_TRANSITION);
    return new_map;
  }

  // Create a new free-floating map only if we are not allowed to store it.
  Handle<Map> new_map = Copy(isolate, map, "CopyAsElementsKind");
  new_map->set_elements_kind(kind);
  return new_map;
}

Handle<Map> Map::AsLanguageMode(Isolate* isolate, Handle<Map> initial_map,
                                Handle<SharedFunctionInfo> shared_info) {
  DCHECK(InstanceTypeChecker::IsJSFunction(initial_map->instance_type()));
  // The initial map for a sloppy-mode function is stored in the function
  // constructor. Initial maps for strict-mode functions are cached as special
  // transitions using |strict_function_transition_symbol| as a key.
  if (is_sloppy(shared_info->language_mode())) return initial_map;

  Handle<Map> function_map(Map::cast(isolate->native_context()->get(
                               shared_info->function_map_index())),
                           isolate);

  STATIC_ASSERT(LanguageModeSize == 2);
  DCHECK_EQ(LanguageMode::kStrict, shared_info->language_mode());
  Handle<Symbol> transition_symbol =
      isolate->factory()->strict_function_transition_symbol();
  Map maybe_transition = TransitionsAccessor(isolate, initial_map)
                             .SearchSpecial(*transition_symbol);
  if (!maybe_transition.is_null()) {
    return handle(maybe_transition, isolate);
  }
  initial_map->NotifyLeafMapLayoutChange(isolate);

  // Create new map taking descriptors from the |function_map| and all
  // the other details from the |initial_map|.
  Handle<Map> map =
      Map::CopyInitialMap(isolate, function_map, initial_map->instance_size(),
                          initial_map->GetInObjectProperties(),
                          initial_map->UnusedPropertyFields());
  map->SetConstructor(initial_map->GetConstructor());
  map->set_prototype(initial_map->prototype());
  map->set_construction_counter(initial_map->construction_counter());

  if (TransitionsAccessor(isolate, initial_map).CanHaveMoreTransitions()) {
    Map::ConnectTransition(isolate, initial_map, map, transition_symbol,
                           SPECIAL_TRANSITION);
  }
  return map;
}

Handle<Map> Map::CopyForElementsTransition(Isolate* isolate, Handle<Map> map) {
  DCHECK(!map->IsDetached(isolate));
  Handle<Map> new_map = CopyDropDescriptors(isolate, map);

  if (map->owns_descriptors()) {
    // In case the map owned its own descriptors, share the descriptors and
    // transfer ownership to the new map.
    // The properties did not change, so reuse descriptors.
    map->set_owns_descriptors(false);
    new_map->InitializeDescriptors(isolate, map->instance_descriptors(isolate));
  } else {
    // In case the map did not own its own descriptors, a split is forced by
    // copying the map, creating a new descriptor array cell.
    Handle<DescriptorArray> descriptors(map->instance_descriptors(isolate),
                                        isolate);
    int number_of_own_descriptors = map->NumberOfOwnDescriptors();
    Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
        isolate, descriptors, number_of_own_descriptors);
    new_map->InitializeDescriptors(isolate, *new_descriptors);
  }
  return new_map;
}

Handle<Map> Map::Copy(Isolate* isolate, Handle<Map> map, const char* reason) {
  Handle<DescriptorArray> descriptors(map->instance_descriptors(isolate),
                                      isolate);
  int number_of_own_descriptors = map->NumberOfOwnDescriptors();
  Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
      isolate, descriptors, number_of_own_descriptors);
  return CopyReplaceDescriptors(isolate, map, new_descriptors, OMIT_TRANSITION,
                                MaybeHandle<Name>(), reason,
                                SPECIAL_TRANSITION);
}

Handle<Map> Map::Create(Isolate* isolate, int inobject_properties) {
  Handle<Map> copy =
      Copy(isolate, handle(isolate->object_function()->initial_map(), isolate),
           "MapCreate");

  // Check that we do not overflow the instance size when adding the extra
  // inobject properties. If the instance size overflows, we allocate as many
  // properties as we can as inobject properties.
  if (inobject_properties > JSObject::kMaxInObjectProperties) {
    inobject_properties = JSObject::kMaxInObjectProperties;
  }

  int new_instance_size =
      JSObject::kHeaderSize + kTaggedSize * inobject_properties;

  // Adjust the map with the extra inobject properties.
  copy->set_instance_size(new_instance_size);
  copy->SetInObjectPropertiesStartInWords(JSObject::kHeaderSize / kTaggedSize);
  DCHECK_EQ(copy->GetInObjectProperties(), inobject_properties);
  copy->SetInObjectUnusedPropertyFields(inobject_properties);
  copy->set_visitor_id(Map::GetVisitorId(*copy));
  return copy;
}

Handle<Map> Map::CopyForPreventExtensions(
    Isolate* isolate, Handle<Map> map, PropertyAttributes attrs_to_add,
    Handle<Symbol> transition_marker, const char* reason,
    bool old_map_is_dictionary_elements_kind) {
  int num_descriptors = map->NumberOfOwnDescriptors();
  Handle<DescriptorArray> new_desc = DescriptorArray::CopyUpToAddAttributes(
      isolate, handle(map->instance_descriptors(isolate), isolate),
      num_descriptors, attrs_to_add);
  // Do not track transitions during bootstrapping.
  TransitionFlag flag =
      isolate->bootstrapper()->IsActive() ? OMIT_TRANSITION : INSERT_TRANSITION;
  Handle<Map> new_map =
      CopyReplaceDescriptors(isolate, map, new_desc, flag, transition_marker,
                             reason, SPECIAL_TRANSITION);
  new_map->set_is_extensible(false);
  if (!IsTypedArrayElementsKind(map->elements_kind())) {
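    // By default non-extensible maps fall back to dictionary (or slow string
    // wrapper) elements; with FLAG_enable_sealed_frozen_elements_kind the
    // switch below upgrades packed/holey kinds to the dedicated
    // nonextensible/sealed/frozen elements kinds instead.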
    ElementsKind new_kind = IsStringWrapperElementsKind(map->elements_kind())
                                ? SLOW_STRING_WRAPPER_ELEMENTS
                                : DICTIONARY_ELEMENTS;
    if (FLAG_enable_sealed_frozen_elements_kind &&
        !old_map_is_dictionary_elements_kind) {
      switch (map->elements_kind()) {
        case PACKED_ELEMENTS:
          if (attrs_to_add == SEALED) {
            new_kind = PACKED_SEALED_ELEMENTS;
          } else if (attrs_to_add == FROZEN) {
            new_kind = PACKED_FROZEN_ELEMENTS;
          } else {
            new_kind = PACKED_NONEXTENSIBLE_ELEMENTS;
          }
          break;
        case PACKED_NONEXTENSIBLE_ELEMENTS:
          if (attrs_to_add == SEALED) {
            new_kind = PACKED_SEALED_ELEMENTS;
          } else if (attrs_to_add == FROZEN) {
            new_kind = PACKED_FROZEN_ELEMENTS;
          }
          break;
        case PACKED_SEALED_ELEMENTS:
          if (attrs_to_add == FROZEN) {
            new_kind = PACKED_FROZEN_ELEMENTS;
          }
          break;
        case HOLEY_ELEMENTS:
          if (attrs_to_add == SEALED) {
            new_kind = HOLEY_SEALED_ELEMENTS;
          } else if (attrs_to_add == FROZEN) {
            new_kind = HOLEY_FROZEN_ELEMENTS;
          } else {
            new_kind = HOLEY_NONEXTENSIBLE_ELEMENTS;
          }
          break;
        case HOLEY_NONEXTENSIBLE_ELEMENTS:
          if (attrs_to_add == SEALED) {
            new_kind = HOLEY_SEALED_ELEMENTS;
          } else if (attrs_to_add == FROZEN) {
            new_kind = HOLEY_FROZEN_ELEMENTS;
          }
          break;
        case HOLEY_SEALED_ELEMENTS:
          if (attrs_to_add == FROZEN) {
            new_kind = HOLEY_FROZEN_ELEMENTS;
          }
          break;
        default:
          break;
      }
    }
    new_map->set_elements_kind(new_kind);
  }
  return new_map;
}

namespace {

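// Returns true if the existing data field at |descriptor| can already hold
// |value|: the field's constness is generalizable to |constness|, |value|
// fits the field's representation, and the field type contains it. Accessor
// properties always return false and thus require a map reconfiguration.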
bool CanHoldValue(DescriptorArray descriptors, InternalIndex descriptor,
                  PropertyConstness constness, Object value) {
  PropertyDetails details = descriptors.GetDetails(descriptor);
  if (details.location() == kField) {
    if (details.kind() == kData) {
      return IsGeneralizableTo(constness, details.constness()) &&
             value.FitsRepresentation(details.representation()) &&
             descriptors.GetFieldType(descriptor).NowContains(value);
    } else {
      DCHECK_EQ(kAccessor, details.kind());
      return false;
    }

  } else {
    DCHECK_EQ(kDescriptor, details.location());
    DCHECK_EQ(PropertyConstness::kConst, details.constness());
    DCHECK_EQ(kAccessor, details.kind());
    return false;
  }
  UNREACHABLE();
}

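// Returns a map that can hold |value| at |descriptor|: |map| itself if the
// existing field already fits, otherwise a map reconfigured via MapUpdater
// to a data field with a generalized representation and field type.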
Handle<Map> UpdateDescriptorForValue(Isolate* isolate, Handle<Map> map,
                                     InternalIndex descriptor,
                                     PropertyConstness constness,
                                     Handle<Object> value) {
  if (CanHoldValue(map->instance_descriptors(isolate), descriptor, constness,
                   *value)) {
    return map;
  }

  PropertyAttributes attributes =
      map->instance_descriptors(isolate).GetDetails(descriptor).attributes();
  Representation representation = value->OptimalRepresentation(isolate);
  Handle<FieldType> type = value->OptimalType(isolate, representation);

  MapUpdater mu(isolate, map);
  return mu.ReconfigureToDataField(descriptor, attributes, constness,
                                   representation, type);
}

}  // namespace

// static
Handle<Map> Map::PrepareForDataProperty(Isolate* isolate, Handle<Map> map,
                                        InternalIndex descriptor,
                                        PropertyConstness constness,
                                        Handle<Object> value) {
  // Update to the newest map before storing the property.
  map = Update(isolate, map);
  // Dictionaries can store any property value.
  DCHECK(!map->is_dictionary_map());
  return UpdateDescriptorForValue(isolate, map, descriptor, constness, value);
}

Handle<Map> Map::TransitionToDataProperty(Isolate* isolate, Handle<Map> map,
                                          Handle<Name> name,
                                          Handle<Object> value,
                                          PropertyAttributes attributes,
                                          PropertyConstness constness,
                                          StoreOrigin store_origin) {
  RCS_SCOPE(isolate,
            map->IsDetached(isolate)
                ? RuntimeCallCounterId::kPrototypeMap_TransitionToDataProperty
                : RuntimeCallCounterId::kMap_TransitionToDataProperty);

  DCHECK(name->IsUniqueName());
  DCHECK(!map->is_dictionary_map());

  // Migrate to the newest map before storing the property.
  map = Update(isolate, map);

  Map maybe_transition = TransitionsAccessor(isolate, map)
                             .SearchTransition(*name, kData, attributes);
  if (!maybe_transition.is_null()) {
    Handle<Map> transition(maybe_transition, isolate);
    InternalIndex descriptor = transition->LastAdded();

    DCHECK_EQ(attributes, transition->instance_descriptors(isolate)
                              .GetDetails(descriptor)
                              .attributes());

    return UpdateDescriptorForValue(isolate, transition, descriptor, constness,
                                    value);
  }

  // Do not track transitions during bootstrapping.
  TransitionFlag flag =
      isolate->bootstrapper()->IsActive() ? OMIT_TRANSITION : INSERT_TRANSITION;
  MaybeHandle<Map> maybe_map;
  if (!map->TooManyFastProperties(store_origin)) {
    Representation representation = value->OptimalRepresentation(isolate);
    Handle<FieldType> type = value->OptimalType(isolate, representation);
    maybe_map = Map::CopyWithField(isolate, map, name, type, attributes,
                                   constness, representation, flag);
  }

  Handle<Map> result;
  if (!maybe_map.ToHandle(&result)) {
    const char* reason = "TooManyFastProperties";
#if V8_TRACE_MAPS
    std::unique_ptr<ScopedVector<char>> buffer;
    if (FLAG_log_maps) {
      ScopedVector<char> name_buffer(100);
      name->NameShortPrint(name_buffer);
      buffer.reset(new ScopedVector<char>(128));
      SNPrintF(*buffer, "TooManyFastProperties %s", name_buffer.begin());
      reason = buffer->begin();
    }
#endif
    Handle<Object> maybe_constructor(map->GetConstructor(), isolate);
    if (FLAG_feedback_normalization && map->new_target_is_base() &&
        maybe_constructor->IsJSFunction() &&
        !JSFunction::cast(*maybe_constructor).shared().native()) {
      Handle<JSFunction> constructor =
          Handle<JSFunction>::cast(maybe_constructor);
      DCHECK_NE(*constructor,
                constructor->context().native_context().object_function());
      Handle<Map> initial_map(constructor->initial_map(), isolate);
      result = Map::Normalize(isolate, initial_map, CLEAR_INOBJECT_PROPERTIES,
                              reason);
      initial_map->DeprecateTransitionTree(isolate);
      Handle<HeapObject> prototype(result->prototype(), isolate);
      JSFunction::SetInitialMap(isolate, constructor, result, prototype);

      // Deoptimize all code that embeds the previous initial map.
      initial_map->dependent_code().DeoptimizeDependentCodeGroup(
          DependentCode::kInitialMapChangedGroup);
      if (!result->EquivalentToForNormalization(*map,
                                                CLEAR_INOBJECT_PROPERTIES)) {
        result =
            Map::Normalize(isolate, map, CLEAR_INOBJECT_PROPERTIES, reason);
      }
    } else {
      result = Map::Normalize(isolate, map, CLEAR_INOBJECT_PROPERTIES, reason);
    }
  }

  return result;
}

Handle<Map> Map::TransitionToAccessorProperty(Isolate* isolate, Handle<Map> map,
                                              Handle<Name> name,
                                              InternalIndex descriptor,
                                              Handle<Object> getter,
                                              Handle<Object> setter,
                                              PropertyAttributes attributes) {
  RCS_SCOPE(
      isolate,
      map->IsDetached(isolate)
          ? RuntimeCallCounterId::kPrototypeMap_TransitionToAccessorProperty
          : RuntimeCallCounterId::kMap_TransitionToAccessorProperty);

  // At least one of the accessors needs to be a new value.
  DCHECK(!getter->IsNull(isolate) || !setter->IsNull(isolate));
  DCHECK(name->IsUniqueName());

  // Migrate to the newest map before transitioning to the new property.
  map = Update(isolate, map);

  // Dictionary maps can always have additional data properties.
  if (map->is_dictionary_map()) return map;

  PropertyNormalizationMode mode = map->is_prototype_map()
                                       ? KEEP_INOBJECT_PROPERTIES
                                       : CLEAR_INOBJECT_PROPERTIES;

  Map maybe_transition = TransitionsAccessor(isolate, map)
                             .SearchTransition(*name, kAccessor, attributes);
  if (!maybe_transition.is_null()) {
    Handle<Map> transition(maybe_transition, isolate);
    DescriptorArray descriptors = transition->instance_descriptors(isolate);
    InternalIndex descriptor = transition->LastAdded();
    DCHECK(descriptors.GetKey(descriptor).Equals(*name));

    DCHECK_EQ(kAccessor, descriptors.GetDetails(descriptor).kind());
    DCHECK_EQ(attributes, descriptors.GetDetails(descriptor).attributes());

    Handle<Object> maybe_pair(descriptors.GetStrongValue(descriptor), isolate);
    if (!maybe_pair->IsAccessorPair()) {
      return Map::Normalize(isolate, map, mode,
                            "TransitionToAccessorFromNonPair");
    }

    Handle<AccessorPair> pair = Handle<AccessorPair>::cast(maybe_pair);
    if (!pair->Equals(*getter, *setter)) {
      return Map::Normalize(isolate, map, mode,
                            "TransitionToDifferentAccessor");
    }

    return transition;
  }

  Handle<AccessorPair> pair;
  DescriptorArray old_descriptors = map->instance_descriptors(isolate);
  if (descriptor.is_found()) {
    if (descriptor != map->LastAdded()) {
      return Map::Normalize(isolate, map, mode, "AccessorsOverwritingNonLast");
    }
    PropertyDetails old_details = old_descriptors.GetDetails(descriptor);
    if (old_details.kind() != kAccessor) {
      return Map::Normalize(isolate, map, mode,
                            "AccessorsOverwritingNonAccessors");
    }

    if (old_details.attributes() != attributes) {
      return Map::Normalize(isolate, map, mode, "AccessorsWithAttributes");
    }

    Handle<Object> maybe_pair(old_descriptors.GetStrongValue(descriptor),
                              isolate);
    if (!maybe_pair->IsAccessorPair()) {
      return Map::Normalize(isolate, map, mode, "AccessorsOverwritingNonPair");
    }

    Handle<AccessorPair> current_pair = Handle<AccessorPair>::cast(maybe_pair);
    if (current_pair->Equals(*getter, *setter)) return map;

    bool overwriting_accessor = false;
    if (!getter->IsNull(isolate) &&
        !current_pair->get(ACCESSOR_GETTER).IsNull(isolate) &&
        current_pair->get(ACCESSOR_GETTER) != *getter) {
      overwriting_accessor = true;
    }
    if (!setter->IsNull(isolate) &&
        !current_pair->get(ACCESSOR_SETTER).IsNull(isolate) &&
        current_pair->get(ACCESSOR_SETTER) != *setter) {
      overwriting_accessor = true;
    }
    if (overwriting_accessor) {
      return Map::Normalize(isolate, map, mode,
                            "AccessorsOverwritingAccessors");
    }

    pair = AccessorPair::Copy(isolate, Handle<AccessorPair>::cast(maybe_pair));
  } else if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors ||
             map->TooManyFastProperties(StoreOrigin::kNamed)) {
    return Map::Normalize(isolate, map, CLEAR_INOBJECT_PROPERTIES,
                          "TooManyAccessors");
  } else {
    pair = isolate->factory()->NewAccessorPair();
  }

  pair->SetComponents(*getter, *setter);

  // Do not track transitions during bootstrapping.
  TransitionFlag flag =
      isolate->bootstrapper()->IsActive() ? OMIT_TRANSITION : INSERT_TRANSITION;
  Descriptor d = Descriptor::AccessorConstant(name, pair, attributes);
  return Map::CopyInsertDescriptor(isolate, map, &d, flag);
}

Handle<Map> Map::CopyAddDescriptor(Isolate* isolate, Handle<Map> map,
                                   Descriptor* descriptor,
                                   TransitionFlag flag) {
  Handle<DescriptorArray> descriptors(map->instance_descriptors(isolate),
                                      isolate);

  // Share descriptors only if map owns descriptors and it is not an initial
  // map.
  if (flag == INSERT_TRANSITION && map->owns_descriptors() &&
      !map->GetBackPointer().IsUndefined(isolate) &&
      TransitionsAccessor(isolate, map).CanHaveMoreTransitions()) {
    return ShareDescriptor(isolate, map, descriptors, descriptor);
  }

  int nof = map->NumberOfOwnDescriptors();
  Handle<DescriptorArray> new_descriptors =
      DescriptorArray::CopyUpTo(isolate, descriptors, nof, 1);
  new_descriptors->Append(descriptor);

  return CopyReplaceDescriptors(isolate, map, new_descriptors, flag,
                                descriptor->GetKey(), "CopyAddDescriptor",
                                SIMPLE_PROPERTY_TRANSITION);
}

Handle<Map> Map::CopyInsertDescriptor(Isolate* isolate, Handle<Map> map,
                                      Descriptor* descriptor,
                                      TransitionFlag flag) {
  Handle<DescriptorArray> old_descriptors(map->instance_descriptors(isolate),
                                          isolate);

  // We replace the key if it is already present.
  InternalIndex index =
      old_descriptors->SearchWithCache(isolate, *descriptor->GetKey(), *map);
  if (index.is_found()) {
    return CopyReplaceDescriptor(isolate, map, old_descriptors, descriptor,
                                 index, flag);
  }
  return CopyAddDescriptor(isolate, map, descriptor, flag);
}

Handle<Map> Map::CopyReplaceDescriptor(Isolate* isolate, Handle<Map> map,
                                       Handle<DescriptorArray> descriptors,
                                       Descriptor* descriptor,
                                       InternalIndex insertion_index,
                                       TransitionFlag flag) {
  Handle<Name> key = descriptor->GetKey();
  DCHECK_EQ(*key, descriptors->GetKey(insertion_index));
  // This function does not support replacing property fields as
  // that would break property field counters.
  DCHECK_NE(kField, descriptor->GetDetails().location());
  DCHECK_NE(kField, descriptors->GetDetails(insertion_index).location());

  Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
      isolate, descriptors, map->NumberOfOwnDescriptors());

  new_descriptors->Replace(insertion_index, descriptor);

  SimpleTransitionFlag simple_flag =
      (insertion_index.as_int() == descriptors->number_of_descriptors() - 1)
          ? SIMPLE_PROPERTY_TRANSITION
          : PROPERTY_TRANSITION;
  return CopyReplaceDescriptors(isolate, map, new_descriptors, flag, key,
                                "CopyReplaceDescriptor", simple_flag);
}

int Map::Hash() {
  // For performance reasons we only hash the two most variable fields of a
  // map: the prototype map and bit_field2. For predictability reasons we use
  // objects' offsets in respective pages for hashing instead of raw addresses.
  // We use the map of the prototype because the prototype itself could be
  // compacted, whereas the map will not be moved.
  // NOTE: If we want to compact maps, this hash function won't work as
  // intended anymore.

  // Shift away the tag.
  int hash = ObjectAddressForHashing(prototype().map().ptr()) >> 2;
  return hash ^ bit_field2();
}

namespace {

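// Compares the parts of two maps that must agree for them to be considered
// equivalent for normalization: constructor, prototype, instance type,
// bit_field, extensibility and new.target handling.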
bool CheckEquivalent(const Map first, const Map second) {
  return first.GetConstructor() == second.GetConstructor() &&
         first.prototype() == second.prototype() &&
         first.instance_type() == second.instance_type() &&
         first.bit_field() == second.bit_field() &&
         first.is_extensible() == second.is_extensible() &&
         first.new_target_is_base() == second.new_target_is_base();
}

}  // namespace

bool Map::EquivalentToForTransition(const Map other) const {
  CHECK_EQ(GetConstructor(), other.GetConstructor());
  CHECK_EQ(instance_type(), other.instance_type());

  if (bit_field() != other.bit_field()) return false;
  if (new_target_is_base() != other.new_target_is_base()) return false;
  if (prototype() != other.prototype()) return false;
  if (InstanceTypeChecker::IsJSFunction(instance_type())) {
    // JSFunctions require more checks to ensure that a sloppy function is
    // not equivalent to a strict function.
    int nof =
        std::min(NumberOfOwnDescriptors(), other.NumberOfOwnDescriptors());
    return instance_descriptors().IsEqualUpTo(other.instance_descriptors(),
                                              nof);
  }
  return true;
}

bool Map::EquivalentToForElementsKindTransition(const Map other) const {
  if (!EquivalentToForTransition(other)) return false;
#ifdef DEBUG
  // Ensure that we don't try to generate elements kind transitions from maps
  // with fields that may be generalized in-place. This must already be handled
  // during addition of a new field.
  DescriptorArray descriptors = instance_descriptors();
  for (InternalIndex i : IterateOwnDescriptors()) {
    PropertyDetails details = descriptors.GetDetails(i);
    if (details.location() == kField) {
      DCHECK(IsMostGeneralFieldType(details.representation(),
                                    descriptors.GetFieldType(i)));
    }
  }
#endif
  return true;
}

bool Map::EquivalentToForNormalization(const Map other,
                                       ElementsKind elements_kind,
                                       PropertyNormalizationMode mode) const {
  int properties =
      mode == CLEAR_INOBJECT_PROPERTIES ? 0 : other.GetInObjectProperties();
  // Make sure the elements_kind bits are in bit_field2.
  DCHECK_EQ(this->elements_kind(),
            Map::Bits2::ElementsKindBits::decode(bit_field2()));
  int adjusted_other_bit_field2 =
      Map::Bits2::ElementsKindBits::update(other.bit_field2(), elements_kind);
  return CheckEquivalent(*this, other) &&
         bit_field2() == adjusted_other_bit_field2 &&
         GetInObjectProperties() == properties &&
         JSObject::GetEmbedderFieldCount(*this) ==
             JSObject::GetEmbedderFieldCount(other);
}

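// Callback for TransitionsAccessor::TraverseTransitionTree: records in *data
// the minimum number of unused in-object property fields seen so far.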
static void GetMinInobjectSlack(Map map, void* data) {
  int slack = map.UnusedPropertyFields();
  if (*reinterpret_cast<int*>(data) > slack) {
    *reinterpret_cast<int*>(data) = slack;
  }
}

int Map::ComputeMinObjectSlack(Isolate* isolate) {
  DisallowGarbageCollection no_gc;
  // Has to be an initial map.
  DCHECK(GetBackPointer().IsUndefined(isolate));

  int slack = UnusedPropertyFields();
  TransitionsAccessor transitions(isolate, *this, &no_gc);
  transitions.TraverseTransitionTree(&GetMinInobjectSlack, &slack);
  return slack;
}

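// Callback for TransitionsAccessor::TraverseTransitionTree: shrinks the
// instance size of |map| by the slack passed in *data and marks slack
// tracking as finished.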
static void ShrinkInstanceSize(Map map, void* data) {
  int slack = *reinterpret_cast<int*>(data);
  DCHECK_GE(slack, 0);
#ifdef DEBUG
  int old_visitor_id = Map::GetVisitorId(map);
  int new_unused = map.UnusedPropertyFields() - slack;
#endif
  map.set_instance_size(map.InstanceSizeFromSlack(slack));
  map.set_construction_counter(Map::kNoSlackTracking);
  DCHECK_EQ(old_visitor_id, Map::GetVisitorId(map));
  DCHECK_EQ(new_unused, map.UnusedPropertyFields());
}

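// Callback for TransitionsAccessor::TraverseTransitionTree: marks slack
// tracking as finished without resizing the map.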
static void StopSlackTracking(Map map, void* data) {
  map.set_construction_counter(Map::kNoSlackTracking);
}

void Map::CompleteInobjectSlackTracking(Isolate* isolate) {
  DisallowGarbageCollection no_gc;
  // Has to be an initial map.
  DCHECK(GetBackPointer().IsUndefined(isolate));

  int slack = ComputeMinObjectSlack(isolate);
  TransitionsAccessor transitions(isolate, *this, &no_gc);
  if (slack != 0) {
    // Resize the initial map and all maps in its transition tree.
    transitions.TraverseTransitionTree(&ShrinkInstanceSize, &slack);
  } else {
    transitions.TraverseTransitionTree(&StopSlackTracking, nullptr);
  }
}

void Map::SetInstanceDescriptors(Isolate* isolate, DescriptorArray descriptors,
                                 int number_of_own_descriptors) {
  set_instance_descriptors(descriptors, kReleaseStore);
  SetNumberOfOwnDescriptors(number_of_own_descriptors);
#ifndef V8_DISABLE_WRITE_BARRIERS
  WriteBarrier::Marking(descriptors, number_of_own_descriptors);
#endif
}

// static
Handle<PrototypeInfo> Map::GetOrCreatePrototypeInfo(Handle<JSObject> prototype,
                                                    Isolate* isolate) {
  Object maybe_proto_info = prototype->map().prototype_info();
  if (maybe_proto_info.IsPrototypeInfo()) {
    return handle(PrototypeInfo::cast(maybe_proto_info), isolate);
  }
  Handle<PrototypeInfo> proto_info = isolate->factory()->NewPrototypeInfo();
  prototype->map().set_prototype_info(*proto_info);
  return proto_info;
}

// static
Handle<PrototypeInfo> Map::GetOrCreatePrototypeInfo(Handle<Map> prototype_map,
                                                    Isolate* isolate) {
  Object maybe_proto_info = prototype_map->prototype_info();
  if (maybe_proto_info.IsPrototypeInfo()) {
    return handle(PrototypeInfo::cast(maybe_proto_info), isolate);
  }
  Handle<PrototypeInfo> proto_info = isolate->factory()->NewPrototypeInfo();
  prototype_map->set_prototype_info(*proto_info);
  return proto_info;
}

// static
void Map::SetShouldBeFastPrototypeMap(Handle<Map> map, bool value,
                                      Isolate* isolate) {
  if (value == false && !map->prototype_info().IsPrototypeInfo()) {
    // "False" is the implicit default value, so there's nothing to do.
    return;
  }
  GetOrCreatePrototypeInfo(map, isolate)->set_should_be_fast_map(value);
}

// static
Handle<Object> Map::GetOrCreatePrototypeChainValidityCell(Handle<Map> map,
                                                          Isolate* isolate) {
  Handle<Object> maybe_prototype;
  if (map->IsJSGlobalObjectMap()) {
    DCHECK(map->is_prototype_map());
    // The global object is the prototype of a global proxy, and therefore we
    // can use its validity cell for guarding the global object's prototype
    // change.
    maybe_prototype = isolate->global_object();
  } else {
    maybe_prototype =
        handle(map->GetPrototypeChainRootMap(isolate).prototype(), isolate);
  }
  if (!maybe_prototype->IsJSObject()) {
    return handle(Smi::FromInt(Map::kPrototypeChainValid), isolate);
  }
  Handle<JSObject> prototype = Handle<JSObject>::cast(maybe_prototype);
  // Ensure the prototype is registered with its own prototypes so its cell
  // will be invalidated when necessary.
  JSObject::LazyRegisterPrototypeUser(handle(prototype->map(), isolate),
                                      isolate);

  Object maybe_cell = prototype->map().prototype_validity_cell();
  // Return existing cell if it's still valid.
  if (maybe_cell.IsCell()) {
    Handle<Cell> cell(Cell::cast(maybe_cell), isolate);
    if (cell->value() == Smi::FromInt(Map::kPrototypeChainValid)) {
      return cell;
    }
  }
  // Otherwise create a new cell.
  Handle<Cell> cell = isolate->factory()->NewCell(
      handle(Smi::FromInt(Map::kPrototypeChainValid), isolate));
  prototype->map().set_prototype_validity_cell(*cell);
  return cell;
}

// static
bool Map::IsPrototypeChainInvalidated(Map map) {
  DCHECK(map.is_prototype_map());
  Object maybe_cell = map.prototype_validity_cell();
  if (maybe_cell.IsCell()) {
    Cell cell = Cell::cast(maybe_cell);
    return cell.value() != Smi::FromInt(Map::kPrototypeChainValid);
  }
  return true;
}

// static
void Map::SetPrototype(Isolate* isolate, Handle<Map> map,
                       Handle<HeapObject> prototype,
                       bool enable_prototype_setup_mode) {
  RCS_SCOPE(isolate, RuntimeCallCounterId::kMap_SetPrototype);

  if (prototype->IsJSObject()) {
    Handle<JSObject> prototype_jsobj = Handle<JSObject>::cast(prototype);
    JSObject::OptimizeAsPrototype(prototype_jsobj, enable_prototype_setup_mode);
  } else {
    DCHECK(prototype->IsNull(isolate) || prototype->IsJSProxy());
  }

  WriteBarrierMode wb_mode =
      prototype->IsNull(isolate) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
  map->set_prototype(*prototype, wb_mode);
}

void Map::StartInobjectSlackTracking() {
  DCHECK(!IsInobjectSlackTrackingInProgress());
  if (UnusedPropertyFields() == 0) return;
  set_construction_counter(Map::kSlackTrackingCounterStart);
}

Handle<Map> Map::TransitionToPrototype(Isolate* isolate, Handle<Map> map,
                                       Handle<HeapObject> prototype) {
  Handle<Map> new_map =
      TransitionsAccessor(isolate, map).GetPrototypeTransition(prototype);
  if (new_map.is_null()) {
    new_map = Copy(isolate, map, "TransitionToPrototype");
    TransitionsAccessor(isolate, map)
        .PutPrototypeTransition(prototype, new_map);
    Map::SetPrototype(isolate, new_map, prototype);
  }
  return new_map;
}

Handle<NormalizedMapCache> NormalizedMapCache::New(Isolate* isolate) {
  Handle<WeakFixedArray> array(
      isolate->factory()->NewWeakFixedArray(kEntries, AllocationType::kOld));
  return Handle<NormalizedMapCache>::cast(array);
}

MaybeHandle<Map> NormalizedMapCache::Get(Handle<Map> fast_map,
                                         ElementsKind elements_kind,
                                         PropertyNormalizationMode mode) {
  DisallowGarbageCollection no_gc;
  MaybeObject value = WeakFixedArray::Get(GetIndex(fast_map));
  HeapObject heap_object;
  if (!value->GetHeapObjectIfWeak(&heap_object)) {
    return MaybeHandle<Map>();
  }

  Map normalized_map = Map::cast(heap_object);
  if (!normalized_map.EquivalentToForNormalization(*fast_map, elements_kind,
                                                   mode)) {
    return MaybeHandle<Map>();
  }
  return handle(normalized_map, GetIsolate());
}

void NormalizedMapCache::Set(Handle<Map> fast_map, Handle<Map> normalized_map) {
  DisallowGarbageCollection no_gc;
  DCHECK(normalized_map->is_dictionary_map());
  WeakFixedArray::Set(GetIndex(fast_map),
                      HeapObjectReference::Weak(*normalized_map));
}

}  // namespace internal
}  // namespace v8