// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Review notes:
//
// - The use of macros in these inline functions may seem superfluous
// but it is absolutely needed to make sure gcc generates optimal
// code. gcc is not happy when attempting to inline too deep.
//

#ifndef V8_OBJECTS_OBJECTS_INL_H_
#define V8_OBJECTS_OBJECTS_INL_H_

#include "src/base/bits.h"
#include "src/base/memory.h"
#include "src/base/numbers/double.h"
#include "src/builtins/builtins.h"
#include "src/common/globals.h"
#include "src/common/ptr-compr-inl.h"
#include "src/handles/handles-inl.h"
#include "src/heap/factory.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/heap/read-only-heap-inl.h"
#include "src/numbers/conversions-inl.h"
#include "src/objects/bigint.h"
#include "src/objects/heap-number-inl.h"
#include "src/objects/heap-object.h"
#include "src/objects/js-proxy-inl.h"  // TODO(jkummerow): Drop.
#include "src/objects/keys.h"
#include "src/objects/literal-objects.h"
#include "src/objects/lookup-inl.h"  // TODO(jkummerow): Drop.
#include "src/objects/objects.h"
#include "src/objects/oddball-inl.h"
#include "src/objects/property-details.h"
#include "src/objects/property.h"
#include "src/objects/regexp-match-info-inl.h"
#include "src/objects/shared-function-info.h"
#include "src/objects/slots-inl.h"
#include "src/objects/smi-inl.h"
#include "src/objects/tagged-field-inl.h"
#include "src/objects/tagged-impl-inl.h"
#include "src/objects/tagged-index.h"
#include "src/objects/templates.h"
#include "src/sandbox/external-pointer-inl.h"
#include "src/sandbox/sandboxed-pointer-inl.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

PropertyDetails::PropertyDetails(Smi smi) { value_ = smi.value(); }

Smi PropertyDetails::AsSmi() const {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit of the property details.
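  // (Concretely: the left shift drops bit 31 of value_ and the arithmetic
  // right shift copies bit 30 into bit 31, so bits 0..30 survive the Smi
  // round-trip with bit 31 mirroring bit 30.)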
  int value = value_ << 1;
  return Smi::FromInt(value >> 1);
}

int PropertyDetails::field_width_in_words() const {
  DCHECK_EQ(location(), PropertyLocation::kField);
  return 1;
}

DEF_GETTER(HeapObject, IsClassBoilerplate, bool) {
  return IsFixedArrayExact(cage_base);
}

bool Object::IsTaggedIndex() const {
  return IsSmi() && TaggedIndex::IsValid(TaggedIndex(ptr()).value());
}

bool Object::InSharedHeap() const {
  return IsHeapObject() && HeapObject::cast(*this).InSharedHeap();
}

bool Object::InSharedWritableHeap() const {
  return IsHeapObject() && HeapObject::cast(*this).InSharedWritableHeap();
}

#define IS_TYPE_FUNCTION_DEF(type_)                                        \
  bool Object::Is##type_() const {                                         \
    return IsHeapObject() && HeapObject::cast(*this).Is##type_();          \
  }                                                                        \
  bool Object::Is##type_(PtrComprCageBase cage_base) const {               \
    return IsHeapObject() && HeapObject::cast(*this).Is##type_(cage_base); \
  }
HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DEF)
IS_TYPE_FUNCTION_DEF(HashTableBase)
IS_TYPE_FUNCTION_DEF(SmallOrderedHashTable)
IS_TYPE_FUNCTION_DEF(CodeT)
#undef IS_TYPE_FUNCTION_DEF

#define IS_TYPE_FUNCTION_DEF(Type, Value)                        \
  bool Object::Is##Type(Isolate* isolate) const {                \
    return Is##Type(ReadOnlyRoots(isolate));                     \
  }                                                              \
  bool Object::Is##Type(LocalIsolate* isolate) const {           \
    return Is##Type(ReadOnlyRoots(isolate));                     \
  }                                                              \
  bool Object::Is##Type(ReadOnlyRoots roots) const {             \
    return *this == roots.Value();                               \
  }                                                              \
  bool Object::Is##Type() const {                                \
    return IsHeapObject() && HeapObject::cast(*this).Is##Type(); \
  }                                                              \
  bool HeapObject::Is##Type(Isolate* isolate) const {            \
    return Object::Is##Type(isolate);                            \
  }                                                              \
  bool HeapObject::Is##Type(LocalIsolate* isolate) const {       \
    return Object::Is##Type(isolate);                            \
  }                                                              \
  bool HeapObject::Is##Type(ReadOnlyRoots roots) const {         \
    return Object::Is##Type(roots);                              \
  }                                                              \
  bool HeapObject::Is##Type() const { return Is##Type(GetReadOnlyRoots()); }
ODDBALL_LIST(IS_TYPE_FUNCTION_DEF)
#undef IS_TYPE_FUNCTION_DEF

bool Object::IsNullOrUndefined(Isolate* isolate) const {
  return IsNullOrUndefined(ReadOnlyRoots(isolate));
}

bool Object::IsNullOrUndefined(ReadOnlyRoots roots) const {
  return IsNull(roots) || IsUndefined(roots);
}

bool Object::IsNullOrUndefined() const {
  return IsHeapObject() && HeapObject::cast(*this).IsNullOrUndefined();
}

bool Object::IsZero() const { return *this == Smi::zero(); }

bool Object::IsPublicSymbol() const {
  return IsSymbol() && !Symbol::cast(*this).is_private();
}
bool Object::IsPrivateSymbol() const {
  return IsSymbol() && Symbol::cast(*this).is_private();
}

bool Object::IsNoSharedNameSentinel() const {
  return *this == SharedFunctionInfo::kNoSharedNameSentinel;
}

template <class T,
          typename std::enable_if<(std::is_arithmetic<T>::value ||
                                   std::is_enum<T>::value) &&
                                      !std::is_floating_point<T>::value,
                                  int>::type>
T Object::Relaxed_ReadField(size_t offset) const {
  // Pointer compression causes types larger than kTaggedSize to be
  // unaligned. Atomic loads must be aligned.
  DCHECK_IMPLIES(COMPRESS_POINTERS_BOOL, sizeof(T) <= kTaggedSize);
  using AtomicT = typename base::AtomicTypeFromByteWidth<sizeof(T)>::type;
  return static_cast<T>(base::AsAtomicImpl<AtomicT>::Relaxed_Load(
      reinterpret_cast<AtomicT*>(field_address(offset))));
}

template <class T,
          typename std::enable_if<(std::is_arithmetic<T>::value ||
                                   std::is_enum<T>::value) &&
                                      !std::is_floating_point<T>::value,
                                  int>::type>
void Object::Relaxed_WriteField(size_t offset, T value) {
  // Pointer compression causes types larger than kTaggedSize to be
  // unaligned. Atomic stores must be aligned.
  DCHECK_IMPLIES(COMPRESS_POINTERS_BOOL, sizeof(T) <= kTaggedSize);
  using AtomicT = typename base::AtomicTypeFromByteWidth<sizeof(T)>::type;
  base::AsAtomicImpl<AtomicT>::Relaxed_Store(
      reinterpret_cast<AtomicT*>(field_address(offset)),
      static_cast<AtomicT>(value));
}

bool HeapObject::InSharedHeap() const {
  if (IsReadOnlyHeapObject(*this)) return V8_SHARED_RO_HEAP_BOOL;
  return InSharedWritableHeap();
}

bool HeapObject::InSharedWritableHeap() const {
  return BasicMemoryChunk::FromHeapObject(*this)->InSharedHeap();
}

bool HeapObject::IsNullOrUndefined(Isolate* isolate) const {
  return IsNullOrUndefined(ReadOnlyRoots(isolate));
}

bool HeapObject::IsNullOrUndefined(ReadOnlyRoots roots) const {
  return Object::IsNullOrUndefined(roots);
}

bool HeapObject::IsNullOrUndefined() const {
  return IsNullOrUndefined(GetReadOnlyRoots());
}

DEF_GETTER(HeapObject, IsCodeT, bool) {
  return V8_EXTERNAL_CODE_SPACE_BOOL ? IsCodeDataContainer(cage_base)
                                     : IsCode(cage_base);
}

DEF_GETTER(HeapObject, IsUniqueName, bool) {
  return IsInternalizedString(cage_base) || IsSymbol(cage_base);
}

DEF_GETTER(HeapObject, IsFunction, bool) {
  return IsJSFunctionOrBoundFunctionOrWrappedFunction();
}

DEF_GETTER(HeapObject, IsCallable, bool) {
  return map(cage_base).is_callable();
}

DEF_GETTER(HeapObject, IsCallableJSProxy, bool) {
  return IsCallable(cage_base) && IsJSProxy(cage_base);
}

DEF_GETTER(HeapObject, IsCallableApiObject, bool) {
  InstanceType type = map(cage_base).instance_type();
  return IsCallable(cage_base) &&
         (type == JS_API_OBJECT_TYPE || type == JS_SPECIAL_API_OBJECT_TYPE);
}

DEF_GETTER(HeapObject, IsNonNullForeign, bool) {
  return IsForeign(cage_base) &&
         Foreign::cast(*this).foreign_address() != kNullAddress;
}

DEF_GETTER(HeapObject, IsConstructor, bool) {
  return map(cage_base).is_constructor();
}

DEF_GETTER(HeapObject, IsSourceTextModuleInfo, bool) {
  return map(cage_base) == GetReadOnlyRoots(cage_base).module_info_map();
}

DEF_GETTER(HeapObject, IsConsString, bool) {
  if (!IsString(cage_base)) return false;
  return StringShape(String::cast(*this).map(cage_base)).IsCons();
}

DEF_GETTER(HeapObject, IsThinString, bool) {
  if (!IsString(cage_base)) return false;
  return StringShape(String::cast(*this).map(cage_base)).IsThin();
}

DEF_GETTER(HeapObject, IsSlicedString, bool) {
  if (!IsString(cage_base)) return false;
  return StringShape(String::cast(*this).map(cage_base)).IsSliced();
}

DEF_GETTER(HeapObject, IsSeqString, bool) {
  if (!IsString(cage_base)) return false;
  return StringShape(String::cast(*this).map(cage_base)).IsSequential();
}

DEF_GETTER(HeapObject, IsSeqOneByteString, bool) {
  if (!IsString(cage_base)) return false;
  return StringShape(String::cast(*this).map(cage_base)).IsSequential() &&
         String::cast(*this).IsOneByteRepresentation(cage_base);
}

DEF_GETTER(HeapObject, IsSeqTwoByteString, bool) {
  if (!IsString(cage_base)) return false;
  return StringShape(String::cast(*this).map(cage_base)).IsSequential() &&
         String::cast(*this).IsTwoByteRepresentation(cage_base);
}

DEF_GETTER(HeapObject, IsExternalOneByteString, bool) {
  if (!IsString(cage_base)) return false;
  return StringShape(String::cast(*this).map(cage_base)).IsExternal() &&
         String::cast(*this).IsOneByteRepresentation(cage_base);
}

DEF_GETTER(HeapObject, IsExternalTwoByteString, bool) {
  if (!IsString(cage_base)) return false;
  return StringShape(String::cast(*this).map(cage_base)).IsExternal() &&
         String::cast(*this).IsTwoByteRepresentation(cage_base);
}

bool Object::IsNumber() const {
  if (IsSmi()) return true;
  HeapObject this_heap_object = HeapObject::cast(*this);
  PtrComprCageBase cage_base = GetPtrComprCageBase(this_heap_object);
  return this_heap_object.IsHeapNumber(cage_base);
}

bool Object::IsNumber(PtrComprCageBase cage_base) const {
  return IsSmi() || IsHeapNumber(cage_base);
}

bool Object::IsNumeric() const {
  if (IsSmi()) return true;
  HeapObject this_heap_object = HeapObject::cast(*this);
  PtrComprCageBase cage_base = GetPtrComprCageBase(this_heap_object);
  return this_heap_object.IsHeapNumber(cage_base) ||
         this_heap_object.IsBigInt(cage_base);
}

bool Object::IsNumeric(PtrComprCageBase cage_base) const {
  return IsNumber(cage_base) || IsBigInt(cage_base);
}

DEF_GETTER(HeapObject, IsArrayList, bool) {
  return map(cage_base) ==
         GetReadOnlyRoots(cage_base).unchecked_array_list_map();
}

DEF_GETTER(HeapObject, IsRegExpMatchInfo, bool) {
  return IsFixedArrayExact(cage_base);
}

DEF_GETTER(HeapObject, IsDeoptimizationData, bool) {
  // Must be a fixed array.
  if (!IsFixedArrayExact(cage_base)) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(*this).length();
  if (length == 0) return true;

  length -= DeoptimizationData::kFirstDeoptEntryIndex;
  return length >= 0 && length % DeoptimizationData::kDeoptEntrySize == 0;
}

DEF_GETTER(HeapObject, IsHandlerTable, bool) {
  return IsFixedArrayExact(cage_base);
}

DEF_GETTER(HeapObject, IsTemplateList, bool) {
  if (!IsFixedArrayExact(cage_base)) return false;
  if (FixedArray::cast(*this).length() < 1) return false;
  return true;
}

DEF_GETTER(HeapObject, IsDependentCode, bool) {
  return IsWeakArrayList(cage_base);
}

DEF_GETTER(HeapObject, IsOSROptimizedCodeCache, bool) {
  return IsWeakFixedArray(cage_base);
}

bool HeapObject::IsAbstractCode() const {
  // TODO(v8:11880): Either make AbstractCode be ByteArray|CodeT or
  // ensure this version is not called for hot code.
  PtrComprCageBase cage_base = GetPtrComprCageBaseSlow(*this);
  return HeapObject::IsAbstractCode(cage_base);
}
bool HeapObject::IsAbstractCode(PtrComprCageBase cage_base) const {
  return IsBytecodeArray(cage_base) || IsCode(cage_base);
}

DEF_GETTER(HeapObject, IsStringWrapper, bool) {
  return IsJSPrimitiveWrapper(cage_base) &&
         JSPrimitiveWrapper::cast(*this).value().IsString(cage_base);
}

DEF_GETTER(HeapObject, IsBooleanWrapper, bool) {
  return IsJSPrimitiveWrapper(cage_base) &&
         JSPrimitiveWrapper::cast(*this).value().IsBoolean(cage_base);
}

DEF_GETTER(HeapObject, IsScriptWrapper, bool) {
  return IsJSPrimitiveWrapper(cage_base) &&
         JSPrimitiveWrapper::cast(*this).value().IsScript(cage_base);
}

DEF_GETTER(HeapObject, IsNumberWrapper, bool) {
  return IsJSPrimitiveWrapper(cage_base) &&
         JSPrimitiveWrapper::cast(*this).value().IsNumber(cage_base);
}

DEF_GETTER(HeapObject, IsBigIntWrapper, bool) {
  return IsJSPrimitiveWrapper(cage_base) &&
         JSPrimitiveWrapper::cast(*this).value().IsBigInt(cage_base);
}

DEF_GETTER(HeapObject, IsSymbolWrapper, bool) {
  return IsJSPrimitiveWrapper(cage_base) &&
         JSPrimitiveWrapper::cast(*this).value().IsSymbol(cage_base);
}

DEF_GETTER(HeapObject, IsStringSet, bool) { return IsHashTable(cage_base); }

DEF_GETTER(HeapObject, IsObjectHashSet, bool) { return IsHashTable(cage_base); }

DEF_GETTER(HeapObject, IsCompilationCacheTable, bool) {
  return IsHashTable(cage_base);
}

DEF_GETTER(HeapObject, IsMapCache, bool) { return IsHashTable(cage_base); }

DEF_GETTER(HeapObject, IsObjectHashTable, bool) {
  return IsHashTable(cage_base);
}

DEF_GETTER(HeapObject, IsHashTableBase, bool) { return IsHashTable(cage_base); }

bool Object::IsPrimitive() const {
  if (IsSmi()) return true;
  HeapObject this_heap_object = HeapObject::cast(*this);
  PtrComprCageBase cage_base = GetPtrComprCageBase(this_heap_object);
  return this_heap_object.map(cage_base).IsPrimitiveMap();
}

bool Object::IsPrimitive(PtrComprCageBase cage_base) const {
  return IsSmi() || HeapObject::cast(*this).map(cage_base).IsPrimitiveMap();
}

// static
Maybe<bool> Object::IsArray(Handle<Object> object) {
  if (object->IsSmi()) return Just(false);
  Handle<HeapObject> heap_object = Handle<HeapObject>::cast(object);
  if (heap_object->IsJSArray()) return Just(true);
  if (!heap_object->IsJSProxy()) return Just(false);
  return JSProxy::IsArray(Handle<JSProxy>::cast(object));
}

DEF_GETTER(HeapObject, IsUndetectable, bool) {
  return map(cage_base).is_undetectable();
}

DEF_GETTER(HeapObject, IsAccessCheckNeeded, bool) {
  if (IsJSGlobalProxy(cage_base)) {
    const JSGlobalProxy proxy = JSGlobalProxy::cast(*this);
    JSGlobalObject global = proxy.GetIsolate()->context().global_object();
    return proxy.IsDetachedFrom(global);
  }
  return map(cage_base).is_access_check_needed();
}

#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                           \
  bool Object::Is##Name() const {                                         \
    return IsHeapObject() && HeapObject::cast(*this).Is##Name();          \
  }                                                                       \
  bool Object::Is##Name(PtrComprCageBase cage_base) const {               \
    return IsHeapObject() && HeapObject::cast(*this).Is##Name(cage_base); \
  }
STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE

double Object::Number() const {
  DCHECK(IsNumber());
  return IsSmi() ? static_cast<double>(Smi(this->ptr()).value())
                 : HeapNumber::unchecked_cast(*this).value();
}

// static
bool Object::SameNumberValue(double value1, double value2) {
  // SameNumberValue(NaN, NaN) is true.
  if (value1 != value2) {
    return std::isnan(value1) && std::isnan(value2);
  }
  // SameNumberValue(0.0, -0.0) is false.
  return (std::signbit(value1) == std::signbit(value2));
}

bool Object::IsNaN() const {
  return this->IsHeapNumber() && std::isnan(HeapNumber::cast(*this).value());
}

bool Object::IsMinusZero() const {
  return this->IsHeapNumber() &&
         i::IsMinusZero(HeapNumber::cast(*this).value());
}

OBJECT_CONSTRUCTORS_IMPL(BigIntBase, PrimitiveHeapObject)
OBJECT_CONSTRUCTORS_IMPL(BigInt, BigIntBase)
OBJECT_CONSTRUCTORS_IMPL(FreshlyAllocatedBigInt, BigIntBase)

// ------------------------------------
// Cast operations

CAST_ACCESSOR(BigIntBase)
CAST_ACCESSOR(BigInt)

bool Object::HasValidElements() {
  // Dictionary is covered under FixedArray. ByteArray is used
  // for the JSTypedArray backing stores.
  return IsFixedArray() || IsFixedDoubleArray() || IsByteArray();
}

bool Object::FilterKey(PropertyFilter filter) {
  DCHECK(!IsPropertyCell());
  if (filter == PRIVATE_NAMES_ONLY) {
    if (!IsSymbol()) return true;
    return !Symbol::cast(*this).is_private_name();
  } else if (IsSymbol()) {
    if (filter & SKIP_SYMBOLS) return true;

    if (Symbol::cast(*this).is_private()) return true;
  } else {
    if (filter & SKIP_STRINGS) return true;
  }
  return false;
}

Representation Object::OptimalRepresentation(PtrComprCageBase cage_base) const {
  if (IsSmi()) {
    return Representation::Smi();
  }
  HeapObject heap_object = HeapObject::cast(*this);
  if (heap_object.IsHeapNumber(cage_base)) {
    return Representation::Double();
  } else if (heap_object.IsUninitialized(
                 heap_object.GetReadOnlyRoots(cage_base))) {
    return Representation::None();
  }
  return Representation::HeapObject();
}

ElementsKind Object::OptimalElementsKind(PtrComprCageBase cage_base) const {
  if (IsSmi()) return PACKED_SMI_ELEMENTS;
  if (IsNumber(cage_base)) return PACKED_DOUBLE_ELEMENTS;
  return PACKED_ELEMENTS;
}

bool Object::FitsRepresentation(Representation representation,
                                bool allow_coercion) const {
  if (representation.IsSmi()) {
    return IsSmi();
  } else if (representation.IsDouble()) {
    return allow_coercion ? IsNumber() : IsHeapNumber();
  } else if (representation.IsHeapObject()) {
    return IsHeapObject();
  } else if (representation.IsNone()) {
    return false;
  }
  return true;
}

bool Object::ToUint32(uint32_t* value) const {
  if (IsSmi()) {
    int num = Smi::ToInt(*this);
    if (num < 0) return false;
    *value = static_cast<uint32_t>(num);
    return true;
  }
  if (IsHeapNumber()) {
    double num = HeapNumber::cast(*this).value();
    return DoubleToUint32IfEqualToSelf(num, value);
  }
  return false;
}

// static
MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
                                         Handle<Object> object,
                                         const char* method_name) {
  if (object->IsJSReceiver()) return Handle<JSReceiver>::cast(object);
  return ToObjectImpl(isolate, object, method_name);
}

// static
MaybeHandle<Name> Object::ToName(Isolate* isolate, Handle<Object> input) {
  if (input->IsName()) return Handle<Name>::cast(input);
  return ConvertToName(isolate, input);
}

// static
MaybeHandle<Object> Object::ToPropertyKey(Isolate* isolate,
                                          Handle<Object> value) {
  if (value->IsSmi() || HeapObject::cast(*value).IsName()) return value;
  return ConvertToPropertyKey(isolate, value);
}

// static
MaybeHandle<Object> Object::ToPrimitive(Isolate* isolate, Handle<Object> input,
                                        ToPrimitiveHint hint) {
  if (input->IsPrimitive()) return input;
  return JSReceiver::ToPrimitive(isolate, Handle<JSReceiver>::cast(input),
                                 hint);
}

// static
MaybeHandle<Object> Object::ToNumber(Isolate* isolate, Handle<Object> input) {
  if (input->IsNumber()) return input;  // Shortcut.
  return ConvertToNumberOrNumeric(isolate, input, Conversion::kToNumber);
}

// static
MaybeHandle<Object> Object::ToNumeric(Isolate* isolate, Handle<Object> input) {
  if (input->IsNumber() || input->IsBigInt()) return input;  // Shortcut.
  return ConvertToNumberOrNumeric(isolate, input, Conversion::kToNumeric);
}

// static
MaybeHandle<Object> Object::ToInteger(Isolate* isolate, Handle<Object> input) {
  if (input->IsSmi()) return input;
  return ConvertToInteger(isolate, input);
}

// static
MaybeHandle<Object> Object::ToInt32(Isolate* isolate, Handle<Object> input) {
  if (input->IsSmi()) return input;
  return ConvertToInt32(isolate, input);
}

// static
MaybeHandle<Object> Object::ToUint32(Isolate* isolate, Handle<Object> input) {
  if (input->IsSmi()) return handle(Smi::cast(*input).ToUint32Smi(), isolate);
  return ConvertToUint32(isolate, input);
}

// static
MaybeHandle<String> Object::ToString(Isolate* isolate, Handle<Object> input) {
  if (input->IsString()) return Handle<String>::cast(input);
  return ConvertToString(isolate, input);
}

// static
MaybeHandle<Object> Object::ToLength(Isolate* isolate, Handle<Object> input) {
  if (input->IsSmi()) {
    int value = std::max(Smi::ToInt(*input), 0);
    return handle(Smi::FromInt(value), isolate);
  }
  return ConvertToLength(isolate, input);
}

// static
MaybeHandle<Object> Object::ToIndex(Isolate* isolate, Handle<Object> input,
                                    MessageTemplate error_index) {
  if (input->IsSmi() && Smi::ToInt(*input) >= 0) return input;
  return ConvertToIndex(isolate, input, error_index);
}

MaybeHandle<Object> Object::GetProperty(Isolate* isolate, Handle<Object> object,
                                        Handle<Name> name) {
  LookupIterator it(isolate, object, name);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return GetProperty(&it);
}

MaybeHandle<Object> Object::GetElement(Isolate* isolate, Handle<Object> object,
                                       uint32_t index) {
  LookupIterator it(isolate, object, index);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return GetProperty(&it);
}

MaybeHandle<Object> Object::SetElement(Isolate* isolate, Handle<Object> object,
                                       uint32_t index, Handle<Object> value,
                                       ShouldThrow should_throw) {
  LookupIterator it(isolate, object, index);
  MAYBE_RETURN_NULL(
      SetProperty(&it, value, StoreOrigin::kMaybeKeyed, Just(should_throw)));
  return value;
}

Address Object::ReadSandboxedPointerField(size_t offset,
                                          PtrComprCageBase cage_base) const {
  return i::ReadSandboxedPointerField(field_address(offset), cage_base);
}

void Object::WriteSandboxedPointerField(size_t offset,
                                        PtrComprCageBase cage_base,
                                        Address value) {
  i::WriteSandboxedPointerField(field_address(offset), cage_base, value);
}

void Object::WriteSandboxedPointerField(size_t offset, Isolate* isolate,
                                        Address value) {
  i::WriteSandboxedPointerField(field_address(offset),
                                PtrComprCageBase(isolate), value);
}

template <ExternalPointerTag tag>
void Object::InitExternalPointerField(size_t offset, Isolate* isolate) {
  i::InitExternalPointerField<tag>(field_address(offset), isolate);
}

template <ExternalPointerTag tag>
void Object::InitExternalPointerField(size_t offset, Isolate* isolate,
                                      Address value) {
  i::InitExternalPointerField<tag>(field_address(offset), isolate, value);
}

template <ExternalPointerTag tag>
Address Object::ReadExternalPointerField(size_t offset,
                                         Isolate* isolate) const {
  return i::ReadExternalPointerField<tag>(field_address(offset), isolate);
}

template <ExternalPointerTag tag>
void Object::WriteExternalPointerField(size_t offset, Isolate* isolate,
                                       Address value) {
  i::WriteExternalPointerField<tag>(field_address(offset), isolate, value);
}

ObjectSlot HeapObject::RawField(int byte_offset) const {
  return ObjectSlot(field_address(byte_offset));
}

MaybeObjectSlot HeapObject::RawMaybeWeakField(int byte_offset) const {
  return MaybeObjectSlot(field_address(byte_offset));
}

CodeObjectSlot HeapObject::RawCodeField(int byte_offset) const {
  return CodeObjectSlot(field_address(byte_offset));
}

ExternalPointerSlot HeapObject::RawExternalPointerField(int byte_offset) const {
  return ExternalPointerSlot(field_address(byte_offset));
}

MapWord MapWord::FromMap(const Map map) {
  DCHECK(map.is_null() || !MapWord::IsPacked(map.ptr()));
#ifdef V8_MAP_PACKING
  return MapWord(Pack(map.ptr()));
#else
  return MapWord(map.ptr());
#endif
}

Map MapWord::ToMap() const {
#ifdef V8_MAP_PACKING
  return Map::unchecked_cast(Object(Unpack(value_)));
#else
  return Map::unchecked_cast(Object(value_));
#endif
}

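// During garbage collection the map word of a moved object is overwritten
// with a forwarding pointer to its new location; the forwarding tag bits
// checked below distinguish such a pointer from an ordinary (tagged) map.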
bool MapWord::IsForwardingAddress() const {
  return (value_ & kForwardingTagMask) == kForwardingTag;
}

MapWord MapWord::FromForwardingAddress(HeapObject object) {
  return MapWord(object.ptr() - kHeapObjectTag);
}

HeapObject MapWord::ToForwardingAddress() {
  DCHECK(IsForwardingAddress());
  HeapObject obj = HeapObject::FromAddress(value_);
  // For objects allocated outside of the main pointer compression cage the
  // variant with explicit cage base must be used.
  DCHECK_IMPLIES(V8_EXTERNAL_CODE_SPACE_BOOL, !IsCodeSpaceObject(obj));
  return obj;
}

HeapObject MapWord::ToForwardingAddress(PtrComprCageBase host_cage_base) {
  DCHECK(IsForwardingAddress());
  if (V8_EXTERNAL_CODE_SPACE_BOOL) {
    // Recompress value_ using proper host_cage_base since the map word
    // has the upper 32 bits that correspond to the main cage base value.
    Address value =
        DecompressTaggedPointer(host_cage_base, CompressTagged(value_));
    return HeapObject::FromAddress(value);
  }
  return HeapObject::FromAddress(value_);
}

#ifdef VERIFY_HEAP
void HeapObject::VerifyObjectField(Isolate* isolate, int offset) {
  VerifyPointer(isolate, TaggedField<Object>::load(isolate, *this, offset));
  static_assert(!COMPRESS_POINTERS_BOOL || kTaggedSize == kInt32Size);
}

void HeapObject::VerifyMaybeObjectField(Isolate* isolate, int offset) {
  MaybeObject::VerifyMaybeObjectPointer(
      isolate, TaggedField<MaybeObject>::load(isolate, *this, offset));
  static_assert(!COMPRESS_POINTERS_BOOL || kTaggedSize == kInt32Size);
}

void HeapObject::VerifySmiField(int offset) {
  CHECK(TaggedField<Object>::load(*this, offset).IsSmi());
  static_assert(!COMPRESS_POINTERS_BOOL || kTaggedSize == kInt32Size);
}

#endif

ReadOnlyRoots HeapObject::GetReadOnlyRoots() const {
  return ReadOnlyHeap::GetReadOnlyRoots(*this);
}

ReadOnlyRoots HeapObject::GetReadOnlyRoots(PtrComprCageBase cage_base) const {
#ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
  DCHECK_NE(cage_base.address(), 0);
  return ReadOnlyRoots(Isolate::FromRootAddress(cage_base.address()));
#else
  return GetReadOnlyRoots();
#endif
}

Map HeapObject::map() const {
  // This method is never used for objects located in code space (Code and
  // free space fillers) and thus it is fine to use auto-computed cage base
  // value.
  DCHECK_IMPLIES(V8_EXTERNAL_CODE_SPACE_BOOL, !IsCodeSpaceObject(*this));
  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
  return HeapObject::map(cage_base);
}
Map HeapObject::map(PtrComprCageBase cage_base) const {
  return map_word(cage_base, kRelaxedLoad).ToMap();
}

void HeapObject::set_map(Map value) {
  set_map<EmitWriteBarrier::kYes>(value, kRelaxedStore,
                                  VerificationMode::kPotentialLayoutChange);
}

void HeapObject::set_map(Map value, ReleaseStoreTag tag) {
  set_map<EmitWriteBarrier::kYes>(value, kReleaseStore,
                                  VerificationMode::kPotentialLayoutChange);
}

void HeapObject::set_map_safe_transition(Map value) {
  set_map<EmitWriteBarrier::kYes>(value, kRelaxedStore,
                                  VerificationMode::kSafeMapTransition);
}

void HeapObject::set_map_safe_transition(Map value, ReleaseStoreTag tag) {
  set_map<EmitWriteBarrier::kYes>(value, kReleaseStore,
                                  VerificationMode::kSafeMapTransition);
}

// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map value, RelaxedStoreTag tag) {
  set_map<EmitWriteBarrier::kNo>(value, kRelaxedStore,
                                 VerificationMode::kPotentialLayoutChange);
}

void HeapObject::set_map_no_write_barrier(Map value, ReleaseStoreTag tag) {
  set_map<EmitWriteBarrier::kNo>(value, kReleaseStore,
                                 VerificationMode::kPotentialLayoutChange);
}

template <HeapObject::EmitWriteBarrier emit_write_barrier, typename MemoryOrder>
void HeapObject::set_map(Map value, MemoryOrder order, VerificationMode mode) {
#if V8_ENABLE_WEBASSEMBLY
  // In {WasmGraphBuilder::SetMap} and {WasmGraphBuilder::LoadMap}, we treat
  // maps as immutable. Therefore we are not allowed to mutate them here.
  DCHECK(!value.IsWasmStructMap() && !value.IsWasmArrayMap());
#endif
  // Object layout changes are currently not supported on background threads.
  // This method might change object layout and therefore can't be used on
  // background threads.
  DCHECK_IMPLIES(mode != VerificationMode::kSafeMapTransition,
                 !LocalHeap::Current());
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap && !value.is_null()) {
    Heap* heap = GetHeapFromWritableObject(*this);
    if (mode == VerificationMode::kSafeMapTransition) {
      heap->VerifySafeMapTransition(*this, value);
    } else {
      DCHECK_EQ(mode, VerificationMode::kPotentialLayoutChange);
      heap->VerifyObjectLayoutChange(*this, value);
    }
  }
#endif
  set_map_word(MapWord::FromMap(value), order);
#ifndef V8_DISABLE_WRITE_BARRIERS
  if (!value.is_null()) {
    if (emit_write_barrier == EmitWriteBarrier::kYes) {
      WriteBarrier::Marking(*this, map_slot(), value);
    } else {
      DCHECK_EQ(emit_write_barrier, EmitWriteBarrier::kNo);
      SLOW_DCHECK(!WriteBarrier::IsRequired(*this, value));
    }
  }
#endif
}

void HeapObject::set_map_after_allocation(Map value, WriteBarrierMode mode) {
  MapWord mapword = MapWord::FromMap(value);
  set_map_word(mapword, kRelaxedStore);
#ifndef V8_DISABLE_WRITE_BARRIERS
  if (mode != SKIP_WRITE_BARRIER) {
    DCHECK(!value.is_null());
    WriteBarrier::Marking(*this, map_slot(), value);
  } else {
    SLOW_DCHECK(!WriteBarrier::IsRequired(*this, value));
  }
#endif
}

DEF_ACQUIRE_GETTER(HeapObject, map, Map) {
  return map_word(cage_base, kAcquireLoad).ToMap();
}

ObjectSlot HeapObject::map_slot() const {
  return ObjectSlot(MapField::address(*this));
}

MapWord HeapObject::map_word(RelaxedLoadTag tag) const {
  // This method is never used for objects located in code space (Code and
  // free space fillers) and thus it is fine to use auto-computed cage base
  // value.
  DCHECK_IMPLIES(V8_EXTERNAL_CODE_SPACE_BOOL, !IsCodeSpaceObject(*this));
  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
  return HeapObject::map_word(cage_base, tag);
}
MapWord HeapObject::map_word(PtrComprCageBase cage_base,
                             RelaxedLoadTag tag) const {
  return MapField::Relaxed_Load_Map_Word(cage_base, *this);
}

void HeapObject::set_map_word(MapWord map_word, RelaxedStoreTag) {
  MapField::Relaxed_Store_Map_Word(*this, map_word);
}

MapWord HeapObject::map_word(AcquireLoadTag tag) const {
  // This method is never used for objects located in code space (Code and
  // free space fillers) and thus it is fine to use auto-computed cage base
  // value.
  DCHECK_IMPLIES(V8_EXTERNAL_CODE_SPACE_BOOL, !IsCodeSpaceObject(*this));
  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
  return HeapObject::map_word(cage_base, tag);
}
MapWord HeapObject::map_word(PtrComprCageBase cage_base,
                             AcquireLoadTag tag) const {
  return MapField::Acquire_Load_No_Unpack(cage_base, *this);
}

void HeapObject::set_map_word(MapWord map_word, ReleaseStoreTag) {
  MapField::Release_Store_Map_Word(*this, map_word);
}

bool HeapObject::release_compare_and_swap_map_word(MapWord old_map_word,
                                                   MapWord new_map_word) {
  Tagged_t result =
      MapField::Release_CompareAndSwap(*this, old_map_word, new_map_word);
  return result == static_cast<Tagged_t>(old_map_word.ptr());
}

// TODO(v8:11880): consider dropping parameterless version.
int HeapObject::Size() const {
  DCHECK_IMPLIES(V8_EXTERNAL_CODE_SPACE_BOOL, !IsCodeSpaceObject(*this));
  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
  return HeapObject::Size(cage_base);
}
int HeapObject::Size(PtrComprCageBase cage_base) const {
  return SizeFromMap(map(cage_base));
}

inline bool IsSpecialReceiverInstanceType(InstanceType instance_type) {
  return instance_type <= LAST_SPECIAL_RECEIVER_TYPE;
}

// This should be in objects/map-inl.h, but can't, because of a cyclic
// dependency.
bool Map::IsSpecialReceiverMap() const {
  bool result = IsSpecialReceiverInstanceType(instance_type());
  DCHECK_IMPLIES(!result,
                 !has_named_interceptor() && !is_access_check_needed());
  return result;
}

inline bool IsCustomElementsReceiverInstanceType(InstanceType instance_type) {
  return instance_type <= LAST_CUSTOM_ELEMENTS_RECEIVER;
}

// This should be in objects/map-inl.h, but can't, because of a cyclic
// dependency.
bool Map::IsCustomElementsReceiverMap() const {
  return IsCustomElementsReceiverInstanceType(instance_type());
}

bool Object::ToArrayLength(uint32_t* index) const {
  return Object::ToUint32(index);
}

bool Object::ToArrayIndex(uint32_t* index) const {
  return Object::ToUint32(index) && *index != kMaxUInt32;
}

bool Object::ToIntegerIndex(size_t* index) const {
  if (IsSmi()) {
    int num = Smi::ToInt(*this);
    if (num < 0) return false;
    *index = static_cast<size_t>(num);
    return true;
  }
  if (IsHeapNumber()) {
    double num = HeapNumber::cast(*this).value();
    if (!(num >= 0)) return false;  // Negation to catch NaNs.
    constexpr double max =
        std::min(kMaxSafeInteger,
                 // The maximum size_t is reserved as "invalid" sentinel.
                 static_cast<double>(std::numeric_limits<size_t>::max() - 1));
    if (num > max) return false;
    size_t result = static_cast<size_t>(num);
    if (num != result) return false;  // Conversion lost fractional precision.
    *index = result;
    return true;
  }
  return false;
}

WriteBarrierMode HeapObject::GetWriteBarrierMode(
    const DisallowGarbageCollection& promise) {
  return GetWriteBarrierModeForObject(*this, &promise);
}

// static
AllocationAlignment HeapObject::RequiredAlignment(Map map) {
  // TODO(v8:4153): We should think about requiring double alignment
  // in general for ByteArray, since they are used as backing store for typed
  // arrays now.
  // TODO(ishell, v8:8875): Consider using aligned allocations for BigInt.
  if (USE_ALLOCATION_ALIGNMENT_BOOL) {
    int instance_type = map.instance_type();
    if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) return kDoubleAligned;
    if (instance_type == HEAP_NUMBER_TYPE) return kDoubleUnaligned;
  }
  return kTaggedAligned;
}

bool HeapObject::CheckRequiredAlignment(PtrComprCageBase cage_base) const {
  AllocationAlignment alignment = HeapObject::RequiredAlignment(map(cage_base));
  CHECK_EQ(0, Heap::GetFillToAlign(address(), alignment));
  return true;
}

Address HeapObject::GetFieldAddress(int field_offset) const {
  return field_address(field_offset);
}

// static
Maybe<bool> Object::GreaterThan(Isolate* isolate, Handle<Object> x,
                                Handle<Object> y) {
  Maybe<ComparisonResult> result = Compare(isolate, x, y);
  if (result.IsJust()) {
    switch (result.FromJust()) {
      case ComparisonResult::kGreaterThan:
        return Just(true);
      case ComparisonResult::kLessThan:
      case ComparisonResult::kEqual:
      case ComparisonResult::kUndefined:
        return Just(false);
    }
  }
  return Nothing<bool>();
}

// static
Maybe<bool> Object::GreaterThanOrEqual(Isolate* isolate, Handle<Object> x,
                                       Handle<Object> y) {
  Maybe<ComparisonResult> result = Compare(isolate, x, y);
  if (result.IsJust()) {
    switch (result.FromJust()) {
      case ComparisonResult::kEqual:
      case ComparisonResult::kGreaterThan:
        return Just(true);
      case ComparisonResult::kLessThan:
      case ComparisonResult::kUndefined:
        return Just(false);
    }
  }
  return Nothing<bool>();
}

// static
Maybe<bool> Object::LessThan(Isolate* isolate, Handle<Object> x,
                             Handle<Object> y) {
  Maybe<ComparisonResult> result = Compare(isolate, x, y);
  if (result.IsJust()) {
    switch (result.FromJust()) {
      case ComparisonResult::kLessThan:
        return Just(true);
      case ComparisonResult::kEqual:
      case ComparisonResult::kGreaterThan:
      case ComparisonResult::kUndefined:
        return Just(false);
    }
  }
  return Nothing<bool>();
}

// static
Maybe<bool> Object::LessThanOrEqual(Isolate* isolate, Handle<Object> x,
                                    Handle<Object> y) {
  Maybe<ComparisonResult> result = Compare(isolate, x, y);
  if (result.IsJust()) {
    switch (result.FromJust()) {
      case ComparisonResult::kEqual:
      case ComparisonResult::kLessThan:
        return Just(true);
      case ComparisonResult::kGreaterThan:
      case ComparisonResult::kUndefined:
        return Just(false);
    }
  }
  return Nothing<bool>();
}

MaybeHandle<Object> Object::GetPropertyOrElement(Isolate* isolate,
                                                 Handle<Object> object,
1092
                                                 Handle<Name> name) {
1093
  PropertyKey key(isolate, name);
1094
  LookupIterator it(isolate, object, key);
1095
  return GetProperty(&it);
1096 1097
}

1098 1099
MaybeHandle<Object> Object::SetPropertyOrElement(
    Isolate* isolate, Handle<Object> object, Handle<Name> name,
1100
    Handle<Object> value, Maybe<ShouldThrow> should_throw,
1101
    StoreOrigin store_origin) {
1102
  PropertyKey key(isolate, name);
1103
  LookupIterator it(isolate, object, key);
1104
  MAYBE_RETURN_NULL(SetProperty(&it, value, store_origin, should_throw));
1105 1106 1107
  return value;
}

1108
MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> receiver,
1109
                                                 Handle<Name> name,
1110
                                                 Handle<JSReceiver> holder) {
1111
  Isolate* isolate = holder->GetIsolate();
1112
  PropertyKey key(isolate, name);
1113
  LookupIterator it(isolate, receiver, key, holder);
1114
  return GetProperty(&it);
1115 1116
}

1117
// static
1118
Object Object::GetSimpleHash(Object object) {
1119
  DisallowGarbageCollection no_gc;
1120
  if (object.IsSmi()) {
1121
    uint32_t hash = ComputeUnseededHash(Smi::ToInt(object));
1122 1123
    return Smi::FromInt(hash & Smi::kMaxValue);
  }
1124 1125
  auto instance_type = HeapObject::cast(object).map().instance_type();
  if (InstanceTypeChecker::IsHeapNumber(instance_type)) {
1126
    double num = HeapNumber::cast(object).value();
1127
    if (std::isnan(num)) return Smi::FromInt(Smi::kMaxValue);
1128
    // Use ComputeUnseededHash for all values in Signed32 range, including -0,
1129 1130 1131 1132
    // which is considered equal to 0 because collections use SameValueZero.
    uint32_t hash;
    // Check range before conversion to avoid undefined behavior.
    if (num >= kMinInt && num <= kMaxInt && FastI2D(FastD2I(num)) == num) {
1133
      hash = ComputeUnseededHash(FastD2I(num));
1134
    } else {
1135
      hash = ComputeLongHash(base::double_to_uint64(num));
1136 1137
    }
    return Smi::FromInt(hash & Smi::kMaxValue);
1138
  } else if (InstanceTypeChecker::IsName(instance_type)) {
1139
    uint32_t hash = Name::cast(object).EnsureHash();
1140
    return Smi::FromInt(hash);
1141
  } else if (InstanceTypeChecker::IsOddball(instance_type)) {
1142
    uint32_t hash = Oddball::cast(object).to_string().EnsureHash();
1143
    return Smi::FromInt(hash);
1144
  } else if (InstanceTypeChecker::IsBigInt(instance_type)) {
1145
    uint32_t hash = BigInt::cast(object).Hash();
1146
    return Smi::FromInt(hash & Smi::kMaxValue);
1147
  } else if (InstanceTypeChecker::IsSharedFunctionInfo(instance_type)) {
1148
    uint32_t hash = SharedFunctionInfo::cast(object).Hash();
1149 1150
    return Smi::FromInt(hash & Smi::kMaxValue);
  }
1151
  DCHECK(object.IsJSReceiver());
1152 1153 1154
  return object;
}

1155
Object Object::GetHash() {
1156
  DisallowGarbageCollection no_gc;
1157
  Object hash = GetSimpleHash(*this);
1158
  if (hash.IsSmi()) return hash;
1159 1160

  DCHECK(IsJSReceiver());
1161
  JSReceiver receiver = JSReceiver::cast(*this);
1162
  return receiver.GetIdentityHash();
1163
}
1164

1165
bool Object::IsShared() const {
1166 1167 1168
  // This logic should be kept in sync with fast paths in
  // CodeStubAssembler::SharedValueBarrier.

1169 1170 1171 1172 1173 1174 1175 1176 1177 1178 1179 1180 1181 1182
  // Smis are trivially shared.
  if (IsSmi()) return true;

  HeapObject object = HeapObject::cast(*this);

  // RO objects are shared when the RO space is shared.
  if (IsReadOnlyHeapObject(object)) {
    return ReadOnlyHeap::IsReadOnlySpaceShared();
  }

  // Check if this object is already shared.
  switch (object.map().instance_type()) {
    case SHARED_STRING_TYPE:
    case SHARED_ONE_BYTE_STRING_TYPE:
1183
    case JS_SHARED_ARRAY_TYPE:
1184
    case JS_SHARED_STRUCT_TYPE:
1185
    case JS_ATOMICS_MUTEX_TYPE:
1186 1187 1188 1189 1190 1191 1192 1193 1194
      DCHECK(object.InSharedHeap());
      return true;
    case INTERNALIZED_STRING_TYPE:
    case ONE_BYTE_INTERNALIZED_STRING_TYPE:
      if (FLAG_shared_string_table) {
        DCHECK(object.InSharedHeap());
        return true;
      }
      return false;
1195 1196
    case HEAP_NUMBER_TYPE:
      return object.InSharedWritableHeap();
1197 1198 1199 1200 1201 1202 1203 1204 1205 1206 1207 1208 1209 1210 1211
    default:
      return false;
  }
}

// static
MaybeHandle<Object> Object::Share(Isolate* isolate, Handle<Object> value,
                                  ShouldThrow throw_if_cannot_be_shared) {
  // Sharing values requires the RO space be shared.
  DCHECK(ReadOnlyHeap::IsReadOnlySpaceShared());
  if (value->IsShared()) return value;
  return ShareSlow(isolate, Handle<HeapObject>::cast(value),
                   throw_if_cannot_be_shared);
}

Handle<Object> ObjectHashTableShape::AsHandle(Handle<Object> key) {
  return key;
}

Relocatable::Relocatable(Isolate* isolate) {
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}

Relocatable::~Relocatable() {
  DCHECK_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}

// Predictably converts HeapObject or Address to uint32 by calculating
// offset of the address in respective MemoryChunk.
static inline uint32_t ObjectAddressForHashing(Address object) {
  uint32_t value = static_cast<uint32_t>(object);
  return value & kPageAlignmentMask;
}

static inline Handle<Object> MakeEntryPair(Isolate* isolate, size_t index,
                                           Handle<Object> value) {
  Handle<Object> key = isolate->factory()->SizeToString(index);
  Handle<FixedArray> entry_storage = isolate->factory()->NewFixedArray(2);
  {
    entry_storage->set(0, *key, SKIP_WRITE_BARRIER);
    entry_storage->set(1, *value, SKIP_WRITE_BARRIER);
  }
  return isolate->factory()->NewJSArrayWithElements(entry_storage,
                                                    PACKED_ELEMENTS, 2);
}

static inline Handle<Object> MakeEntryPair(Isolate* isolate, Handle<Object> key,
                                           Handle<Object> value) {
  Handle<FixedArray> entry_storage = isolate->factory()->NewFixedArray(2);
  {
    entry_storage->set(0, *key, SKIP_WRITE_BARRIER);
    entry_storage->set(1, *value, SKIP_WRITE_BARRIER);
  }
  return isolate->factory()->NewJSArrayWithElements(entry_storage,
                                                    PACKED_ELEMENTS, 2);
}

FreshlyAllocatedBigInt FreshlyAllocatedBigInt::cast(Object object) {
  SLOW_DCHECK(object.IsBigInt());
  return FreshlyAllocatedBigInt(object.ptr());
}

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_OBJECTS_INL_H_