// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_HEAP_OBJECT_H_
#define V8_OBJECTS_HEAP_OBJECT_H_

#include "src/common/globals.h"
#include "src/objects/instance-type.h"
#include "src/objects/objects.h"
#include "src/objects/tagged-field.h"
#include "src/roots/roots.h"
#include "src/torque/runtime-macro-shims.h"
#include "src/torque/runtime-support.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

class Heap;
class PrimitiveHeapObject;

// HeapObject is the superclass for all classes describing heap allocated
// objects.
class HeapObject : public Object {
 public:
  bool is_null() const {
    return static_cast<Tagged_t>(ptr()) == static_cast<Tagged_t>(kNullAddress);
  }

  // [map]: Contains a map which contains the object's reflective
  // information.
  DECL_GETTER(map, Map)
  inline void set_map(Map value);

  // This method behaves the same as `set_map` but marks the map transition as
  // safe for the concurrent marker (object layout doesn't change) during
  // verification.
  inline void set_map_safe_transition(Map value);

  inline ObjectSlot map_slot() const;

  // The no-write-barrier version.  This is OK if the object is white and in
  // new space, or if the value is an immortal immutable object, like the maps
  // of primitive (non-JS) objects like strings, heap numbers etc.
  inline void set_map_no_write_barrier(Map value,
                                       RelaxedStoreTag = kRelaxedStore);
  inline void set_map_no_write_barrier(Map value, ReleaseStoreTag);

  // Access the map using acquire load and release store.
  DECL_ACQUIRE_GETTER(map, Map)
  inline void set_map(Map value, ReleaseStoreTag);
  inline void set_map_safe_transition(Map value, ReleaseStoreTag);

  // Compare-and-swaps map word using release store. Returns true if the map
  // word was actually swapped.
  inline bool release_compare_and_swap_map_word(MapWord old_map_word,
                                                MapWord new_map_word);
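  // Illustrative usage (a sketch, not part of this interface): install a new
  // map word only if no other thread won the race, e.g.
  //   MapWord old_word = obj.map_word(kAcquireLoad);
  //   MapWord new_word = MapWord::FromMap(new_map);
  //   if (!obj.release_compare_and_swap_map_word(old_word, new_word)) {
  //     // Lost the race; reload and retry, or bail out.
  //   }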

  // Initialize the map immediately after the object is allocated.
  // Do not use this outside Heap.
  inline void set_map_after_allocation(
      Map value, WriteBarrierMode mode = UPDATE_WRITE_BARRIER);

  // During garbage collection, the map word of a heap object does not
  // necessarily contain a map pointer.
  DECL_RELAXED_GETTER(map_word, MapWord)
  inline void set_map_word(MapWord map_word, RelaxedStoreTag);

  // Access the map word using acquire load and release store.
  DECL_ACQUIRE_GETTER(map_word, MapWord)
  inline void set_map_word(MapWord map_word, ReleaseStoreTag);

  // This method exists to help remove GetIsolate/GetHeap from HeapObject, in a
  // way that doesn't require passing Isolate/Heap down huge call chains or to
  // places where it might not be safe to access it.
  inline ReadOnlyRoots GetReadOnlyRoots() const;
  // This version is intended to be used with the cage base value produced by
  // the i::GetPtrComprCageBase(HeapObject) function, which may return nullptr.
  inline ReadOnlyRoots GetReadOnlyRoots(PtrComprCageBase cage_base) const;
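  // Illustrative usage (a sketch): the cage base is typically obtained with
  // i::GetPtrComprCageBase first, e.g.
  //   PtrComprCageBase cage_base = GetPtrComprCageBase(obj);
  //   ReadOnlyRoots roots = obj.GetReadOnlyRoots(cage_base);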

  // Whether the object is in the RO heap and the RO heap is shared, or in the
  // writable shared heap.
  V8_INLINE bool InSharedHeap() const;

  V8_INLINE bool InSharedWritableHeap() const;

#define IS_TYPE_FUNCTION_DECL(Type) \
  V8_INLINE bool Is##Type() const;  \
  V8_INLINE bool Is##Type(PtrComprCageBase cage_base) const;
  HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
  IS_TYPE_FUNCTION_DECL(HashTableBase)
  IS_TYPE_FUNCTION_DECL(SmallOrderedHashTable)
  IS_TYPE_FUNCTION_DECL(CodeT)
#undef IS_TYPE_FUNCTION_DECL

// Oddball checks are faster when they are raw pointer comparisons, so the
// isolate/read-only roots overloads should be preferred where possible.
#define IS_TYPE_FUNCTION_DECL(Type, Value)              \
  V8_INLINE bool Is##Type(Isolate* isolate) const;      \
  V8_INLINE bool Is##Type(LocalIsolate* isolate) const; \
  V8_INLINE bool Is##Type(ReadOnlyRoots roots) const;   \
  V8_INLINE bool Is##Type() const;
  ODDBALL_LIST(IS_TYPE_FUNCTION_DECL)
  IS_TYPE_FUNCTION_DECL(NullOrUndefined, /* unused */)
#undef IS_TYPE_FUNCTION_DECL
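  // Illustrative usage (a sketch): on hot paths, prefer the ReadOnlyRoots
  // overloads declared above, since they reduce to raw pointer comparisons:
  //   ReadOnlyRoots roots = obj.GetReadOnlyRoots();
  //   if (obj.IsUndefined(roots)) { /* ... */ }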

#define DECL_STRUCT_PREDICATE(NAME, Name, name) \
  V8_INLINE bool Is##Name() const;              \
  V8_INLINE bool Is##Name(PtrComprCageBase cage_base) const;
  STRUCT_LIST(DECL_STRUCT_PREDICATE)
#undef DECL_STRUCT_PREDICATE

  // Converts an address to a HeapObject pointer.
  static inline HeapObject FromAddress(Address address) {
    DCHECK_TAG_ALIGNED(address);
    return HeapObject(address + kHeapObjectTag);
  }
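  // Illustrative: FromAddress() and address() (below) are inverses; the tagged
  // pointer is simply the allocation address plus kHeapObjectTag, e.g.
  //   HeapObject obj = HeapObject::FromAddress(addr);
  //   DCHECK_EQ(addr, obj.address());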

  // Returns the address of this HeapObject.
  inline Address address() const { return ptr() - kHeapObjectTag; }

  // Iterates over pointers contained in the object (including the Map).
  // If the iteration is not performance-critical, use the non-templatized
  // version.
  void Iterate(PtrComprCageBase cage_base, ObjectVisitor* v);

  template <typename ObjectVisitor>
  inline void IterateFast(PtrComprCageBase cage_base, ObjectVisitor* v);

  template <typename ObjectVisitor>
  inline void IterateFast(Map map, int object_size, ObjectVisitor* v);

  // Iterates over all pointers contained in the object except the
  // first map pointer.  The object type is given in the first
  // parameter. This function does not access the map pointer in the
  // object, and so is safe to call while the map pointer is modified.
  // If the iteration is not performance-critical, use the non-templatized
  // version.
  void IterateBody(PtrComprCageBase cage_base, ObjectVisitor* v);
  void IterateBody(Map map, int object_size, ObjectVisitor* v);

  template <typename ObjectVisitor>
  inline void IterateBodyFast(PtrComprCageBase cage_base, ObjectVisitor* v);

  template <typename ObjectVisitor>
  inline void IterateBodyFast(Map map, int object_size, ObjectVisitor* v);

  // Returns true if the object contains a tagged value at the given offset.
  // It is used for invalid slots filtering. If the offset points outside
  // of the object or to the map word, the result is UNDEFINED (!!!).
  V8_EXPORT_PRIVATE bool IsValidSlot(Map map, int offset);

  // Returns the heap object's size in bytes.
  DECL_GETTER(Size, int)

  // Given a heap object's map pointer, returns the object's size in bytes.
  // Useful when the map pointer field is used for other purposes.
  // GC internal.
  V8_EXPORT_PRIVATE int SizeFromMap(Map map) const;

  // Returns the field at offset in obj, as a read/write Object reference.
  // Does no checking, and is safe to use during GC, while maps are invalid.
  // Does not invoke write barrier, so should only be assigned to
  // during marking GC.
  inline ObjectSlot RawField(int byte_offset) const;
  inline MaybeObjectSlot RawMaybeWeakField(int byte_offset) const;
  inline CodeObjectSlot RawCodeField(int byte_offset) const;
  inline ExternalPointerSlot RawExternalPointerField(int byte_offset) const;
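  // Illustrative usage (a sketch; kSomeFieldOffset is a hypothetical byte
  // offset): obtain a raw slot into this object, e.g.
  //   ObjectSlot slot = obj.RawField(kSomeFieldOffset);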

  DECL_CAST(HeapObject)

  // Return the write barrier mode for this. Callers of this function
  // must be able to present a reference to a DisallowGarbageCollection
  // object as a sign that they are not going to use this function
  // from code that allocates and thus invalidates the returned write
  // barrier mode.
  inline WriteBarrierMode GetWriteBarrierMode(
      const DisallowGarbageCollection& promise);
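  // Illustrative usage (a sketch): callers typically hold an open
  // DisallowGarbageCollection scope and pass it as the promise, e.g.
  //   DisallowGarbageCollection no_gc;
  //   WriteBarrierMode mode = obj.GetWriteBarrierMode(no_gc);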

  // Dispatched behavior.
  void HeapObjectShortPrint(std::ostream& os);
#ifdef OBJECT_PRINT
  void PrintHeader(std::ostream& os, const char* id);
#endif
  DECL_PRINTER(HeapObject)
  EXPORT_DECL_VERIFIER(HeapObject)
#ifdef VERIFY_HEAP
  inline void VerifyObjectField(Isolate* isolate, int offset);
  inline void VerifySmiField(int offset);
  inline void VerifyMaybeObjectField(Isolate* isolate, int offset);

  // Verify a pointer is a valid HeapObject pointer that points to object
  // areas in the heap.
  static void VerifyHeapPointer(Isolate* isolate, Object p);
  static void VerifyCodePointer(Isolate* isolate, Object p);
#endif

  static inline AllocationAlignment RequiredAlignment(Map map);
  bool inline CheckRequiredAlignment(PtrComprCageBase cage_base) const;

  // Whether the object needs rehashing. That is the case if the object's
  // content depends on FLAG_hash_seed. When the object is deserialized into
  // a heap with a different hash seed, these objects need to adapt.
  bool NeedsRehashing(InstanceType instance_type) const;
  bool NeedsRehashing(PtrComprCageBase cage_base) const;

  // Rehashing support is not implemented for all objects that need rehashing.
  // With objects that need rehashing but cannot be rehashed, rehashing has to
  // be disabled.
  bool CanBeRehashed(PtrComprCageBase cage_base) const;

  // Rehash the object based on the layout inferred from its map.
  template <typename IsolateT>
  void RehashBasedOnMap(IsolateT* isolate);

  // Layout description.
#define HEAP_OBJECT_FIELDS(V) \
  V(kMapOffset, kTaggedSize)  \
  /* Header size. */          \
  V(kHeaderSize, 0)

  DEFINE_FIELD_OFFSET_CONSTANTS(Object::kHeaderSize, HEAP_OBJECT_FIELDS)
#undef HEAP_OBJECT_FIELDS
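  // Illustrative reading of the layout above: the map word occupies the first
  // tagged-size slot, so kMapOffset == Object::kHeaderSize and
  // kHeaderSize == kMapOffset + kTaggedSize.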

  static_assert(kMapOffset == Internals::kHeapObjectMapOffset);

  using MapField = TaggedField<MapWord, HeapObject::kMapOffset>;

  inline Address GetFieldAddress(int field_offset) const;

 protected:
  // Special-purpose constructor for subclasses that have fast paths where
  // their ptr() is a Smi.
  enum class AllowInlineSmiStorage { kRequireHeapObjectTag, kAllowBeingASmi };
  inline HeapObject(Address ptr, AllowInlineSmiStorage allow_smi);

  OBJECT_CONSTRUCTORS(HeapObject, Object);

 private:
  enum class VerificationMode {
    kSafeMapTransition,
    kPotentialLayoutChange,
  };

  enum class EmitWriteBarrier {
    kYes,
    kNo,
  };

  template <EmitWriteBarrier emit_write_barrier, typename MemoryOrder>
  V8_INLINE void set_map(Map value, MemoryOrder order, VerificationMode mode);
};

OBJECT_CONSTRUCTORS_IMPL(HeapObject, Object)
CAST_ACCESSOR(HeapObject)

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_HEAP_OBJECT_H_