// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_H_
#define V8_OBJECTS_VISITING_H_

#include "src/allocation.h"
#include "src/layout-descriptor.h"

// This file provides base classes and auxiliary methods for defining
// static object visitors used during GC.
// Visiting HeapObject body with a normal ObjectVisitor requires performing
// two switches on object's instance type to determine object size and layout
// and one or more virtual method calls on visitor itself.
// Static visitor is different: it provides a dispatch table which contains
// pointers to specialized visit functions. Each map has the visitor_id
// field which contains an index of specialized visitor to use.

namespace v8 {
namespace internal {

23

24 25 26
// Base class for all static visitors.
class StaticVisitorBase : public AllStatic {
 public:
27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74
#define VISITOR_ID_LIST(V) \
  V(SeqOneByteString)      \
  V(SeqTwoByteString)      \
  V(ShortcutCandidate)     \
  V(ByteArray)             \
  V(FreeSpace)             \
  V(FixedArray)            \
  V(FixedDoubleArray)      \
  V(FixedTypedArray)       \
  V(FixedFloat64Array)     \
  V(ConstantPoolArray)     \
  V(NativeContext)         \
  V(AllocationSite)        \
  V(DataObject2)           \
  V(DataObject3)           \
  V(DataObject4)           \
  V(DataObject5)           \
  V(DataObject6)           \
  V(DataObject7)           \
  V(DataObject8)           \
  V(DataObject9)           \
  V(DataObjectGeneric)     \
  V(JSObject2)             \
  V(JSObject3)             \
  V(JSObject4)             \
  V(JSObject5)             \
  V(JSObject6)             \
  V(JSObject7)             \
  V(JSObject8)             \
  V(JSObject9)             \
  V(JSObjectGeneric)       \
  V(Struct2)               \
  V(Struct3)               \
  V(Struct4)               \
  V(Struct5)               \
  V(Struct6)               \
  V(Struct7)               \
  V(Struct8)               \
  V(Struct9)               \
  V(StructGeneric)         \
  V(ConsString)            \
  V(SlicedString)          \
  V(Symbol)                \
  V(Oddball)               \
  V(Code)                  \
  V(Map)                   \
  V(Cell)                  \
  V(PropertyCell)          \
ulan@chromium.org's avatar
ulan@chromium.org committed
75
  V(WeakCell)              \
76 77 78 79 80 81
  V(SharedFunctionInfo)    \
  V(JSFunction)            \
  V(JSWeakCollection)      \
  V(JSArrayBuffer)         \
  V(JSTypedArray)          \
  V(JSDataView)            \
82 83 84 85 86 87 88 89 90 91 92
  V(JSRegExp)

  // For data objects, JS objects and structs along with generic visitor which
  // can visit object of any size we provide visitors specialized by
  // object size in words.
  // Ids of specialized visitors are declared in a linear order (without
  // holes) starting from the id of visitor specialized for 2 words objects
  // (base visitor id) and ending with the id of generic visitor.
  // Method GetVisitorIdForSize depends on this ordering to calculate visitor
  // id of specialized visitor from given instance size, base visitor id and
  // generic visitor's id.
93
  enum VisitorId {
94
#define VISITOR_ID_ENUM_DECL(id) kVisit##id,
95 96
    VISITOR_ID_LIST(VISITOR_ID_ENUM_DECL)
#undef VISITOR_ID_ENUM_DECL
97
    kVisitorIdCount,
98 99 100
    kVisitDataObject = kVisitDataObject2,
    kVisitJSObject = kVisitJSObject2,
    kVisitStruct = kVisitStruct2,
101 102 103
    kMinObjectSizeInWords = 2
  };

104 105 106
  // Visitor ID should fit in one byte.
  STATIC_ASSERT(kVisitorIdCount <= 256);

107 108
  // Determine which specialized visitor should be used for given instance type
  // and instance type.
109 110
  static VisitorId GetVisitorId(int instance_type, int instance_size,
                                bool has_unboxed_fields);
111

112
  // Determine which specialized visitor should be used for given map.
113
  static VisitorId GetVisitorId(Map* map) {
114 115 116
    return GetVisitorId(
        map->instance_type(), map->instance_size(),
        FLAG_unbox_double_fields && !map->HasFastPointerLayout());
117 118 119 120
  }

  // For visitors that allow specialization by size calculate VisitorId based
  // on size, base visitor id and generic visitor id.
121
  static VisitorId GetVisitorIdForSize(VisitorId base, VisitorId generic,
122 123
                                       int object_size,
                                       bool has_unboxed_fields) {
124
    DCHECK((base == kVisitDataObject) || (base == kVisitStruct) ||
125
           (base == kVisitJSObject));
126 127 128
    DCHECK(IsAligned(object_size, kPointerSize));
    DCHECK(kMinObjectSizeInWords * kPointerSize <= object_size);
    DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
129 130 131
    DCHECK(!has_unboxed_fields || (base == kVisitJSObject));

    if (has_unboxed_fields) return generic;
132

133 134 135
    int visitor_id =
        Min(base + (object_size >> kPointerSizeLog2) - kMinObjectSizeInWords,
            static_cast<int>(generic));
136

137
    return static_cast<VisitorId>(visitor_id);
138 139 140 141
  }
};


142
template <typename Callback>
143 144
class VisitorDispatchTable {
 public:
145 146 147 148 149
  void CopyFrom(VisitorDispatchTable* other) {
    // We are not using memcpy to guarantee that during update
    // every element of callbacks_ array will remain correct
    // pointer (memcpy might be implemented as a byte copying loop).
    for (int i = 0; i < StaticVisitorBase::kVisitorIdCount; i++) {
150
      base::NoBarrier_Store(&callbacks_[i], other->callbacks_[i]);
151 152 153
    }
  }

154 155 156 157
  inline Callback GetVisitorById(StaticVisitorBase::VisitorId id) {
    return reinterpret_cast<Callback>(callbacks_[id]);
  }

158
  inline Callback GetVisitor(Map* map) {
159
    return reinterpret_cast<Callback>(callbacks_[map->visitor_id()]);
160 161 162
  }

  void Register(StaticVisitorBase::VisitorId id, Callback callback) {
163
    DCHECK(id < StaticVisitorBase::kVisitorIdCount);  // id is unsigned.
164
    callbacks_[id] = reinterpret_cast<base::AtomicWord>(callback);
165 166
  }

167 168
  template <typename Visitor, StaticVisitorBase::VisitorId base,
            StaticVisitorBase::VisitorId generic, int object_size_in_words>
169 170
  void RegisterSpecialization() {
    static const int size = object_size_in_words * kPointerSize;
171
    Register(StaticVisitorBase::GetVisitorIdForSize(base, generic, size, false),
172 173 174 175
             &Visitor::template VisitSpecialized<size>);
  }


176 177
  template <typename Visitor, StaticVisitorBase::VisitorId base,
            StaticVisitorBase::VisitorId generic>
178
  void RegisterSpecializations() {
179 180
    STATIC_ASSERT((generic - base + StaticVisitorBase::kMinObjectSizeInWords) ==
                  10);
181 182 183 184 185 186 187 188 189 190 191 192
    RegisterSpecialization<Visitor, base, generic, 2>();
    RegisterSpecialization<Visitor, base, generic, 3>();
    RegisterSpecialization<Visitor, base, generic, 4>();
    RegisterSpecialization<Visitor, base, generic, 5>();
    RegisterSpecialization<Visitor, base, generic, 6>();
    RegisterSpecialization<Visitor, base, generic, 7>();
    RegisterSpecialization<Visitor, base, generic, 8>();
    RegisterSpecialization<Visitor, base, generic, 9>();
    Register(generic, &Visitor::Visit);
  }

 private:
193
  base::AtomicWord callbacks_[StaticVisitorBase::kVisitorIdCount];
194 195 196
};


197
template <typename StaticVisitor>
198 199
class BodyVisitorBase : public AllStatic {
 public:
200 201
  INLINE(static void IteratePointers(Heap* heap, HeapObject* object,
                                     int start_offset, int end_offset)) {
202
    DCHECK(!FLAG_unbox_double_fields || object->map()->HasFastPointerLayout());
203 204 205 206 207
    IterateRawPointers(heap, object, start_offset, end_offset);
  }

  INLINE(static void IterateBody(Heap* heap, HeapObject* object,
                                 int start_offset, int end_offset)) {
208
    if (!FLAG_unbox_double_fields || object->map()->HasFastPointerLayout()) {
209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229
      IterateRawPointers(heap, object, start_offset, end_offset);
    } else {
      IterateBodyUsingLayoutDescriptor(heap, object, start_offset, end_offset);
    }
  }

 private:
  INLINE(static void IterateRawPointers(Heap* heap, HeapObject* object,
                                        int start_offset, int end_offset)) {
    StaticVisitor::VisitPointers(heap,
                                 HeapObject::RawField(object, start_offset),
                                 HeapObject::RawField(object, end_offset));
  }

  static void IterateBodyUsingLayoutDescriptor(Heap* heap, HeapObject* object,
                                               int start_offset,
                                               int end_offset) {
    DCHECK(FLAG_unbox_double_fields);
    DCHECK(IsAligned(start_offset, kPointerSize) &&
           IsAligned(end_offset, kPointerSize));

230
    LayoutDescriptorHelper helper(object->map());
231
    DCHECK(!helper.all_fields_tagged());
232 233 234 235
    for (int offset = start_offset; offset < end_offset;) {
      int end_of_region_offset;
      if (helper.IsTagged(offset, end_offset, &end_of_region_offset)) {
        IterateRawPointers(heap, object, offset, end_of_region_offset);
236
      }
237
      offset = end_of_region_offset;
238
    }
239 240 241 242
  }
};


243
template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
244 245
class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> {
 public:
246
  INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
247
    int object_size = BodyDescriptor::SizeOf(map, object);
248
    BodyVisitorBase<StaticVisitor>::IterateBody(
249
        map->GetHeap(), object, BodyDescriptor::kStartOffset, object_size);
250 251 252
    return static_cast<ReturnType>(object_size);
  }

253
  template <int object_size>
254
  static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
255
    DCHECK(BodyDescriptor::SizeOf(map, object) == object_size);
256
    BodyVisitorBase<StaticVisitor>::IteratePointers(
257
        map->GetHeap(), object, BodyDescriptor::kStartOffset, object_size);
258 259 260 261 262
    return static_cast<ReturnType>(object_size);
  }
};


263
template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
264 265
class FixedBodyVisitor : public BodyVisitorBase<StaticVisitor> {
 public:
266
  INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
267 268 269
    BodyVisitorBase<StaticVisitor>::IterateBody(map->GetHeap(), object,
                                                BodyDescriptor::kStartOffset,
                                                BodyDescriptor::kEndOffset);
270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290
    return static_cast<ReturnType>(BodyDescriptor::kSize);
  }
};


// Base class for visitors used for a linear new space iteration.
// IterateBody returns size of visited object.
// Certain types of objects (i.e. Code objects) are not handled
// by dispatch table of this visitor because they cannot appear
// in the new space.
//
// This class is intended to be used in the following way:
//
//   class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> {
//     ...
//   }
//
// This is an example of Curiously recurring template pattern
// (see http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern).
// We use CRTP to guarantee aggressive compile time optimizations (i.e.
// inlining and specialization of StaticVisitor::VisitPointers methods).
291
template <typename StaticVisitor>
292 293
class StaticNewSpaceVisitor : public StaticVisitorBase {
 public:
294
  static void Initialize();
295

296
  INLINE(static int IterateBody(Map* map, HeapObject* obj)) {
297 298 299
    return table_.GetVisitor(map)(map, obj);
  }

300
  INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
301
    for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(heap, p);
302 303 304
  }

 private:
305
  INLINE(static int VisitJSFunction(Map* map, HeapObject* object)) {
306 307
    Heap* heap = map->GetHeap();
    VisitPointers(heap,
308
                  HeapObject::RawField(object, JSFunction::kPropertiesOffset),
309 310 311 312 313
                  HeapObject::RawField(object, JSFunction::kCodeEntryOffset));

    // Don't visit code entry. We are using this visitor only during scavenges.

    VisitPointers(
314 315 316
        heap, HeapObject::RawField(object,
                                   JSFunction::kCodeEntryOffset + kPointerSize),
        HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset));
317 318 319
    return JSFunction::kSize;
  }

320
  INLINE(static int VisitByteArray(Map* map, HeapObject* object)) {
321 322 323
    return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
  }

324
  INLINE(static int VisitFixedDoubleArray(Map* map, HeapObject* object)) {
325 326 327 328
    int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
    return FixedDoubleArray::SizeFor(length);
  }

329 330 331 332
  INLINE(static int VisitFixedTypedArray(Map* map, HeapObject* object)) {
    return reinterpret_cast<FixedTypedArrayBase*>(object)->size();
  }

333
  INLINE(static int VisitJSObject(Map* map, HeapObject* object)) {
334 335 336
    return JSObjectVisitor::Visit(map, object);
  }

337
  INLINE(static int VisitSeqOneByteString(Map* map, HeapObject* object)) {
338 339
    return SeqOneByteString::cast(object)
        ->SeqOneByteStringSize(map->instance_type());
340 341
  }

342
  INLINE(static int VisitSeqTwoByteString(Map* map, HeapObject* object)) {
343 344
    return SeqTwoByteString::cast(object)
        ->SeqTwoByteStringSize(map->instance_type());
345 346
  }

347
  INLINE(static int VisitFreeSpace(Map* map, HeapObject* object)) {
348 349 350
    return FreeSpace::cast(object)->Size();
  }

351 352
  INLINE(static int VisitJSArrayBuffer(Map* map, HeapObject* object));
  INLINE(static int VisitJSTypedArray(Map* map, HeapObject* object));
353
  INLINE(static int VisitJSDataView(Map* map, HeapObject* object));
354

355 356
  class DataObjectVisitor {
   public:
357
    template <int object_size>
358 359 360 361
    static inline int VisitSpecialized(Map* map, HeapObject* object) {
      return object_size;
    }

362
    INLINE(static int Visit(Map* map, HeapObject* object)) {
363 364 365 366
      return map->instance_size();
    }
  };

367 368
  typedef FlexibleBodyVisitor<StaticVisitor, StructBodyDescriptor, int>
      StructVisitor;
369

370 371
  typedef FlexibleBodyVisitor<StaticVisitor, JSObject::BodyDescriptor, int>
      JSObjectVisitor;
372 373 374 375 376 377 378

  typedef int (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};


379
template <typename StaticVisitor>
380
VisitorDispatchTable<typename StaticNewSpaceVisitor<StaticVisitor>::Callback>
381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397
    StaticNewSpaceVisitor<StaticVisitor>::table_;


// Base class for visitors used to transitively mark the entire heap.
// IterateBody returns nothing.
// Certain types of objects might not be handled by this base class and
// no visitor function is registered by the generic initialization. A
// specialized visitor function needs to be provided by the inheriting
// class itself for those cases.
//
// This class is intended to be used in the following way:
//
//   class SomeVisitor : public StaticMarkingVisitor<SomeVisitor> {
//     ...
//   }
//
// This is an example of Curiously recurring template pattern.
398
template <typename StaticVisitor>
399 400 401 402
class StaticMarkingVisitor : public StaticVisitorBase {
 public:
  static void Initialize();

403
  INLINE(static void IterateBody(Map* map, HeapObject* obj)) {
404 405 406
    table_.GetVisitor(map)(map, obj);
  }

407
  INLINE(static void VisitPropertyCell(Map* map, HeapObject* object));
ulan@chromium.org's avatar
ulan@chromium.org committed
408
  INLINE(static void VisitWeakCell(Map* map, HeapObject* object));
409 410
  INLINE(static void VisitCodeEntry(Heap* heap, Address entry_address));
  INLINE(static void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo));
411
  INLINE(static void VisitCell(Heap* heap, RelocInfo* rinfo));
412 413 414
  INLINE(static void VisitDebugTarget(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCodeTarget(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCodeAgeSequence(Heap* heap, RelocInfo* rinfo));
415 416
  INLINE(static void VisitExternalReference(RelocInfo* rinfo)) {}
  INLINE(static void VisitRuntimeEntry(RelocInfo* rinfo)) {}
417
  // Skip the weak next code link in a code object.
418
  INLINE(static void VisitNextCodeLink(Heap* heap, Object** slot)) {}
419

420 421 422 423 424
  // TODO(mstarzinger): This should be made protected once refactoring is done.
  // Mark non-optimize code for functions inlined into the given optimized
  // code. This will prevent it from being flushed.
  static void MarkInlinedFunctionsCode(Heap* heap, Code* code);

425
 protected:
426 427 428
  INLINE(static void VisitMap(Map* map, HeapObject* object));
  INLINE(static void VisitCode(Map* map, HeapObject* object));
  INLINE(static void VisitSharedFunctionInfo(Map* map, HeapObject* object));
429
  INLINE(static void VisitConstantPoolArray(Map* map, HeapObject* object));
430 431
  INLINE(static void VisitAllocationSite(Map* map, HeapObject* object));
  INLINE(static void VisitWeakCollection(Map* map, HeapObject* object));
432 433
  INLINE(static void VisitJSFunction(Map* map, HeapObject* object));
  INLINE(static void VisitJSRegExp(Map* map, HeapObject* object));
434 435
  INLINE(static void VisitJSArrayBuffer(Map* map, HeapObject* object));
  INLINE(static void VisitJSTypedArray(Map* map, HeapObject* object));
436
  INLINE(static void VisitJSDataView(Map* map, HeapObject* object));
437
  INLINE(static void VisitNativeContext(Map* map, HeapObject* object));
438

439 440 441 442 443
  // Mark pointers in a Map and its TransitionArray together, possibly
  // treating transitions or back pointers weak.
  static void MarkMapContents(Heap* heap, Map* map);
  static void MarkTransitionArray(Heap* heap, TransitionArray* transitions);

444
  // Code flushing support.
445 446
  INLINE(static bool IsFlushable(Heap* heap, JSFunction* function));
  INLINE(static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info));
447 448 449 450 451 452 453 454

  // Helpers used by code flushing support that visit pointer fields and treat
  // references to code objects either strongly or weakly.
  static void VisitSharedFunctionInfoStrongCode(Heap* heap, HeapObject* object);
  static void VisitSharedFunctionInfoWeakCode(Heap* heap, HeapObject* object);
  static void VisitJSFunctionStrongCode(Heap* heap, HeapObject* object);
  static void VisitJSFunctionWeakCode(Heap* heap, HeapObject* object);

455 456
  class DataObjectVisitor {
   public:
457 458
    template <int size>
    static inline void VisitSpecialized(Map* map, HeapObject* object) {}
459

460
    INLINE(static void Visit(Map* map, HeapObject* object)) {}
461 462
  };

463 464
  typedef FlexibleBodyVisitor<StaticVisitor, FixedArray::BodyDescriptor, void>
      FixedArrayVisitor;
465

466 467
  typedef FlexibleBodyVisitor<StaticVisitor, JSObject::BodyDescriptor, void>
      JSObjectVisitor;
468

469 470
  typedef FlexibleBodyVisitor<StaticVisitor, StructBodyDescriptor, void>
      StructObjectVisitor;
471 472 473 474 475 476 477

  typedef void (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};


478
template <typename StaticVisitor>
479 480
VisitorDispatchTable<typename StaticMarkingVisitor<StaticVisitor>::Callback>
    StaticMarkingVisitor<StaticVisitor>::table_;
481 482


483 484 485 486 487 488 489 490 491
class WeakObjectRetainer;


// A weak list is single linked list where each element has a weak pointer to
// the next element. Given the head of the list, this function removes dead
// elements from the list and if requested records slots for next-element
// pointers. The template parameter T is a WeakListVisitor that defines how to
// access the next-element pointers.
template <class T>
492
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer);
493 494
}
}  // namespace v8::internal
495

496
#endif  // V8_OBJECTS_VISITING_H_