// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_MARK_COMPACT_INL_H_
#define V8_HEAP_MARK_COMPACT_INL_H_

#include "src/heap/mark-compact.h"

#include "src/assembler-inl.h"
#include "src/base/bits.h"
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/remembered-set.h"
#include "src/objects/js-collection-inl.h"
#include "src/objects/js-weak-refs-inl.h"
#include "src/objects/slots-inl.h"
#include "src/transitions.h"

namespace v8 {
namespace internal {

// Transitions |obj| from grey to black. Returns false if the transition
// failed, i.e. the mark bits were not in the grey state (for example the
// object is already black). Only the caller that wins the transition
// accounts for the object's live bytes, so each object is counted once.
template <typename ConcreteState, AccessMode access_mode>
bool MarkingStateBase<ConcreteState, access_mode>::GreyToBlack(HeapObject obj) {
  MemoryChunk* p = MemoryChunk::FromHeapObject(obj);
  MarkBit markbit = MarkBitFrom(p, obj->address());
  if (!Marking::GreyToBlack<access_mode>(markbit)) return false;
  // Live-byte accounting is per memory chunk; CRTP dispatch lets the
  // concrete state choose atomic vs. non-atomic bookkeeping.
  static_cast<ConcreteState*>(this)->IncrementLiveBytes(p, obj->Size());
  return true;
}

template <typename ConcreteState, AccessMode access_mode>
34
bool MarkingStateBase<ConcreteState, access_mode>::WhiteToGrey(HeapObject obj) {
35 36 37 38 39
  return Marking::WhiteToGrey<access_mode>(MarkBitFrom(obj));
}

// Attempts to move |obj| straight from white to black, implemented as the
// two transitions white -> grey -> black. Fails (returns false) as soon as
// either step fails, e.g. when another thread already marked the object.
template <typename ConcreteState, AccessMode access_mode>
bool MarkingStateBase<ConcreteState, access_mode>::WhiteToBlack(
    HeapObject obj) {
  if (!WhiteToGrey(obj)) return false;
  return GreyToBlack(obj);
}

// Constructs a marking visitor bound to |collector|'s heap and the given
// |marking_state|. The collector's current epoch is cached so that
// VisitDescriptors can detect descriptor-marking counters from a previous
// mark-compact cycle.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
MarkingVisitor<fixed_array_mode, retaining_path_mode,
               MarkingState>::MarkingVisitor(MarkCompactCollector* collector,
                                             MarkingState* marking_state)
    : heap_(collector->heap()),
      collector_(collector),
      marking_state_(marking_state),
      mark_compact_epoch_(collector->epoch()) {}

// Visits a BytecodeArray's body and ages the bytecode, which moves it
// towards being eligible for flushing.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitBytecodeArray(Map map,
                                                     BytecodeArray array) {
  int size = BytecodeArray::BodyDescriptor::SizeOf(map, array);
  BytecodeArray::BodyDescriptor::IterateBody(map, array, size, this);

  // Forced GCs are excluded from aging so that they cannot artificially
  // accelerate bytecode flushing.
  if (!heap_->is_current_gc_forced()) {
    array->MakeOlder();
  }
  return size;
}

68 69 70 71 72 73 74 75 76 77 78 79
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitDescriptorArray(Map map,
                                                       DescriptorArray array) {
  int size = DescriptorArray::BodyDescriptor::SizeOf(map, array);
  VisitPointers(array, array->GetFirstPointerSlot(),
                array->GetDescriptorSlot(0));
  VisitDescriptors(array, array->number_of_descriptors());
  return size;
}

// Visits a SharedFunctionInfo and decides whether its bytecode should be
// treated as a flushing candidate (weakly) or visited strongly.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
    VisitSharedFunctionInfo(Map map, SharedFunctionInfo shared_info) {
  int size = SharedFunctionInfo::BodyDescriptor::SizeOf(map, shared_info);
  SharedFunctionInfo::BodyDescriptor::IterateBody(map, shared_info, size, this);

  // If the SharedFunctionInfo has old bytecode, mark it as flushable,
  // otherwise visit the function data field strongly.
  if (shared_info->ShouldFlushBytecode(Heap::GetBytecodeFlushMode())) {
    // The function-data slot is deliberately left unvisited here; the
    // collector processes the candidate later once liveness is known.
    collector_->AddBytecodeFlushingCandidate(shared_info);
  } else {
    VisitPointer(shared_info,
                 shared_info.RawField(SharedFunctionInfo::kFunctionDataOffset));
  }
  return size;
}

98 99 100 101 102 103 104 105 106 107 108 109 110
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitJSFunction(Map map, JSFunction object) {
  int size = Parent::VisitJSFunction(map, object);

  // Check if the JSFunction needs reset due to bytecode being flushed.
  if (FLAG_flush_bytecode && object->NeedsResetDueToFlushedBytecode()) {
    collector_->AddFlushedJSFunction(object);
  }

  return size;
}
111
template <FixedArrayVisitationMode fixed_array_mode,
112 113
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
114
                   MarkingState>::VisitFixedArray(Map map, FixedArray object) {
115 116 117 118 119 120
  return (fixed_array_mode == FixedArrayVisitationMode::kRegular)
             ? Parent::VisitFixedArray(map, object)
             : VisitFixedArrayIncremental(map, object);
}

// Shared visitation path for object kinds that can hold references into the
// embedder's heap. If an embedder heap tracer is in use, the object is also
// pushed onto the embedder worklist so the tracer can process its wrappers.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
template <typename T>
V8_INLINE int
MarkingVisitor<fixed_array_mode, retaining_path_mode,
               MarkingState>::VisitEmbedderTracingSubclass(Map map, T object) {
  if (heap_->local_embedder_heap_tracer()->InUse()) {
    marking_worklist()->embedder()->Push(MarkCompactCollectorBase::kMainThread,
                                         object);
  }
  // Regular V8-side visitation happens unconditionally.
  int size = T::BodyDescriptor::SizeOf(map, object);
  T::BodyDescriptor::IterateBody(map, object, size, this);
  return size;
}

// JSApiObjects may wrap embedder objects; defer to the shared
// embedder-tracing visitation path.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitJSApiObject(Map map, JSObject object) {
  return VisitEmbedderTracingSubclass(map, object);
}

// JSArrayBuffers may reference embedder-managed backing stores; defer to
// the shared embedder-tracing visitation path.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitJSArrayBuffer(Map map,
                                                     JSArrayBuffer object) {
  return VisitEmbedderTracingSubclass(map, object);
}

// JSDataViews are visited through the shared embedder-tracing path, like
// the other array-buffer-related object kinds.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitJSDataView(Map map, JSDataView object) {
  return VisitEmbedderTracingSubclass(map, object);
}

// JSTypedArrays are visited through the shared embedder-tracing path, like
// the other array-buffer-related object kinds.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitJSTypedArray(Map map,
                                                    JSTypedArray object) {
  return VisitEmbedderTracingSubclass(map, object);
}

// Visits an EphemeronHashTable. Keys are treated weakly: a value is only
// visited strongly if its key is already known to be live. Otherwise the
// (key, value) pair is recorded so it can be revisited once the liveness
// of the transitive closure is known.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
    VisitEphemeronHashTable(Map map, EphemeronHashTable table) {
  // The table itself is processed again during weak-object clearing.
  collector_->AddEphemeronHashTable(table);

  for (int i = 0; i < table->Capacity(); i++) {
    ObjectSlot key_slot =
        table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i));
    HeapObject key = HeapObject::cast(table->KeyAt(i));
    // The key slot is recorded for evacuation but the key is not marked
    // through it — that is what makes the reference weak.
    collector_->RecordSlot(table, key_slot, key);

    ObjectSlot value_slot =
        table->RawFieldOfElementAt(EphemeronHashTable::EntryToValueIndex(i));

    if (marking_state()->IsBlackOrGrey(key)) {
      // Live key: the value is reachable and can be visited strongly.
      VisitPointer(table, value_slot);

    } else {
      Object value_obj = *value_slot;

      if (value_obj->IsHeapObject()) {
        HeapObject value = HeapObject::cast(value_obj);
        collector_->RecordSlot(table, value_slot, value);

        // Revisit ephemerons with both key and value unreachable at end
        // of concurrent marking cycle.
        if (marking_state()->IsWhite(value)) {
          collector_->AddEphemeron(key, value);
        }
      }
    }
  }

  return table->SizeFromMap(map);
}

// Visits a Map. Transitionable maps need special handling because their
// descriptor arrays may be shared with other maps: only the descriptors
// owned by this map are marked here.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitMap(Map meta_map, Map map) {
  int size = Map::BodyDescriptor::SizeOf(meta_map, map);
  if (map->CanTransition()) {
    // Maps that can transition share their descriptor arrays and require
    // special visiting logic to avoid memory leaks.
    // Since descriptor arrays are potentially shared, ensure that only the
    // descriptors that belong to this map are marked. The first time a
    // non-empty descriptor array is marked, its header is also visited. The
    // slot holding the descriptor array will be implicitly recorded when the
    // pointer fields of this map are visited.
    DescriptorArray descriptors = map->instance_descriptors();
    MarkDescriptorArrayBlack(map, descriptors);
    int number_of_own_descriptors = map->NumberOfOwnDescriptors();
    if (number_of_own_descriptors) {
      DCHECK_LE(number_of_own_descriptors,
                descriptors->number_of_descriptors());
      VisitDescriptors(descriptors, number_of_own_descriptors);
    }
    // Mark the pointer fields of the Map. Since the transitions array has
    // been marked already, it is fine that one of these fields contains a
    // pointer to it.
  }
  Map::BodyDescriptor::IterateBody(meta_map, map, size, this);
  return size;
}

template <FixedArrayVisitationMode fixed_array_mode,
232 233
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
234
                   MarkingState>::VisitTransitionArray(Map map,
235
                                                       TransitionArray array) {
236
  int size = TransitionArray::BodyDescriptor::SizeOf(map, array);
237
  TransitionArray::BodyDescriptor::IterateBody(map, array, size, this);
238 239 240 241
  collector_->AddTransitionArray(array);
  return size;
}

// Visits a JSWeakRef. The target is not marked through the weak-ref slot;
// live targets only get their slot recorded, dead-looking targets defer the
// weak ref to the collector for processing after marking completes.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitJSWeakRef(Map map, JSWeakRef weak_ref) {
  if (weak_ref->target()->IsHeapObject()) {
    HeapObject target = HeapObject::cast(weak_ref->target());
    if (marking_state()->IsBlackOrGrey(target)) {
      // Record the slot inside the JSWeakRef, since the IterateBody below
      // won't visit it.
      ObjectSlot slot = weak_ref.RawField(JSWeakRef::kTargetOffset);
      collector_->RecordSlot(weak_ref, slot, target);
    } else {
      // JSWeakRef points to a potentially dead object. We have to process
      // them when we know the liveness of the whole transitive closure.
      collector_->AddWeakRef(weak_ref);
    }
  }
  int size = JSWeakRef::BodyDescriptor::SizeOf(map, weak_ref);
  JSWeakRef::BodyDescriptor::IterateBody(map, weak_ref, size, this);
  return size;
}

// Visits a WeakCell. Mirrors VisitJSWeakRef: the target is treated weakly;
// only its slot is recorded if it is already live, otherwise the cell is
// deferred to the collector for post-marking processing.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitWeakCell(Map map, WeakCell weak_cell) {
  if (weak_cell->target()->IsHeapObject()) {
    HeapObject target = HeapObject::cast(weak_cell->target());
    if (marking_state()->IsBlackOrGrey(target)) {
      // Record the slot inside the WeakCell, since the IterateBody below
      // won't visit it.
      ObjectSlot slot = weak_cell.RawField(WeakCell::kTargetOffset);
      collector_->RecordSlot(weak_cell, slot, target);
    } else {
      // WeakCell points to a potentially dead object. We have to process
      // them when we know the liveness of the whole transitive closure.
      collector_->AddWeakCell(weak_cell);
    }
  }
  int size = WeakCell::BodyDescriptor::SizeOf(map, weak_cell);
  WeakCell::BodyDescriptor::IterateBody(map, weak_cell, size, this);
  return size;
}

// Core slot visitation: strong references are recorded and marked; weak
// references are either recorded immediately (target already live) or
// deferred for processing after the transitive closure is known.
// class template arguments
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
// method template arguments
template <typename TSlot>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
                    MarkingState>::VisitPointerImpl(HeapObject host,
                                                    TSlot slot) {
  static_assert(std::is_same<TSlot, ObjectSlot>::value ||
                    std::is_same<TSlot, MaybeObjectSlot>::value,
                "Only ObjectSlot and MaybeObjectSlot are expected here");
  typename TSlot::TObject object = *slot;
  HeapObject target_object;
  if (object.GetHeapObjectIfStrong(&target_object)) {
    collector_->RecordSlot(host, HeapObjectSlot(slot), target_object);
    MarkObject(host, target_object);
  } else if (TSlot::kCanBeWeak && object.GetHeapObjectIfWeak(&target_object)) {
    if (marking_state()->IsBlackOrGrey(target_object)) {
      // Weak references with live values are directly processed here to reduce
      // the processing time of weak cells during the main GC pause.
      collector_->RecordSlot(host, HeapObjectSlot(slot), target_object);
    } else {
      // If we do not know about liveness of values of weak cells, we have to
      // process them when we know the liveness of the whole transitive
      // closure.
      collector_->AddWeakReference(host, HeapObjectSlot(slot));
    }
  }
}

316
// class template arguments
317
template <FixedArrayVisitationMode fixed_array_mode,
318
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
319 320
// method template arguments
template <typename TSlot>
321
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
322
                    MarkingState>::VisitPointersImpl(HeapObject host,
323 324
                                                     TSlot start, TSlot end) {
  for (TSlot p = start; p < end; ++p) {
325 326 327 328
    VisitPointer(host, p);
  }
}

// Visits an object embedded in |host|'s instruction stream. The reloc slot
// is always recorded; unmarked targets are either deferred (if the code
// holds them weakly) or marked immediately.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
                    MarkingState>::VisitEmbeddedPointer(Code host,
                                                        RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject object = HeapObject::cast(rinfo->target_object());
  collector_->RecordRelocSlot(host, rinfo, object);
  if (!marking_state()->IsBlackOrGrey(object)) {
    if (host->IsWeakObject(object)) {
      // Weakly-embedded objects are processed after marking; do not keep
      // them alive through the code object.
      collector_->AddWeakObjectInCode(object, host);
    } else {
      MarkObject(host, object);
    }
  }
}

template <FixedArrayVisitationMode fixed_array_mode,
347 348
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
349
                    MarkingState>::VisitCodeTarget(Code host,
350
                                                   RelocInfo* rinfo) {
351
  DCHECK(RelocInfo::IsCodeTargetMode(rinfo->rmode()));
352
  Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
353 354 355 356 357
  collector_->RecordRelocSlot(host, rinfo, target);
  MarkObject(host, target);
}

// Marks |descriptors| black unconditionally (without pushing it onto a
// worklist) and, on the first marking, visits its header region.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
    MarkDescriptorArrayBlack(HeapObject host, DescriptorArray descriptors) {
  // Note that WhiteToBlack is not sufficient here because it fails if the
  // descriptor array is grey. So we need to do two steps: WhiteToGrey and
  // GreyToBlack. Alternatively, we could check WhiteToGrey || WhiteToBlack.
  if (marking_state()->WhiteToGrey(descriptors)) {
    if (retaining_path_mode == TraceRetainingPathMode::kEnabled &&
        V8_UNLIKELY(FLAG_track_retaining_path)) {
      heap_->AddRetainer(host, descriptors);
    }
  }
  if (marking_state()->GreyToBlack(descriptors)) {
    // First successful grey -> black transition: visit the header once.
    VisitPointers(descriptors, descriptors->GetFirstPointerSlot(),
                  descriptors->GetDescriptorSlot(0));
  }
  DCHECK(marking_state()->IsBlack(descriptors));
}

template <FixedArrayVisitationMode fixed_array_mode,
378 379
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
380 381
                    MarkingState>::MarkObject(HeapObject host,
                                              HeapObject object) {
382
  if (marking_state()->WhiteToGrey(object)) {
383
    marking_worklist()->Push(object);
384 385 386 387 388 389 390 391
    if (retaining_path_mode == TraceRetainingPathMode::kEnabled &&
        V8_UNLIKELY(FLAG_track_retaining_path)) {
      heap_->AddRetainer(host, object);
    }
  }
}

// Incrementally visits a (large) FixedArray in kProgressBarScanningChunk
// sized steps, using the memory chunk's progress bar to remember how far
// scanning has advanced. Returns the number of bytes visited in this step.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
    VisitFixedArrayIncremental(Map map, FixedArray object) {
  MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
  int size = FixedArray::BodyDescriptor::SizeOf(map, object);
  if (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {
    DCHECK(FLAG_use_marking_progress_bar);
    DCHECK(heap_->IsLargeObject(object));
    size_t current_progress_bar = chunk->ProgressBar();
    if (current_progress_bar == 0) {
      // Try to move the progress bar forward to start offset. This solves the
      // problem of not being able to observe a progress bar reset when
      // processing the first kProgressBarScanningChunk.
      if (!chunk->TrySetProgressBar(0,
                                    FixedArray::BodyDescriptor::kStartOffset))
        return 0;
      current_progress_bar = FixedArray::BodyDescriptor::kStartOffset;
    }
    int start = static_cast<int>(current_progress_bar);
    int end = Min(size, start + kProgressBarScanningChunk);
    if (start < end) {
      VisitPointers(object, object.RawField(start), object.RawField(end));
      // Setting the progress bar can fail if the object that is currently
      // scanned is also revisited. In this case, there may be two tasks racing
      // on the progress counter. The loser can bail out because the progress
      // bar is reset before the tasks race on the object.
      if (chunk->TrySetProgressBar(current_progress_bar, end) && (end < size)) {
        DCHECK(marking_state()->IsBlack(object));
        // The object can be pushed back onto the marking worklist only after
        // progress bar was updated.
        marking_worklist()->Push(object);
      }
    }
    return end - start;
  }

  // Non-batched processing.
  FixedArray::BodyDescriptor::IterateBody(map, object, size, this);
  return size;
}

// Visits the descriptors of |descriptors| up to |number_of_own_descriptors|,
// skipping any range already visited this cycle. The per-array marked
// counter is keyed by the mark-compact epoch so counters left over from a
// previous cycle are ignored.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
    VisitDescriptors(DescriptorArray descriptors,
                     int number_of_own_descriptors) {
  // Updating the number of marked descriptor is supported only for black
  // descriptor arrays.
  DCHECK(marking_state()->IsBlack(descriptors));
  int16_t new_marked = static_cast<int16_t>(number_of_own_descriptors);
  int16_t old_marked = descriptors->UpdateNumberOfMarkedDescriptors(
      mark_compact_epoch_, new_marked);
  if (old_marked < new_marked) {
    // Only the not-yet-visited tail [old_marked, new_marked) is visited.
    VisitPointers(descriptors,
                  MaybeObjectSlot(descriptors->GetDescriptorSlot(old_marked)),
                  MaybeObjectSlot(descriptors->GetDescriptorSlot(new_marked)));
  }
}

451
void MarkCompactCollector::MarkObject(HeapObject host, HeapObject obj) {
452
  if (marking_state()->WhiteToGrey(obj)) {
453
    marking_worklist()->Push(obj);
454 455 456 457 458 459
    if (V8_UNLIKELY(FLAG_track_retaining_path)) {
      heap_->AddRetainer(host, obj);
    }
  }
}

460
void MarkCompactCollector::MarkRootObject(Root root, HeapObject obj) {
461
  if (marking_state()->WhiteToGrey(obj)) {
462
    marking_worklist()->Push(obj);
463 464 465 466 467 468
    if (V8_UNLIKELY(FLAG_track_retaining_path)) {
      heap_->AddRetainingRoot(root, obj);
    }
  }
}

#ifdef ENABLE_MINOR_MC

// Marks a root-referenced object for the minor (young-generation) collector.
// Old-generation objects are ignored entirely.
void MinorMarkCompactCollector::MarkRootObject(HeapObject obj) {
  if (!Heap::InYoungGeneration(obj)) return;
  if (non_atomic_marking_state_.WhiteToGrey(obj)) {
    worklist_->Push(kMainThread, obj);
  }
}

#endif

480
void MarkCompactCollector::MarkExternallyReferencedObject(HeapObject obj) {
481
  if (marking_state()->WhiteToGrey(obj)) {
482
    marking_worklist()->Push(obj);
483 484 485
    if (V8_UNLIKELY(FLAG_track_retaining_path)) {
      heap_->AddRetainingRoot(Root::kWrapperTracing, obj);
    }
486 487 488
  }
}

// Convenience overload for untagged object slots; forwards to the
// HeapObjectSlot overload below.
void MarkCompactCollector::RecordSlot(HeapObject object, ObjectSlot slot,
                                      HeapObject target) {
  RecordSlot(object, HeapObjectSlot(slot), target);
}

// Records |slot| (inside |object|, pointing at |target|) in the OLD_TO_OLD
// remembered set, but only if the target's page is an evacuation candidate
// and the source page participates in slot recording.
void MarkCompactCollector::RecordSlot(HeapObject object, HeapObjectSlot slot,
                                      HeapObject target) {
  Page* target_page = Page::FromHeapObject(target);
  Page* source_page = Page::FromHeapObject(object);
  if (target_page->IsEvacuationCandidate<AccessMode::ATOMIC>() &&
      !source_page->ShouldSkipEvacuationSlotRecording<AccessMode::ATOMIC>()) {
    RememberedSet<OLD_TO_OLD>::Insert(source_page, slot.address());
  }
}

// Queues |array| for transition-array processing after marking.
void MarkCompactCollector::AddTransitionArray(TransitionArray array) {
  weak_objects_.transition_arrays.Push(kMainThread, array);
}

// Queues |flush_candidate| so its bytecode can be flushed after marking if
// it turns out to be unused.
void MarkCompactCollector::AddBytecodeFlushingCandidate(
    SharedFunctionInfo flush_candidate) {
  weak_objects_.bytecode_flushing_candidates.Push(kMainThread, flush_candidate);
}

// Queues |flushed_function| for a reset now that its bytecode was flushed.
void MarkCompactCollector::AddFlushedJSFunction(JSFunction flushed_function) {
  weak_objects_.flushed_js_functions.Push(kMainThread, flushed_function);
}

// Positions the iterator at the first live object at or after |start| on
// |chunk|. Filler maps are cached up front so AdvanceToNextValidObject can
// filter fillers without re-reading the read-only roots.
template <LiveObjectIterationMode mode>
LiveObjectRange<mode>::iterator::iterator(MemoryChunk* chunk, Bitmap* bitmap,
                                          Address start)
    : chunk_(chunk),
      one_word_filler_map_(
          ReadOnlyRoots(chunk->heap()).one_pointer_filler_map()),
      two_word_filler_map_(
          ReadOnlyRoots(chunk->heap()).two_pointer_filler_map()),
      free_space_map_(ReadOnlyRoots(chunk->heap()).free_space_map()),
      it_(chunk, bitmap) {
  // Skip ahead to the bitmap cell containing |start|'s mark bit.
  it_.Advance(Bitmap::IndexToCell(
      Bitmap::CellAlignIndex(chunk_->AddressToMarkbitIndex(start))));
  if (!it_.Done()) {
    cell_base_ = it_.CurrentCellBase();
    current_cell_ = *it_.CurrentCell();
    AdvanceToNextValidObject();
  }
}

// Pre-increment: advances to the next live, non-filler object.
template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator& LiveObjectRange<mode>::iterator::
operator++() {
  AdvanceToNextValidObject();
  return *this;
}

// Post-increment: returns the iterator's previous position.
template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::iterator::
operator++(int) {
  iterator retval = *this;
  ++(*this);
  return retval;
}

// Walks the mark bitmap to find the next live object matching the iteration
// mode (black, grey, or all), skipping fillers. On success sets
// current_object_/current_size_; when the chunk is exhausted current_object_
// is cleared. An object is black when both its start bit and the following
// bit are set; grey when only the start bit is set.
template <LiveObjectIterationMode mode>
void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() {
  while (!it_.Done()) {
    HeapObject object;
    int size = 0;
    while (current_cell_ != 0) {
      uint32_t trailing_zeros = base::bits::CountTrailingZeros(current_cell_);
      Address addr = cell_base_ + trailing_zeros * kTaggedSize;

      // Clear the first bit of the found object.
      current_cell_ &= ~(1u << trailing_zeros);

      uint32_t second_bit_index = 0;
      if (trailing_zeros >= Bitmap::kBitIndexMask) {
        second_bit_index = 0x1;
        // The overlapping case; there has to exist a cell after the current
        // cell.
        // However, if there is a black area at the end of the page, and the
        // last word is a one word filler, we are not allowed to advance. In
        // that case we can return immediately.
        if (!it_.Advance()) {
          DCHECK(HeapObject::FromAddress(addr)->map() == one_word_filler_map_);
          current_object_ = HeapObject();
          return;
        }
        cell_base_ = it_.CurrentCellBase();
        current_cell_ = *it_.CurrentCell();
      } else {
        second_bit_index = 1u << (trailing_zeros + 1);
      }

      Map map;
      if (current_cell_ & second_bit_index) {
        // We found a black object. If the black object is within a black area,
        // make sure that we skip all set bits in the black area until the
        // object ends.
        HeapObject black_object = HeapObject::FromAddress(addr);
        // Acquire-load the map: a mutator may concurrently install a new map.
        map = Map::cast(ObjectSlot(addr).Acquire_Load());
        size = black_object->SizeFromMap(map);
        Address end = addr + size - kTaggedSize;
        // One word filler objects do not borrow the second mark bit. We have
        // to jump over the advancing and clearing part.
        // Note that we know that we are at a one word filler when
        // object_start + object_size - kTaggedSize == object_start.
        if (addr != end) {
          DCHECK_EQ(chunk_, MemoryChunk::FromAddress(end));
          uint32_t end_mark_bit_index = chunk_->AddressToMarkbitIndex(end);
          unsigned int end_cell_index =
              end_mark_bit_index >> Bitmap::kBitsPerCellLog2;
          MarkBit::CellType end_index_mask =
              1u << Bitmap::IndexInCell(end_mark_bit_index);
          if (it_.Advance(end_cell_index)) {
            cell_base_ = it_.CurrentCellBase();
            current_cell_ = *it_.CurrentCell();
          }

          // Clear all bits in current_cell, including the end index.
          current_cell_ &= ~(end_index_mask + end_index_mask - 1);
        }

        if (mode == kBlackObjects || mode == kAllLiveObjects) {
          object = black_object;
        }
      } else if ((mode == kGreyObjects || mode == kAllLiveObjects)) {
        map = Map::cast(ObjectSlot(addr).Acquire_Load());
        object = HeapObject::FromAddress(addr);
        size = object->SizeFromMap(map);
      }

      // We found a live object.
      if (!object.is_null()) {
        // Do not use IsFiller() here. This may cause a data race for reading
        // out the instance type when a new map concurrently is written into
        // this object while iterating over the object.
        if (map == one_word_filler_map_ || map == two_word_filler_map_ ||
            map == free_space_map_) {
          // There are two reasons why we can get black or grey fillers:
          // 1) Black areas together with slack tracking may result in black one
          // word filler objects.
          // 2) Left trimming may leave black or grey fillers behind because we
          // do not clear the old location of the object start.
          // We filter these objects out in the iterator.
          object = HeapObject();
        } else {
          break;
        }
      }
    }

    // Current cell exhausted: advance to the next non-empty cell, if any.
    if (current_cell_ == 0) {
      if (it_.Advance()) {
        cell_base_ = it_.CurrentCellBase();
        current_cell_ = *it_.CurrentCell();
      }
    }
    if (!object.is_null()) {
      current_object_ = object;
      current_size_ = size;
      return;
    }
  }
  // No more live objects on this chunk.
  current_object_ = HeapObject();
}

// Returns an iterator positioned at the first live object in the range.
template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::begin() {
  return iterator(chunk_, bitmap_, start_);
}

// Returns the past-the-end iterator for the range.
template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::end() {
  return iterator(chunk_, bitmap_, end_);
}

// Convenience accessor for the isolate that owns this collector's heap.
Isolate* MarkCompactCollectorBase::isolate() { return heap()->isolate(); }

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_MARK_COMPACT_INL_H_