// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_CPPGC_MARKING_STATE_H_
#define V8_HEAP_CPPGC_MARKING_STATE_H_

#include <algorithm>
#include <memory>
#include <utility>
#include <vector>

#include "include/cppgc/trace-trait.h"
#include "include/cppgc/visitor.h"
#include "src/base/logging.h"
#include "src/heap/cppgc/compaction-worklists.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/liveness-broker.h"
#include "src/heap/cppgc/marking-worklists.h"

namespace cppgc {
namespace internal {

// C++ marking implementation.
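//
// The marking state classes below wrap thread-local views of the shared
// marking worklists:
//  - MarkingStateBase provides the basic mark-and-push machinery.
//  - BasicMarkingState adds weak callbacks, weak containers, ephemerons,
//    compaction slot recording, and marked-bytes accounting.
//  - MutatorMarkingState and ConcurrentMarkingState are the variants used by
//    the main-thread marker and concurrent markers, respectively.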
class MarkingStateBase {
 public:
  inline MarkingStateBase(HeapBase&, MarkingWorklists&);

  MarkingStateBase(const MarkingStateBase&) = delete;
  MarkingStateBase& operator=(const MarkingStateBase&) = delete;

  inline void MarkAndPush(const void*, TraceDescriptor);
  inline void MarkAndPush(HeapObjectHeader&);

  inline void PushMarked(HeapObjectHeader&, TraceDescriptor desc);

  void Publish() { marking_worklist_.Publish(); }

  MarkingWorklists::MarkingWorklist::Local& marking_worklist() {
    return marking_worklist_;
  }
  MarkingWorklists::NotFullyConstructedWorklist&
  not_fully_constructed_worklist() {
    return not_fully_constructed_worklist_;
  }

 protected:
  inline void MarkAndPush(HeapObjectHeader&, TraceDescriptor);

  inline bool MarkNoPush(HeapObjectHeader&);

  HeapBase& heap_;

  MarkingWorklists::MarkingWorklist::Local marking_worklist_;
  MarkingWorklists::NotFullyConstructedWorklist&
      not_fully_constructed_worklist_;
};

MarkingStateBase::MarkingStateBase(HeapBase& heap,
                                   MarkingWorklists& marking_worklists)
    : heap_(heap),
      marking_worklist_(marking_worklists.marking_worklist()),
      not_fully_constructed_worklist_(
          *marking_worklists.not_fully_constructed_worklist()) {}

void MarkingStateBase::MarkAndPush(const void* object, TraceDescriptor desc) {
  DCHECK_NOT_NULL(object);
  MarkAndPush(
      HeapObjectHeader::FromObject(const_cast<void*>(desc.base_object_payload)),
      desc);
}

void MarkingStateBase::MarkAndPush(HeapObjectHeader& header,
                                   TraceDescriptor desc) {
  DCHECK_NOT_NULL(desc.callback);

  if (header.IsInConstruction<AccessMode::kAtomic>()) {
    not_fully_constructed_worklist_.Push<AccessMode::kAtomic>(&header);
  } else if (MarkNoPush(header)) {
    PushMarked(header, desc);
  }
}
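
// The MarkAndPush() path above is what a marking visitor typically invokes for
// strongly reachable objects. Illustrative sketch only; the visitor type and
// member name below are hypothetical:
//
//   void SomeMarkingVisitor::Visit(const void* object, TraceDescriptor desc) {
//     marking_state_.MarkAndPush(object, desc);
//   }
//
// Objects still in construction are deferred to the not-fully-constructed
// worklist; all other unmarked objects are marked and pushed for tracing.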

bool MarkingStateBase::MarkNoPush(HeapObjectHeader& header) {
  // A GC should only mark the objects that belong in its heap.
  DCHECK_EQ(&heap_, &BasePage::FromPayload(&header)->heap());
  // Never mark free-space objects. Marking one would, e.g., indicate that a
  // promptly freed backing store is being marked.
  DCHECK(!header.IsFree<AccessMode::kAtomic>());
  return header.TryMarkAtomic();
}

void MarkingStateBase::MarkAndPush(HeapObjectHeader& header) {
  MarkAndPush(
      header,
      {header.ObjectStart(),
       GlobalGCInfoTable::GCInfoFromIndex(header.GetGCInfoIndex()).trace});
}

void MarkingStateBase::PushMarked(HeapObjectHeader& header,
                                  TraceDescriptor desc) {
  DCHECK(header.IsMarked<AccessMode::kAtomic>());
  DCHECK(!header.IsInConstruction<AccessMode::kAtomic>());
  DCHECK_NOT_NULL(desc.callback);

  marking_worklist_.Push(desc);
}

class BasicMarkingState : public MarkingStateBase {
 public:
  inline BasicMarkingState(HeapBase& heap, MarkingWorklists&,
                           CompactionWorklists*);

  BasicMarkingState(const BasicMarkingState&) = delete;
  BasicMarkingState& operator=(const BasicMarkingState&) = delete;

  inline void RegisterWeakReferenceIfNeeded(const void*, TraceDescriptor,
                                            WeakCallback, const void*);
  inline void RegisterWeakCallback(WeakCallback, const void*);

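  // Records |slot| for compaction. This is a no-op unless compaction is
  // enabled, i.e., unless |movable_slots_worklist_| has been set up.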
  void RegisterMovableReference(const void** slot) {
    if (!movable_slots_worklist_) return;
    movable_slots_worklist_->Push(slot);
  }

  // Weak containers are special in that they may require re-tracing if they
  // are reachable through the stack, even if they were already traced before.
  // ProcessWeakContainer records which weak containers were already marked so
  // that conservative stack scanning knows to retrace them.
  inline void ProcessWeakContainer(const void*, TraceDescriptor, WeakCallback,
                                   const void*);

  inline void ProcessEphemeron(const void*, const void*, TraceDescriptor,
                               Visitor&);

  inline void AccountMarkedBytes(const HeapObjectHeader&);
  inline void AccountMarkedBytes(size_t);
  size_t marked_bytes() const { return marked_bytes_; }

  void Publish() {
    MarkingStateBase::Publish();
    previously_not_fully_constructed_worklist_.Publish();
    weak_callback_worklist_.Publish();
    write_barrier_worklist_.Publish();
    concurrent_marking_bailout_worklist_.Publish();
    discovered_ephemeron_pairs_worklist_.Publish();
    ephemeron_pairs_for_processing_worklist_.Publish();
    if (IsCompactionEnabled()) movable_slots_worklist_->Publish();
  }

  MarkingWorklists::PreviouslyNotFullyConstructedWorklist::Local&
  previously_not_fully_constructed_worklist() {
    return previously_not_fully_constructed_worklist_;
  }
  MarkingWorklists::WeakCallbackWorklist::Local& weak_callback_worklist() {
    return weak_callback_worklist_;
  }
  MarkingWorklists::WriteBarrierWorklist::Local& write_barrier_worklist() {
    return write_barrier_worklist_;
  }
  MarkingWorklists::ConcurrentMarkingBailoutWorklist::Local&
  concurrent_marking_bailout_worklist() {
    return concurrent_marking_bailout_worklist_;
  }
  MarkingWorklists::EphemeronPairsWorklist::Local&
  discovered_ephemeron_pairs_worklist() {
    return discovered_ephemeron_pairs_worklist_;
  }
  MarkingWorklists::EphemeronPairsWorklist::Local&
  ephemeron_pairs_for_processing_worklist() {
    return ephemeron_pairs_for_processing_worklist_;
  }
  MarkingWorklists::WeakContainersWorklist& weak_containers_worklist() {
    return weak_containers_worklist_;
  }
  MarkingWorklists::RetraceMarkedObjectsWorklist::Local&
  retrace_marked_objects_worklist() {
    return retrace_marked_objects_worklist_;
  }

  CompactionWorklists::MovableReferencesWorklist::Local*
  movable_slots_worklist() {
    return movable_slots_worklist_.get();
  }

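  // Ephemeron pairs whose key was not yet marked are recorded in
  // discovered_ephemeron_pairs_worklist_ (see ProcessEphemeron). The marker
  // re-runs ephemeron processing as long as new pairs keep being discovered,
  // i.e., until a fixed point is reached.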
  bool DidDiscoverNewEphemeronPairs() const {
    return discovered_new_ephemeron_pairs_;
  }

  void ResetDidDiscoverNewEphemeronPairs() {
    discovered_new_ephemeron_pairs_ = false;
  }

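  // Signals that marking has entered the atomic pause. ProcessEphemeron uses
  // this to decide how ephemeron keys that are still in construction are
  // treated.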
  void set_in_atomic_pause() { in_atomic_pause_ = true; }

 protected:
  inline void RegisterWeakContainer(HeapObjectHeader&);

  inline bool IsCompactionEnabled() const {
    return movable_slots_worklist_.get();
  }

  MarkingWorklists::PreviouslyNotFullyConstructedWorklist::Local
      previously_not_fully_constructed_worklist_;
  MarkingWorklists::WeakCallbackWorklist::Local weak_callback_worklist_;
  MarkingWorklists::WriteBarrierWorklist::Local write_barrier_worklist_;
  MarkingWorklists::ConcurrentMarkingBailoutWorklist::Local
      concurrent_marking_bailout_worklist_;
  MarkingWorklists::EphemeronPairsWorklist::Local
      discovered_ephemeron_pairs_worklist_;
  MarkingWorklists::EphemeronPairsWorklist::Local
      ephemeron_pairs_for_processing_worklist_;
  MarkingWorklists::WeakContainersWorklist& weak_containers_worklist_;
  MarkingWorklists::RetraceMarkedObjectsWorklist::Local
      retrace_marked_objects_worklist_;
  // Existence of the worklist (|movable_slots_worklist_| != nullptr) denotes
  // that compaction is currently enabled and slots must be recorded.
  std::unique_ptr<CompactionWorklists::MovableReferencesWorklist::Local>
      movable_slots_worklist_;

  size_t marked_bytes_ = 0;
  bool in_ephemeron_processing_ = false;
  bool discovered_new_ephemeron_pairs_ = false;
  bool in_atomic_pause_ = false;
};

BasicMarkingState::BasicMarkingState(HeapBase& heap,
                                     MarkingWorklists& marking_worklists,
                                     CompactionWorklists* compaction_worklists)
    : MarkingStateBase(heap, marking_worklists),
      previously_not_fully_constructed_worklist_(
          marking_worklists.previously_not_fully_constructed_worklist()),
      weak_callback_worklist_(marking_worklists.weak_callback_worklist()),
      write_barrier_worklist_(marking_worklists.write_barrier_worklist()),
      concurrent_marking_bailout_worklist_(
          marking_worklists.concurrent_marking_bailout_worklist()),
      discovered_ephemeron_pairs_worklist_(
          marking_worklists.discovered_ephemeron_pairs_worklist()),
      ephemeron_pairs_for_processing_worklist_(
          marking_worklists.ephemeron_pairs_for_processing_worklist()),
      weak_containers_worklist_(*marking_worklists.weak_containers_worklist()),
      retrace_marked_objects_worklist_(
          marking_worklists.retrace_marked_objects_worklist()) {
  if (compaction_worklists) {
    movable_slots_worklist_ =
        std::make_unique<CompactionWorklists::MovableReferencesWorklist::Local>(
            compaction_worklists->movable_slots_worklist());
  }
}

void BasicMarkingState::RegisterWeakReferenceIfNeeded(
    const void* object, TraceDescriptor desc, WeakCallback weak_callback,
    const void* parameter) {
  // Filter out already marked values. The write barrier for WeakMember
  // ensures that any newly set value after this point is kept alive and does
  // not require the callback.
  const HeapObjectHeader& header =
      HeapObjectHeader::FromObject(desc.base_object_payload);
  if (!header.IsInConstruction<AccessMode::kAtomic>() &&
      header.IsMarked<AccessMode::kAtomic>())
    return;
  RegisterWeakCallback(weak_callback, parameter);
}

void BasicMarkingState::RegisterWeakCallback(WeakCallback callback,
                                             const void* object) {
  DCHECK_NOT_NULL(callback);
  weak_callback_worklist_.Push({callback, object});
}

void BasicMarkingState::RegisterWeakContainer(HeapObjectHeader& header) {
  weak_containers_worklist_.Push<AccessMode::kAtomic>(&header);
}

void BasicMarkingState::ProcessWeakContainer(const void* object,
                                             TraceDescriptor desc,
                                             WeakCallback callback,
                                             const void* data) {
  DCHECK_NOT_NULL(object);

  HeapObjectHeader& header =
      HeapObjectHeader::FromObject(const_cast<void*>(object));

  if (header.IsInConstruction<AccessMode::kAtomic>()) {
    not_fully_constructed_worklist_.Push<AccessMode::kAtomic>(&header);
    return;
  }

  // Only mark the container initially. Its buckets will be processed after
  // marking.
  if (!MarkNoPush(header)) return;

  RegisterWeakContainer(header);

  // Register final weak processing of the backing store.
  RegisterWeakCallback(callback, data);

  // Weak containers might not require tracing. In such cases the callback in
  // the TraceDescriptor will be nullptr. For ephemerons the callback will be
  // non-nullptr so that the container is traced and the ephemeron pairs are
  // processed.
  if (desc.callback) {
    PushMarked(header, desc);
  } else {
    // For weak containers, there's no trace callback and no processing loop to
    // update the marked bytes, hence inline that here.
    AccountMarkedBytes(header);
  }
}

void BasicMarkingState::ProcessEphemeron(const void* key, const void* value,
                                         TraceDescriptor value_desc,
                                         Visitor& visitor) {
  // ProcessEphemeron is not expected to find new ephemerons recursively, which
  // would break the main marking loop.
  DCHECK(!in_ephemeron_processing_);
  in_ephemeron_processing_ = true;
  // Keys are considered live even in incremental/concurrent marking settings
  // because the write barrier for WeakMember ensures that any newly set value
  // after this point is kept alive and does not require the callback.
  const bool key_in_construction =
      HeapObjectHeader::FromObject(key).IsInConstruction<AccessMode::kAtomic>();
  const bool key_considered_as_live =
      key_in_construction
          ? in_atomic_pause_
          : HeapObjectHeader::FromObject(key).IsMarked<AccessMode::kAtomic>();
  DCHECK_IMPLIES(
      key_in_construction && in_atomic_pause_,
      HeapObjectHeader::FromObject(key).IsMarked<AccessMode::kAtomic>());
  if (key_considered_as_live) {
    if (value_desc.base_object_payload) {
      MarkAndPush(value_desc.base_object_payload, value_desc);
    } else {
      // If value_desc.base_object_payload is nullptr, the value is not GCed and
      // should be immediately traced.
      value_desc.callback(&visitor, value);
    }
  } else {
    discovered_ephemeron_pairs_worklist_.Push({key, value, value_desc});
    discovered_new_ephemeron_pairs_ = true;
  }
  in_ephemeron_processing_ = false;
}

void BasicMarkingState::AccountMarkedBytes(const HeapObjectHeader& header) {
  AccountMarkedBytes(
      header.IsLargeObject<AccessMode::kAtomic>()
          ? reinterpret_cast<const LargePage*>(BasePage::FromPayload(&header))
                ->PayloadSize()
          : header.AllocatedSize<AccessMode::kAtomic>());
}

void BasicMarkingState::AccountMarkedBytes(size_t marked_bytes) {
  marked_bytes_ += marked_bytes;
}

class MutatorMarkingState : public BasicMarkingState {
 public:
  MutatorMarkingState(HeapBase& heap, MarkingWorklists& marking_worklists,
                      CompactionWorklists* compaction_worklists)
      : BasicMarkingState(heap, marking_worklists, compaction_worklists) {}

  inline bool MarkNoPush(HeapObjectHeader& header) {
    return MutatorMarkingState::BasicMarkingState::MarkNoPush(header);
  }

  inline void ReTraceMarkedWeakContainer(cppgc::Visitor&, HeapObjectHeader&);

  inline void DynamicallyMarkAddress(ConstAddress);

  // Moves objects in not_fully_constructed_worklist_ to
  // previously_not_fully_constructed_worklist_.
  void FlushNotFullyConstructedObjects();

  // Moves ephemeron pairs in discovered_ephemeron_pairs_worklist_ to
  // ephemeron_pairs_for_processing_worklist_.
  void FlushDiscoveredEphemeronPairs();

  inline void InvokeWeakRootsCallbackIfNeeded(const void*, TraceDescriptor,
                                              WeakCallback, const void*);

  inline bool IsMarkedWeakContainer(HeapObjectHeader&);

 private:
  // Weak containers are strongly retraced during conservative stack scanning.
  // Stack scanning happens once per GC at the start of the atomic pause.
  // Because the visitor is not retained between GCs, there is no need to clear
  // the set at the end of GC.
  class RecentlyRetracedWeakContainers {
    static constexpr size_t kMaxCacheSize = 8;

   public:
    inline bool Contains(const HeapObjectHeader*);
    inline void Insert(const HeapObjectHeader*);

   private:
    std::vector<const HeapObjectHeader*> recently_retraced_cache_;
    size_t last_used_index_ = -1;
  } recently_retraced_weak_containers_;
};

void MutatorMarkingState::ReTraceMarkedWeakContainer(cppgc::Visitor& visitor,
                                                     HeapObjectHeader& header) {
  DCHECK(weak_containers_worklist_.Contains(&header));
  recently_retraced_weak_containers_.Insert(&header);
  retrace_marked_objects_worklist().Push(&header);
}

void MutatorMarkingState::DynamicallyMarkAddress(ConstAddress address) {
  HeapObjectHeader& header =
      BasePage::FromPayload(address)->ObjectHeaderFromInnerAddress(
          const_cast<Address>(address));
  DCHECK(!header.IsInConstruction());
  if (MarkNoPush(header)) {
    marking_worklist_.Push(
        {reinterpret_cast<void*>(header.ObjectStart()),
         GlobalGCInfoTable::GCInfoFromIndex(header.GetGCInfoIndex()).trace});
  }
}

void MutatorMarkingState::InvokeWeakRootsCallbackIfNeeded(
    const void* object, TraceDescriptor desc, WeakCallback weak_callback,
    const void* parameter) {
  // Since weak roots are only traced at the end of marking, we can execute
  // the callback instead of registering it.
#if DEBUG
  const HeapObjectHeader& header =
      HeapObjectHeader::FromObject(desc.base_object_payload);
  DCHECK_IMPLIES(header.IsInConstruction(),
                 header.IsMarked<AccessMode::kAtomic>());
#endif  // DEBUG
  weak_callback(LivenessBrokerFactory::Create(), parameter);
}

bool MutatorMarkingState::IsMarkedWeakContainer(HeapObjectHeader& header) {
  const bool result =
      weak_containers_worklist_.Contains<AccessMode::kAtomic>(&header) &&
      !recently_retraced_weak_containers_.Contains(&header);
  DCHECK_IMPLIES(result, header.IsMarked<AccessMode::kAtomic>());
  DCHECK_IMPLIES(result, !header.IsInConstruction());
  return result;
}

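// RecentlyRetracedWeakContainers is a small ring buffer of kMaxCacheSize
// entries: Insert() overwrites the oldest entry once the cache is full, and
// Contains() performs a linear scan over the cached headers.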
bool MutatorMarkingState::RecentlyRetracedWeakContainers::Contains(
    const HeapObjectHeader* header) {
  return std::find(recently_retraced_cache_.begin(),
                   recently_retraced_cache_.end(),
                   header) != recently_retraced_cache_.end();
}

void MutatorMarkingState::RecentlyRetracedWeakContainers::Insert(
    const HeapObjectHeader* header) {
  last_used_index_ = (last_used_index_ + 1) % kMaxCacheSize;
  if (recently_retraced_cache_.size() <= last_used_index_)
    recently_retraced_cache_.push_back(header);
  else
    recently_retraced_cache_[last_used_index_] = header;
}

class ConcurrentMarkingState : public BasicMarkingState {
 public:
  ConcurrentMarkingState(HeapBase& heap, MarkingWorklists& marking_worklists,
                         CompactionWorklists* compaction_worklists)
      : BasicMarkingState(heap, marking_worklists, compaction_worklists) {}

  ~ConcurrentMarkingState() { DCHECK_EQ(last_marked_bytes_, marked_bytes_); }

  // Returns the number of bytes marked since the previous call to
  // RecentlyMarkedBytes().
  size_t RecentlyMarkedBytes() {
    return marked_bytes_ - std::exchange(last_marked_bytes_, marked_bytes_);
  }

  inline void AccountDeferredMarkedBytes(size_t deferred_bytes) {
    // AccountDeferredMarkedBytes is called from Trace methods, which are always
    // called after AccountMarkedBytes, so there should be no underflow here.
    DCHECK_LE(deferred_bytes, marked_bytes_);
    marked_bytes_ -= deferred_bytes;
  }

 private:
  size_t last_marked_bytes_ = 0;
};

// Drains |worklist_local| by popping items and invoking |callback| on each.
// |should_yield| is re-checked every |deadline_check_interval| callbacks;
// returns false if draining was interrupted by a requested yield and true once
// the worklist is empty.
template <size_t deadline_check_interval, typename WorklistLocal,
          typename Callback, typename Predicate>
bool DrainWorklistWithPredicate(Predicate should_yield,
                                WorklistLocal& worklist_local,
                                Callback callback) {
  if (worklist_local.IsLocalAndGlobalEmpty()) return true;
  // For concurrent markers, should_yield also reports marked bytes.
  if (should_yield()) return false;
  size_t processed_callback_count = deadline_check_interval;
  typename WorklistLocal::ItemType item;
  while (worklist_local.Pop(&item)) {
    callback(item);
    if (--processed_callback_count == 0) {
      if (should_yield()) {
        return false;
      }
      processed_callback_count = deadline_check_interval;
    }
  }
  return true;
}

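// Re-traces an object that is already marked and fully constructed by
// dispatching through its trace callback. Used, e.g., when marked weak
// containers have to be re-traced after being found during conservative stack
// scanning.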
template <AccessMode mode>
void DynamicallyTraceMarkedObject(Visitor& visitor,
                                  const HeapObjectHeader& header) {
  DCHECK(!header.IsInConstruction<mode>());
  DCHECK(header.IsMarked<AccessMode::kAtomic>());
  header.Trace<mode>(&visitor);
}

}  // namespace internal
}  // namespace cppgc

#endif  // V8_HEAP_CPPGC_MARKING_STATE_H_