// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/cppgc/write-barrier.h"

#include "include/cppgc/heap-consistency.h"
#include "include/cppgc/internal/pointer-policies.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/heap.h"
#include "src/heap/cppgc/marker.h"
#include "src/heap/cppgc/marking-visitor.h"

#if defined(CPPGC_CAGED_HEAP)
#include "include/cppgc/internal/caged-heap-local-data.h"
#endif

namespace cppgc {
namespace internal {

// static
AtomicEntryFlag WriteBarrier::write_barrier_enabled_;
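
// The flag above backs the inlined fast-path check in the public header. A
// minimal sketch of that check, assuming AtomicEntryFlag's Enter()/Exit()/
// MightBeEntered() interface (include/cppgc/internal/atomic-entry-flag.h):
//
//   if (!WriteBarrier::write_barrier_enabled_.MightBeEntered())
//     return;  // No marking and no young generation anywhere; cheap out.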

namespace {

template <MarkerBase::WriteBarrierType type>
void ProcessMarkValue(HeapObjectHeader& header, MarkerBase* marker,
                      const void* value) {
#if defined(CPPGC_CAGED_HEAP)
  DCHECK(reinterpret_cast<CagedHeapLocalData*>(
             reinterpret_cast<uintptr_t>(value) &
             ~(kCagedHeapReservationAlignment - 1))
             ->is_incremental_marking_in_progress);
#endif
  DCHECK(header.IsMarked<AccessMode::kAtomic>());
  DCHECK(marker);

  if (V8_UNLIKELY(header.IsInConstruction<AccessMode::kNonAtomic>())) {
    // In-construction objects are traced only if they are unmarked. Unmark the
    // object here so that, when marking reaches it again once it is fully
    // constructed, it is re-marked and traced; tracing it now, as a not fully
    // constructed object, would bail out.
    header.Unmark<AccessMode::kAtomic>();
    marker->WriteBarrierForInConstructionObject(header);
    return;
  }

  marker->WriteBarrierForObject<type>(header);
}

}  // namespace
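
// Illustration (informal, hypothetical types): an in-construction object can
// reach ProcessMarkValue() when its constructor publishes `this` into an
// already-marked object, which is what the bail-out above handles:
//
//   struct GCed : GarbageCollected<GCed> {
//     explicit GCed(Holder* holder) { holder->field = this; }  // barrier fires
//   };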

// static
void WriteBarrier::DijkstraMarkingBarrierSlowWithSentinelCheck(
    const void* value) {
  if (!value || value == kSentinelPointer) return;

  DijkstraMarkingBarrierSlow(value);
}
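
// kSentinelPointer is a distinguished non-null value used for cleared slots;
// it does not point at a HeapObjectHeader and must never be marked. E.g. (a
// hypothetical Member assignment):
//
//   member = kSentinelPointer;  // Non-null write; the barrier must skip it.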

// static
void WriteBarrier::DijkstraMarkingBarrierSlow(const void* value) {
  const BasePage* page = BasePage::FromPayload(value);
  const auto& heap = page->heap();

  // GetWriteBarrierType() checks marking state.
  DCHECK(heap.marker());
  // No write barriers should be executed from atomic pause marking.
  DCHECK(!heap.in_atomic_pause());

  auto& header =
      const_cast<HeapObjectHeader&>(page->ObjectHeaderFromInnerAddress(value));
  if (!header.TryMarkAtomic()) return;

  ProcessMarkValue<MarkerBase::WriteBarrierType::kDijkstra>(
      header, heap.marker(), value);
}
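
// A sketch of how a mutator write typically reaches the slow path above,
// using the inline dispatch from include/cppgc/internal/write-barrier.h
// (`slot` and `value` are hypothetical):
//
//   WriteBarrier::Params params;
//   if (WriteBarrier::GetWriteBarrierType(slot, value, params) ==
//       WriteBarrier::Type::kMarking) {
//     WriteBarrier::DijkstraMarkingBarrier(params, value);
//   }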

// static
void WriteBarrier::DijkstraMarkingBarrierRangeSlow(
    HeapHandle& heap_handle, const void* first_element, size_t element_size,
    size_t number_of_elements, TraceCallback trace_callback) {
  auto& heap_base = HeapBase::From(heap_handle);

  // GetWriteBarrierType() checks marking state.
  DCHECK(heap_base.marker());
  // No write barriers should be executed from atomic pause marking.
  DCHECK(!heap_base.in_atomic_pause());

  cppgc::subtle::DisallowGarbageCollectionScope disallow_gc_scope(heap_base);
  const char* array = static_cast<const char*>(first_element);
  while (number_of_elements-- > 0) {
    trace_callback(&heap_base.marker()->Visitor(), array);
    array += element_size;
  }
}
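
// A sketch of a caller-side range barrier, e.g. after updating a whole inline
// array at once (assuming the inline DijkstraMarkingBarrierRange() wrapper;
// `params`, `elements`, and `TraceElement` are hypothetical):
//
//   WriteBarrier::DijkstraMarkingBarrierRange(
//       params, elements, sizeof(Element), kCount, TraceElement);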

// static
void WriteBarrier::SteeleMarkingBarrierSlowWithSentinelCheck(
    const void* value) {
  if (!value || value == kSentinelPointer) return;

  SteeleMarkingBarrierSlow(value);
}

// static
void WriteBarrier::SteeleMarkingBarrierSlow(const void* value) {
  const BasePage* page = BasePage::FromPayload(value);
  const auto& heap = page->heap();

  // GetWriteBarrierType() checks marking state.
  DCHECK(heap.marker());
  // No write barriers should be executed from atomic pause marking.
  DCHECK(!heap.in_atomic_pause());

  auto& header =
      const_cast<HeapObjectHeader&>(page->ObjectHeaderFromInnerAddress(value));
  if (!header.IsMarked<AccessMode::kAtomic>()) return;

  ProcessMarkValue<MarkerBase::WriteBarrierType::kSteele>(header, heap.marker(),
                                                          value);
}
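
// Contrast with the Dijkstra barrier above (informal): Dijkstra marks a
// previously unmarked *target* so it gets traced, whereas Steele re-pushes an
// already marked object so its contents are traced again after mutation. Hence
// the mirrored checks:
//
//   Dijkstra: if (!header.TryMarkAtomic()) return;
//   Steele:   if (!header.IsMarked<AccessMode::kAtomic>()) return;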

#if defined(CPPGC_YOUNG_GENERATION)
// static
void WriteBarrier::GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
                                           const AgeTable& age_table,
                                           const void* slot,
                                           uintptr_t value_offset) {
  DCHECK(slot);
  // A write during the atomic pause (e.g. from a pre-finalizer) may trigger
  // the slow path of the barrier. This is a result of the bailout order in the
  // fast path: when marking is not in progress, the generational barrier is
  // applied.
  if (local_data.heap_base.in_atomic_pause()) return;

  if (value_offset > 0 && age_table.GetAge(value_offset) == AgeTable::Age::kOld)
    return;

  // Record the slot.
  local_data.heap_base.remembered_set().AddSlot(const_cast<void*>(slot));
}
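
// Informal example of the edge this barrier records (object names are
// hypothetical):
//
//   old_object->field = young_object;  // old-to-young pointer
//   // value_offset maps to a non-kOld entry in the age table, so the slot is
//   // remembered and serves as a root for the next minor GC.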

// static
void WriteBarrier::GenerationalBarrierForSourceObjectSlow(
    const CagedHeapLocalData& local_data, const void* inner_pointer) {
  DCHECK(inner_pointer);

  auto& object_header =
      BasePage::FromInnerAddress(&local_data.heap_base, inner_pointer)
          ->ObjectHeaderFromInnerAddress<AccessMode::kAtomic>(inner_pointer);

  // Record the source object.
  local_data.heap_base.remembered_set().AddSourceObject(
      const_cast<HeapObjectHeader&>(object_header));
}
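
// Unlike the slot-precise barrier above, this variant records the whole source
// object; a minor GC can then re-trace that object to find its young pointers.
// It is presumably used where an exact slot address is not available, which is
// why it takes an inner pointer rather than a slot.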
#endif  // CPPGC_YOUNG_GENERATION

#if V8_ENABLE_CHECKS
// static
void WriteBarrier::CheckParams(Type expected_type, const Params& params) {
  CHECK_EQ(expected_type, params.type);
}
#endif  // V8_ENABLE_CHECKS
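
// A sketch of the intended use: a call site first computes `params` via
// GetWriteBarrierType() and then asserts the flavor it is about to apply
// (`params` is hypothetical here):
//
//   WriteBarrier::CheckParams(WriteBarrier::Type::kGenerational, params);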

// static
bool WriteBarrierTypeForNonCagedHeapPolicy::IsMarking(const void* object,
                                                      HeapHandle** handle) {
  // Large objects cannot have mixins, so we are guaranteed to always have
  // a pointer on the same page.
  const auto* page = BasePage::FromPayload(object);
  *handle = &page->heap();
  const MarkerBase* marker = page->heap().marker();
  return marker && marker->IsMarking();
}

// static
bool WriteBarrierTypeForNonCagedHeapPolicy::IsMarking(HeapHandle& heap_handle) {
  const auto& heap_base = internal::HeapBase::From(heap_handle);
  const MarkerBase* marker = heap_base.marker();
  return marker && marker->IsMarking();
}

#if defined(CPPGC_CAGED_HEAP)

// static
bool WriteBarrierTypeForCagedHeapPolicy::IsMarking(
    const HeapHandle& heap_handle, WriteBarrier::Params& params) {
  const auto& heap_base = internal::HeapBase::From(heap_handle);
  const bool is_marking = heap_base.marker() && heap_base.marker()->IsMarking();
  // Also set the caged heap start here to avoid another call immediately after
  // checking IsMarking().
#if defined(CPPGC_YOUNG_GENERATION)
  params.start = reinterpret_cast<uintptr_t>(heap_base.caged_heap().base());
#endif  // CPPGC_YOUNG_GENERATION
  return is_marking;
}

#endif  // CPPGC_CAGED_HEAP
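
// With params.start populated, the inline generational barrier can derive age
// table offsets without re-deriving the cage base; a hedged sketch:
//
//   const uintptr_t slot_offset =
//       reinterpret_cast<uintptr_t>(slot) - params.start;  // AgeTable index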

#if defined(CPPGC_YOUNG_GENERATION)

// static
YoungGenerationEnabler& YoungGenerationEnabler::Instance() {
  static v8::base::LeakyObject<YoungGenerationEnabler> instance;
  return *instance.get();
}

void YoungGenerationEnabler::Enable() {
  auto& instance = Instance();
  v8::base::LockGuard _(&instance.mutex_);
  if (++instance.is_enabled_ == 1) {
    // Enter the flag so that the check in the write barrier will always
    // trigger when young generation is enabled.
    WriteBarrier::FlagUpdater::Enter();
  }
}

void YoungGenerationEnabler::Disable() {
  auto& instance = Instance();
  v8::base::LockGuard _(&instance.mutex_);
  DCHECK_LT(0, instance.is_enabled_);
  if (--instance.is_enabled_ == 0) {
    WriteBarrier::FlagUpdater::Exit();
  }
}

bool YoungGenerationEnabler::IsEnabled() {
  auto& instance = Instance();
  v8::base::LockGuard _(&instance.mutex_);
  return instance.is_enabled_;
}
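
// The enabler is reference-counted; a usage sketch:
//
//   YoungGenerationEnabler::Enable();   // 0 -> 1: enters the barrier flag.
//   YoungGenerationEnabler::Enable();   // 1 -> 2: flag unchanged.
//   YoungGenerationEnabler::Disable();  // 2 -> 1: flag unchanged.
//   YoungGenerationEnabler::Disable();  // 1 -> 0: exits the flag.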

#endif  // defined(CPPGC_YOUNG_GENERATION)

}  // namespace internal
}  // namespace cppgc