// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/marking-barrier.h"

#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/mark-compact.h"
#include "src/heap/marking-barrier-inl.h"
#include "src/heap/marking-worklist-inl.h"
#include "src/heap/marking-worklist.h"
#include "src/heap/safepoint.h"
#include "src/objects/js-array-buffer.h"

namespace v8 {
namespace internal {

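// Main-thread barrier: unlike the LocalHeap variant below, it keeps a
// reference to incremental marking.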
MarkingBarrier::MarkingBarrier(Heap* heap)
    : heap_(heap),
      collector_(heap_->mark_compact_collector()),
      incremental_marking_(heap_->incremental_marking()),
      worklist_(collector_->marking_worklists()->shared()),
      is_main_thread_barrier_(true) {}

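// Barrier for a background LocalHeap. Incremental marking is not driven from
// background threads, so the pointer stays null.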
MarkingBarrier::MarkingBarrier(LocalHeap* local_heap)
    : heap_(local_heap->heap()),
      collector_(heap_->mark_compact_collector()),
      incremental_marking_(nullptr),
      worklist_(collector_->marking_worklists()->shared()),
      is_main_thread_barrier_(false) {}

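// All locally buffered marking work must have been published by the time the
// barrier is destroyed.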
MarkingBarrier::~MarkingBarrier() { DCHECK(worklist_.IsLocalEmpty()); }

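// Generic write barrier: marks the new value and, when compacting, records
// the slot so it can be updated after evacuation.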
void MarkingBarrier::Write(HeapObject host, HeapObjectSlot slot,
                           HeapObject value) {
  if (MarkValue(host, value)) {
    if (is_compacting_ && slot.address()) {
      collector_->RecordSlot(host, slot, value);
    }
  }
}

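// Write barrier for relocation info in Code objects. The main thread records
// the typed slot directly; other threads buffer it until Publish().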
void MarkingBarrier::Write(Code host, RelocInfo* reloc_info, HeapObject value) {
  if (MarkValue(host, value)) {
    if (is_compacting_) {
      if (is_main_thread_barrier_) {
        // An optimization to avoid allocating additional typed slots for the
        // main thread.
        collector_->RecordRelocSlot(host, reloc_info, value);
      } else {
        RecordRelocSlot(host, reloc_info, value);
      }
    }
  }
}

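// Write barrier for attaching an ArrayBufferExtension. Without concurrent
// marking, extensions of unmarked hosts can be skipped: the marker will mark
// them when it visits the host.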
void MarkingBarrier::Write(JSArrayBuffer host,
                           ArrayBufferExtension* extension) {
  if (!V8_CONCURRENT_MARKING_BOOL && !marking_state_.IsBlack(host)) {
    // The extension will be marked when the marker visits the host object.
    return;
  }
  extension->Mark();
}

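// Write barrier for descriptor arrays: notifies the collector if fewer than
// number_of_own_descriptors descriptors have been marked in the current
// epoch.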
void MarkingBarrier::Write(DescriptorArray descriptor_array,
                           int number_of_own_descriptors) {
  DCHECK(is_main_thread_barrier_);
  int16_t raw_marked = descriptor_array.raw_number_of_marked_descriptors();
  if (NumberOfMarkedDescriptors::decode(collector_->epoch(), raw_marked) <
      number_of_own_descriptors) {
    collector_->MarkDescriptorArrayFromWriteBarrier(descriptor_array,
                                                    number_of_own_descriptors);
  }
}

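// Off-main-thread slow path: typed slots are buffered per memory chunk and
// merged into the remembered set when the barrier publishes.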
void MarkingBarrier::RecordRelocSlot(Code host, RelocInfo* rinfo,
                                     HeapObject target) {
  MarkCompactCollector::RecordRelocSlotInfo info =
      MarkCompactCollector::PrepareRecordRelocSlot(host, rinfo, target);
  if (info.should_record) {
    auto& typed_slots = typed_slots_map_[info.memory_chunk];
    if (!typed_slots) {
      typed_slots.reset(new TypedSlots());
    }
    typed_slots->Insert(info.slot_type, info.offset);
  }
}

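// The static *All variants below apply the operation to the main-thread
// barrier and, when --local-heaps is enabled, to the barrier of every
// background LocalHeap.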
// static
void MarkingBarrier::ActivateAll(Heap* heap, bool is_compacting) {
  heap->marking_barrier()->Activate(is_compacting);
  if (FLAG_local_heaps) {
    heap->safepoint()->IterateLocalHeaps(
        [is_compacting](LocalHeap* local_heap) {
          local_heap->marking_barrier()->Activate(is_compacting);
        });
  }
}

// static
void MarkingBarrier::DeactivateAll(Heap* heap) {
  heap->marking_barrier()->Deactivate();
  if (FLAG_local_heaps) {
    heap->safepoint()->IterateLocalHeaps([](LocalHeap* local_heap) {
      local_heap->marking_barrier()->Deactivate();
    });
  }
}

// static
void MarkingBarrier::PublishAll(Heap* heap) {
  heap->marking_barrier()->Publish();
  if (FLAG_local_heaps) {
    heap->safepoint()->IterateLocalHeaps([](LocalHeap* local_heap) {
      local_heap->marking_barrier()->Publish();
    });
  }
}

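// Publishes the local marking worklist and merges buffered typed slots into
// the OLD_TO_OLD remembered set.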
void MarkingBarrier::Publish() {
  DCHECK_IMPLIES(!is_main_thread_barrier_, FLAG_local_heaps);
  if (is_activated_) {
    worklist_.Publish();
    for (auto& it : typed_slots_map_) {
      MemoryChunk* memory_chunk = it.first;
      std::unique_ptr<TypedSlots>& typed_slots = it.second;
      RememberedSet<OLD_TO_OLD>::MergeTyped(memory_chunk,
                                            std::move(typed_slots));
    }
    typed_slots_map_.clear();
  }
}

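// Resets the marking-related page flags for every page in the space.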
void MarkingBarrier::DeactivateSpace(PagedSpace* space) {
  DCHECK(is_main_thread_barrier_);
  for (Page* p : *space) {
    p->SetOldGenerationPageFlags(false);
  }
}

void MarkingBarrier::DeactivateSpace(NewSpace* space) {
  DCHECK(is_main_thread_barrier_);
  for (Page* p : *space) {
    p->SetYoungGenerationPageFlags(false);
  }
}

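// Only the main-thread barrier touches page flags, since they are per-heap
// rather than per-barrier state.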
void MarkingBarrier::Deactivate() {
  is_activated_ = false;
  is_compacting_ = false;
  DCHECK_IMPLIES(!is_main_thread_barrier_, FLAG_local_heaps);
  if (is_main_thread_barrier_) {
    DeactivateSpace(heap_->old_space());
    DeactivateSpace(heap_->map_space());
    DeactivateSpace(heap_->code_space());
    DeactivateSpace(heap_->new_space());
    for (LargePage* p : *heap_->new_lo_space()) {
      p->SetYoungGenerationPageFlags(false);
      DCHECK(p->IsLargePage());
    }
    for (LargePage* p : *heap_->lo_space()) {
      p->SetOldGenerationPageFlags(false);
    }
    for (LargePage* p : *heap_->code_lo_space()) {
      p->SetOldGenerationPageFlags(false);
    }
  }
  DCHECK(typed_slots_map_.empty());
  DCHECK(worklist_.IsLocalEmpty());
}

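// Sets the marking-related page flags for every page in the space.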
void MarkingBarrier::ActivateSpace(PagedSpace* space) {
  DCHECK(is_main_thread_barrier_);
  for (Page* p : *space) {
    p->SetOldGenerationPageFlags(true);
  }
}

void MarkingBarrier::ActivateSpace(NewSpace* space) {
  DCHECK(is_main_thread_barrier_);
  for (Page* p : *space) {
    p->SetYoungGenerationPageFlags(true);
  }
}

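// Turns the barrier on. The main-thread barrier additionally flags all pages
// of the regular and large-object spaces.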
void MarkingBarrier::Activate(bool is_compacting) {
  DCHECK(!is_activated_);
  DCHECK(worklist_.IsLocalEmpty());
  DCHECK_IMPLIES(!is_main_thread_barrier_, FLAG_local_heaps);
  is_compacting_ = is_compacting;
  is_activated_ = true;
  if (is_main_thread_barrier_) {
    ActivateSpace(heap_->old_space());
    ActivateSpace(heap_->map_space());
    ActivateSpace(heap_->code_space());
    ActivateSpace(heap_->new_space());

    for (LargePage* p : *heap_->new_lo_space()) {
      p->SetYoungGenerationPageFlags(true);
      DCHECK(p->IsLargePage());
    }

    for (LargePage* p : *heap_->lo_space()) {
      p->SetOldGenerationPageFlags(true);
    }

    for (LargePage* p : *heap_->code_lo_space()) {
      p->SetOldGenerationPageFlags(true);
    }
  }
}

}  // namespace internal
}  // namespace v8