// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_STORE_BUFFER_H_
#define V8_STORE_BUFFER_H_

#include "src/allocation.h"
#include "src/base/logging.h"
#include "src/base/platform/platform.h"
#include "src/globals.h"

namespace v8 {
namespace internal {

class Page;
class PagedSpace;
class StoreBuffer;

// Callback invoked for each old-to-new-space slot visited during iteration.
// `from` is the slot being visited and `to` the object it references.
typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to);

// Pointer-to-member type for StoreBuffer methods that scan the address
// region [start, end) and hand each relevant slot to `slot_callback`.
typedef void (StoreBuffer::*RegionCallback)(Address start, Address end,
                                            ObjectSlotCallback slot_callback,
                                            bool clear_maps);
// Used to implement the write barrier by collecting addresses of pointers
// between spaces.
class StoreBuffer {
 public:
  explicit StoreBuffer(Heap* heap);

  static void StoreBufferOverflow(Isolate* isolate);

  inline Address TopAddress();

36
  void SetUp();
37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62
  void TearDown();

  // This is used by the mutator to enter addresses into the store buffer.
  inline void Mark(Address addr);

  // This is used by the heap traversal to enter the addresses into the store
  // buffer that should still be in the store buffer after GC.  It enters
  // addresses directly into the old buffer because the GC starts by wiping the
  // old buffer and thereafter only visits each cell once so there is no need
  // to attempt to remove any dupes.  During the first part of a GC we
  // are using the store buffer to access the old spaces and at the same time
  // we are rebuilding the store buffer using this function.  There is, however
  // no issue of overwriting the buffer we are iterating over, because this
  // stage of the scavenge can only reduce the number of addresses in the store
  // buffer (some objects are promoted so pointers to them do not need to be in
  // the store buffer).  The later parts of the GC scan the pages that are
  // exempt from the store buffer and process the promotion queue.  These steps
  // can overflow this buffer.  We check for this and on overflow we call the
  // callback set up with the StoreBufferRebuildScope object.
  inline void EnterDirectlyIntoStoreBuffer(Address addr);

  // Iterates over all pointers that go from old space to new space.  It will
  // delete the store buffer as it starts so the callback should reenter
  // surviving old-to-new pointers into the store buffer to rebuild it.
  void IteratePointersToNewSpace(ObjectSlotCallback callback);

63 64 65 66
  // Same as IteratePointersToNewSpace but additonally clears maps in objects
  // referenced from the store buffer that do not contain a forwarding pointer.
  void IteratePointersToNewSpaceAndClearMaps(ObjectSlotCallback callback);

67
  static const int kStoreBufferOverflowBit = 1 << (14 + kPointerSizeLog2);
68 69
  static const int kStoreBufferSize = kStoreBufferOverflowBit;
  static const int kStoreBufferLength = kStoreBufferSize / sizeof(Address);
70
  static const int kOldStoreBufferLength = kStoreBufferLength * 16;
71 72
  static const int kHashSetLengthLog2 = 12;
  static const int kHashSetLength = 1 << kHashSetLengthLog2;
73 74 75

  void Compact();

76
  void GCPrologue();
77 78 79 80 81 82
  void GCEpilogue();

  Object*** Limit() { return reinterpret_cast<Object***>(old_limit_); }
  Object*** Start() { return reinterpret_cast<Object***>(old_start_); }
  Object*** Top() { return reinterpret_cast<Object***>(old_top_); }
  void SetTop(Object*** top) {
83 84
    DCHECK(top >= Start());
    DCHECK(top <= Limit());
85 86 87 88 89 90 91 92 93 94
    old_top_ = reinterpret_cast<Address*>(top);
  }

  bool old_buffer_is_sorted() { return old_buffer_is_sorted_; }
  bool old_buffer_is_filtered() { return old_buffer_is_filtered_; }

  // Goes through the store buffer removing pointers to things that have
  // been promoted.  Rebuilds the store buffer completely if it overflowed.
  void SortUniq();

95
  void EnsureSpace(intptr_t space_needed);
96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119
  void Verify();

  bool PrepareForIteration();

#ifdef DEBUG
  void Clean();
  // Slow, for asserts only.
  bool CellIsInStoreBuffer(Address cell);
#endif

  void Filter(int flag);

 private:
  Heap* heap_;

  // The store buffer is divided up into a new buffer that is constantly being
  // filled by mutator activity and an old buffer that is filled with the data
  // from the new buffer after compression.
  Address* start_;
  Address* limit_;

  Address* old_start_;
  Address* old_limit_;
  Address* old_top_;
120
  Address* old_reserved_limit_;
121
  base::VirtualMemory* old_virtual_memory_;
122 123 124

  bool old_buffer_is_sorted_;
  bool old_buffer_is_filtered_;
125
  bool during_gc_;
126 127 128 129 130 131 132
  // The garbage collector iterates over many pointers to new space that are not
  // handled by the store buffer.  This flag indicates whether the pointers
  // found by the callbacks should be added to the store buffer or not.
  bool store_buffer_rebuilding_enabled_;
  StoreBufferCallback callback_;
  bool may_move_store_buffer_entries_;

133
  base::VirtualMemory* virtual_memory_;
134 135 136 137 138 139 140 141 142

  // Two hash sets used for filtering.
  // If address is in the hash set then it is guaranteed to be in the
  // old part of the store buffer.
  uintptr_t* hash_set_1_;
  uintptr_t* hash_set_2_;
  bool hash_sets_are_empty_;

  void ClearFilteringHashSets();
143

144
  bool SpaceAvailable(intptr_t space_needed);
145 146 147
  void Uniq();
  void ExemptPopularPages(int prime_sample_step, int threshold);

148
  // Set the map field of the object to NULL if contains a map.
149
  inline void ClearDeadObject(HeapObject* object);
150

151 152 153
  void ProcessOldToNewSlot(Address slot_address,
                           ObjectSlotCallback slot_callback, bool clear_maps);

154 155
  void IteratePointersToNewSpace(ObjectSlotCallback callback, bool clear_maps);

156
  void FindPointersToNewSpaceInRegion(Address start, Address end,
157 158
                                      ObjectSlotCallback slot_callback,
                                      bool clear_maps);
159

160 161 162 163 164
  // For each region of pointers on a page in use from an old space call
  // visit_pointer_region callback.
  // If either visit_pointer_region or callback can cause an allocation
  // in old space and changes in allocation watermark then
  // can_preallocate_during_iteration should be set to true.
165 166 167
  void IteratePointersOnPage(PagedSpace* space, Page* page,
                             RegionCallback region_callback,
                             ObjectSlotCallback slot_callback);
168

169 170
  void IteratePointersInStoreBuffer(ObjectSlotCallback slot_callback,
                                    bool clear_maps);
171

172
#ifdef VERIFY_HEAP
173 174 175 176 177 178 179 180 181 182
  void VerifyPointers(LargeObjectSpace* space);
#endif

  friend class StoreBufferRebuildScope;
  friend class DontMoveStoreBufferEntriesScope;
};


class StoreBufferRebuildScope {
 public:
183
  explicit StoreBufferRebuildScope(Heap* heap, StoreBuffer* store_buffer,
184
                                   StoreBufferCallback callback)
185
      : store_buffer_(store_buffer),
186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220
        stored_state_(store_buffer->store_buffer_rebuilding_enabled_),
        stored_callback_(store_buffer->callback_) {
    store_buffer_->store_buffer_rebuilding_enabled_ = true;
    store_buffer_->callback_ = callback;
    (*callback)(heap, NULL, kStoreBufferStartScanningPagesEvent);
  }

  ~StoreBufferRebuildScope() {
    store_buffer_->callback_ = stored_callback_;
    store_buffer_->store_buffer_rebuilding_enabled_ = stored_state_;
  }

 private:
  StoreBuffer* store_buffer_;
  bool stored_state_;
  StoreBufferCallback stored_callback_;
};


// RAII scope that clears may_move_store_buffer_entries_ for its lifetime.
// The flag's previous value is saved in the constructor and restored by the
// destructor, so these scopes nest correctly.
class DontMoveStoreBufferEntriesScope {
 public:
  explicit DontMoveStoreBufferEntriesScope(StoreBuffer* store_buffer)
      : store_buffer_(store_buffer) {
    previous_state_ = store_buffer->may_move_store_buffer_entries_;
    store_buffer->may_move_store_buffer_entries_ = false;
  }

  ~DontMoveStoreBufferEntriesScope() {
    store_buffer_->may_move_store_buffer_entries_ = previous_state_;
  }

 private:
  StoreBuffer* store_buffer_;
  bool previous_state_;
};
}
}  // namespace v8::internal

#endif  // V8_STORE_BUFFER_H_