// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/array-buffer-tracker.h"

#include <vector>

#include "src/heap/array-buffer-collector.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/heap.h"
#include "src/heap/spaces.h"

namespace v8 {
namespace internal {

17 18 19 20 21
LocalArrayBufferTracker::~LocalArrayBufferTracker() {
  CHECK(array_buffers_.empty());
}

// Walks all tracked array buffers on this page and applies |callback| to
// each. The callback decides per buffer whether to keep it here
// (kKeepEntry), re-register it on another page (kUpdateEntry, writing the
// relocated buffer through the out parameter), or drop it (kRemoveEntry,
// queueing its backing store for freeing). External backing-store byte
// counters are kept in sync throughout.
template <typename Callback>
void LocalArrayBufferTracker::Process(Callback callback) {
  // Backing stores of removed buffers; handed off to the main thread at the
  // end for actual freeing.
  std::vector<JSArrayBuffer::Allocation> backing_stores_to_free;
  // Entries that stay on this page; swapped into array_buffers_ at the end
  // so we never mutate the map while iterating it.
  TrackingData kept_array_buffers;

  JSArrayBuffer* new_buffer = nullptr;
  JSArrayBuffer* old_buffer = nullptr;
  size_t freed_memory = 0;
  size_t moved_memory = 0;
  for (TrackingData::iterator it = array_buffers_.begin();
       it != array_buffers_.end(); ++it) {
    old_buffer = reinterpret_cast<JSArrayBuffer*>(it->first);
    Page* old_page = Page::FromAddress(old_buffer->address());
    const CallbackResult result = callback(old_buffer, &new_buffer);
    if (result == kKeepEntry) {
      kept_array_buffers.insert(*it);
    } else if (result == kUpdateEntry) {
      // The callback must have provided the buffer's new location.
      DCHECK_NOT_NULL(new_buffer);
      Page* target_page = Page::FromAddress(new_buffer->address());
      {
        // The target page's tracker may be touched concurrently; guard
        // lazy tracker allocation and the Add with the page mutex.
        base::LockGuard<base::Mutex> guard(target_page->mutex());
        LocalArrayBufferTracker* tracker = target_page->local_tracker();
        if (tracker == nullptr) {
          target_page->AllocateLocalTracker();
          tracker = target_page->local_tracker();
        }
        DCHECK_NOT_NULL(tracker);
        const size_t size = NumberToSize(new_buffer->byte_length());
        // We should decrement before adding to avoid potential overflows in
        // the external memory counters.
        old_page->DecrementExternalBackingStoreBytes(
            ExternalBackingStoreType::kArrayBuffer, it->second.length);
        tracker->Add(new_buffer, size);
      }
      moved_memory += it->second.length;

    } else if (result == kRemoveEntry) {
      freed_memory += it->second.length;
      // We pass backing_store() and stored length to the collector for freeing
      // the backing store. Wasm allocations will go through their own tracker
      // based on the backing store.
      backing_stores_to_free.emplace_back(
          it->second.backing_store, it->second.length, it->second.backing_store,
          old_buffer->allocation_mode(), old_buffer->is_wasm_memory());
      old_page->DecrementExternalBackingStoreBytes(
          ExternalBackingStoreType::kArrayBuffer, it->second.length);

    } else {
      // Callbacks must return one of the three CallbackResult values.
      UNREACHABLE();
    }
  }
  if (moved_memory || freed_memory) {
    // TODO(wez): Remove backing-store from external memory accounting.
    page_->heap()->update_external_memory_concurrently_freed(
        static_cast<intptr_t>(freed_memory));
  }

  // Atomically replace the tracked set with the surviving entries.
  array_buffers_.swap(kept_array_buffers);

  // Pass the backing stores that need to be freed to the main thread for later
  // distribution.
  page_->heap()->array_buffer_collector()->AddGarbageAllocations(
      std::move(backing_stores_to_free));
}

// During a scavenge, walks every from-space page and processes its tracked
// buffers: forwarded buffers are moved to their new page's tracker, all
// others are dropped. Each page's tracker must end up empty.
void ArrayBufferTracker::PrepareToFreeDeadInNewSpace(Heap* heap) {
  DCHECK_EQ(heap->gc_state(), Heap::HeapState::SCAVENGE);
  PageRange from_space_pages(heap->new_space()->from_space().first_page(),
                             nullptr);
  for (Page* page : from_space_pages) {
    const bool processed_all = ProcessBuffers(page, kUpdateForwardedRemoveOthers);
    CHECK(processed_all);
  }
}

// Unconditionally frees every array buffer tracked on |page| and releases
// the page's tracker once it is empty. No-op for pages without a tracker.
void ArrayBufferTracker::FreeAll(Page* page) {
  LocalArrayBufferTracker* const tracker = page->local_tracker();
  if (tracker == nullptr) return;
  // The predicate accepts every buffer, so all entries are freed.
  tracker->Free([](JSArrayBuffer* buffer) { return true; });
  if (tracker->IsEmpty()) page->ReleaseLocalTracker();
}

// Processes all buffers tracked on |page|: forwarded buffers are moved to
// the tracker of their new location; unforwarded ones are kept or removed
// depending on |mode|. Returns true if the page's tracker is empty
// afterwards (or the page had none to begin with).
bool ArrayBufferTracker::ProcessBuffers(Page* page, ProcessingMode mode) {
  LocalArrayBufferTracker* const tracker = page->local_tracker();
  if (tracker == nullptr) return true;

  DCHECK(page->SweepingDone());
  tracker->Process([mode](JSArrayBuffer* old_buffer,
                          JSArrayBuffer** new_buffer) {
    const MapWord map_word = old_buffer->map_word();
    if (!map_word.IsForwardingAddress()) {
      // Not relocated by the GC: the mode decides whether the entry stays.
      if (mode == kUpdateForwardedKeepOthers) {
        return LocalArrayBufferTracker::kKeepEntry;
      }
      return LocalArrayBufferTracker::kRemoveEntry;
    }
    // The buffer was moved; hand its new location back to Process().
    *new_buffer = JSArrayBuffer::cast(map_word.ToForwardingAddress());
    return LocalArrayBufferTracker::kUpdateEntry;
  });
  return tracker->IsEmpty();
}

// Returns whether |buffer| is registered in the tracker of the page it
// lives on. The page mutex guards the tracker lookup; it is held until the
// function returns, which matches the original inner-scope lifetime since
// nothing follows the lookup.
bool ArrayBufferTracker::IsTracked(JSArrayBuffer* buffer) {
  Page* const page = Page::FromAddress(buffer->address());
  base::LockGuard<base::Mutex> guard(page->mutex());
  LocalArrayBufferTracker* const tracker = page->local_tracker();
  // A page without a tracker tracks no buffers.
  return tracker != nullptr && tracker->IsTracked(buffer);
}

// Frees all tracked array buffers at heap teardown.
void ArrayBufferTracker::TearDown(Heap* heap) {
  // ArrayBuffers can only be found in NEW_SPACE and OLD_SPACE.
  for (Page* page : *heap->old_space()) FreeAll(page);
  NewSpace* const new_space = heap->new_space();
  if (new_space->to_space().is_committed()) {
    for (Page* page : new_space->to_space()) FreeAll(page);
  }
#ifdef DEBUG
  // From-space must already be clean; only verify in debug builds.
  if (new_space->from_space().is_committed()) {
    for (Page* page : new_space->from_space()) {
      DCHECK(!page->contains_array_buffers());
    }
  }
#endif  // DEBUG
}

}  // namespace internal
}  // namespace v8