// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/zone/accounting-allocator.h"

#include <memory>

#include "src/base/bounded-page-allocator.h"
#include "src/base/logging.h"
#include "src/base/macros.h"
#include "src/base/platform/wrappers.h"
#include "src/utils/allocation.h"
#include "src/zone/zone-compression.h"
#include "src/zone/zone-segment.h"

namespace v8 {
namespace internal {

// These definitions are here in order to please the linker, which in debug mode
// sometimes requires static constants to be defined in .cc files.
const size_t ZoneCompression::kReservationSize;
const size_t ZoneCompression::kReservationAlignment;

namespace {

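// Granularity, in bytes, at which pages are allocated for compressed zones.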
static constexpr size_t kZonePageSize = 256 * KB;

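// Reserves a ZoneCompression::kReservationSize block of address space, aligned
// to ZoneCompression::kReservationAlignment, from which compressed zones are
// later allocated. Terminates the process if the reservation cannot be made.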
VirtualMemory ReserveAddressSpace(v8::PageAllocator* platform_allocator) {
  DCHECK(IsAligned(ZoneCompression::kReservationSize,
                   platform_allocator->AllocatePageSize()));

  void* hint = reinterpret_cast<void*>(RoundDown(
      reinterpret_cast<uintptr_t>(platform_allocator->GetRandomMmapAddr()),
      ZoneCompression::kReservationAlignment));

  VirtualMemory memory(platform_allocator, ZoneCompression::kReservationSize,
                       hint, ZoneCompression::kReservationAlignment);
  if (memory.IsReserved()) {
    CHECK(IsAligned(memory.address(), ZoneCompression::kReservationAlignment));
    return memory;
  }

  FATAL(
      "Fatal process out of memory: Failed to reserve memory for compressed "
      "zones");
  UNREACHABLE();
}

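// Wraps the reserved address space in a BoundedPageAllocator that hands out
// kZonePageSize-sized pages from within the reservation.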
std::unique_ptr<v8::base::BoundedPageAllocator> CreateBoundedAllocator(
    v8::PageAllocator* platform_allocator, Address reservation_start) {
  CHECK(reservation_start);
  CHECK(IsAligned(reservation_start, ZoneCompression::kReservationAlignment));

  auto allocator = std::make_unique<v8::base::BoundedPageAllocator>(
      platform_allocator, reservation_start, ZoneCompression::kReservationSize,
      kZonePageSize);

  // Exclude the first page from allocation to ensure that accesses through a
  // decompressed null pointer will seg-fault.
  allocator->AllocatePagesAt(reservation_start, kZonePageSize,
                             v8::PageAllocator::kNoAccess);
  return allocator;
}

}  // namespace

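// When zone compression is enabled, eagerly reserve the compressed-zone
// address space and build the bounded page allocator on top of it.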
AccountingAllocator::AccountingAllocator() {
  if (COMPRESS_ZONES_BOOL) {
    v8::PageAllocator* platform_page_allocator = GetPlatformPageAllocator();
    VirtualMemory memory = ReserveAddressSpace(platform_page_allocator);
    reserved_area_ = std::make_unique<VirtualMemory>(std::move(memory));
    bounded_page_allocator_ = CreateBoundedAllocator(platform_page_allocator,
                                                     reserved_area_->address());
  }
}

AccountingAllocator::~AccountingAllocator() = default;

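// Allocates a segment of at least {bytes} bytes. Requests for compressed zones
// are rounded up to whole zone pages and served from the bounded page
// allocator; all other requests fall back to malloc with retry-on-failure.
//
// Illustrative caller-side sketch (not taken from this file):
//   Segment* segment = allocator->AllocateSegment(size, supports_compression);
//   if (segment != nullptr) {
//     // ... use the segment ...
//     allocator->ReturnSegment(segment, supports_compression);
//   }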
Segment* AccountingAllocator::AllocateSegment(size_t bytes,
                                              bool supports_compression) {
  void* memory;
  if (COMPRESS_ZONES_BOOL && supports_compression) {
    bytes = RoundUp(bytes, kZonePageSize);
    memory = AllocatePages(bounded_page_allocator_.get(), nullptr, bytes,
                           kZonePageSize, PageAllocator::kReadWrite);
  } else {
    memory = AllocWithRetry(bytes);
  }
  if (memory == nullptr) return nullptr;

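  // Account for the new allocation and keep the recorded peak usage current.
  // The compare-exchange loop is required because other threads may race to
  // raise {max_memory_usage_} concurrently.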
  size_t current =
      current_memory_usage_.fetch_add(bytes, std::memory_order_relaxed) + bytes;
  size_t max = max_memory_usage_.load(std::memory_order_relaxed);
  while (current > max && !max_memory_usage_.compare_exchange_weak(
                              max, current, std::memory_order_relaxed)) {
    // {max} was updated by {compare_exchange_weak}; retry.
  }
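  // The Segment header is constructed in-place at the start of the block, so
  // the block must be at least large enough to hold it.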
  DCHECK_LE(sizeof(Segment), bytes);
  return new (memory) Segment(bytes);
}

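// Returns a segment's memory to the backing allocator. Contents and header are
// zapped first as a debugging aid, and the usage counter is decremented.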
void AccountingAllocator::ReturnSegment(Segment* segment,
                                        bool supports_compression) {
  segment->ZapContents();
  size_t segment_size = segment->total_size();
  current_memory_usage_.fetch_sub(segment_size, std::memory_order_relaxed);
  segment->ZapHeader();
  if (COMPRESS_ZONES_BOOL && supports_compression) {
    CHECK(FreePages(bounded_page_allocator_.get(), segment, segment_size));
  } else {
    base::Free(segment);
  }
}

}  // namespace internal
}  // namespace v8