// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_SPACES_INL_H_
#define V8_HEAP_SPACES_INL_H_

#include "src/base/atomic-utils.h"
9
#include "src/base/v8-fallthrough.h"
#include "src/common/globals.h"
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/large-spaces.h"
#include "src/heap/memory-chunk-inl.h"
#include "src/heap/new-spaces.h"
#include "src/heap/paged-spaces.h"
#include "src/heap/spaces.h"

namespace v8 {
namespace internal {

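// Advances the page iterator to the next page in the owning space's page
// list; the postfix form returns the iterator's value prior to advancing.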
template <class PAGE_TYPE>
PageIteratorImpl<PAGE_TYPE>& PageIteratorImpl<PAGE_TYPE>::operator++() {
  p_ = p_->next_page();
  return *this;
}

template <class PAGE_TYPE>
PageIteratorImpl<PAGE_TYPE> PageIteratorImpl<PAGE_TYPE>::operator++(int) {
  PageIteratorImpl<PAGE_TYPE> tmp(*this);
  operator++();
  return tmp;
}

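// A PageRange covers the pages from the one containing |start| up to and
// including the one whose allocation area contains |limit|. In debug builds,
// ranges inside new space are validated against the owning semi-space.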
PageRange::PageRange(Address start, Address limit)
    : begin_(Page::FromAddress(start)),
      end_(Page::FromAllocationAreaAddress(limit)->next_page()) {
#ifdef DEBUG
  if (begin_->InNewSpace()) {
    SemiSpace::AssertValidRange(start, limit);
  }
#endif  // DEBUG
}

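// Const counterpart of PageRange above, constructed the same way.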
ConstPageRange::ConstPageRange(Address start, Address limit)
    : begin_(Page::FromAddress(start)),
      end_(Page::FromAllocationAreaAddress(limit)->next_page()) {
#ifdef DEBUG
  if (begin_->InNewSpace()) {
    SemiSpace::AssertValidRange(start, limit);
  }
#endif  // DEBUG
}

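// External backing-store bytes are tracked both per space and on the
// heap-wide counters; moving bytes between two spaces only adjusts the
// per-space counters and is a no-op when |from| == |to|.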
void Space::IncrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                               size_t amount) {
  base::CheckedIncrement(&external_backing_store_bytes_[type], amount);
  heap()->IncrementExternalBackingStoreBytes(type, amount);
}

void Space::DecrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                               size_t amount) {
  base::CheckedDecrement(&external_backing_store_bytes_[type], amount);
  heap()->DecrementExternalBackingStoreBytes(type, amount);
}

void Space::MoveExternalBackingStoreBytes(ExternalBackingStoreType type,
                                          Space* from, Space* to,
                                          size_t amount) {
  if (from == to) return;

  base::CheckedDecrement(&(from->external_backing_store_bytes_[type]), amount);
  base::CheckedIncrement(&(to->external_backing_store_bytes_[type]), amount);
}

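// Test-only: marks the page as unusable for both allocation and evacuation
// and evicts its entries from the owning paged space's free list.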
void Page::MarkNeverAllocateForTesting() {
  DCHECK(this->owner_identity() != NEW_SPACE);
  DCHECK(!IsFlagSet(NEVER_ALLOCATE_ON_PAGE));
  SetFlag(NEVER_ALLOCATE_ON_PAGE);
  SetFlag(NEVER_EVACUATE);
  reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);
}

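// Marks the page as an evacuation candidate and evicts its free-list entries
// from the owning paged space's free list.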
void Page::MarkEvacuationCandidate() {
  DCHECK(!IsFlagSet(NEVER_EVACUATE));
  DCHECK_NULL(slot_set<OLD_TO_OLD>());
  DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
  SetFlag(EVACUATION_CANDIDATE);
  reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);
}

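// Clears the evacuation-candidate flag and re-initializes the page's
// free-list categories; the old-to-old slot sets must already be gone unless
// compaction was aborted on this page.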
void Page::ClearEvacuationCandidate() {
  if (!IsFlagSet(COMPACTION_WAS_ABORTED)) {
    DCHECK_NULL(slot_set<OLD_TO_OLD>());
    DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
  }
  ClearFlag(EVACUATION_CANDIDATE);
  InitializeFreeListCategories();
}

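// Iterates all old-generation chunks in the order: old space, map space (if
// present), code space, large-object space, code large-object space.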
OldGenerationMemoryChunkIterator::OldGenerationMemoryChunkIterator(Heap* heap)
    : heap_(heap),
      state_(kOldSpaceState),
      old_iterator_(heap->old_space()->begin()),
      code_iterator_(heap->code_space()->begin()),
      map_iterator_(heap->map_space() ? heap->map_space()->begin()
                                      : PageRange::iterator(nullptr)),
      map_iterator_end_(heap->map_space() ? heap->map_space()->end()
                                          : PageRange::iterator(nullptr)),
      lo_iterator_(heap->lo_space()->begin()),
      code_lo_iterator_(heap->code_lo_space()->begin()) {}

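// Returns the next old-generation chunk, advancing through the states above;
// returns nullptr once every space has been exhausted.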
MemoryChunk* OldGenerationMemoryChunkIterator::next() {
  switch (state_) {
    case kOldSpaceState: {
      if (old_iterator_ != heap_->old_space()->end()) return *(old_iterator_++);
      state_ = kMapState;
      V8_FALLTHROUGH;
    }
    case kMapState: {
      if (map_iterator_ != map_iterator_end_) return *(map_iterator_++);
      state_ = kCodeState;
      V8_FALLTHROUGH;
    }
    case kCodeState: {
      if (code_iterator_ != heap_->code_space()->end())
        return *(code_iterator_++);
      state_ = kLargeObjectState;
      V8_FALLTHROUGH;
    }
    case kLargeObjectState: {
      if (lo_iterator_ != heap_->lo_space()->end()) return *(lo_iterator_++);
      state_ = kCodeLargeObjectState;
      V8_FALLTHROUGH;
    }
    case kCodeLargeObjectState: {
      if (code_lo_iterator_ != heap_->code_lo_space()->end())
        return *(code_lo_iterator_++);
      state_ = kFinishedState;
      V8_FALLTHROUGH;
    }
    case kFinishedState:
      return nullptr;
    default:
      break;
  }
  UNREACHABLE();
}

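// Bump-allocates |size_in_bytes| from the buffer, preceding the object with a
// filler if |alignment| requires one; fails if the aligned size does not fit
// into the remaining linear area.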
AllocationResult LocalAllocationBuffer::AllocateRawAligned(
    int size_in_bytes, AllocationAlignment alignment) {
  Address current_top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(current_top, alignment);
  int aligned_size = filler_size + size_in_bytes;
  if (!allocation_info_.CanIncrementTop(aligned_size)) {
    return AllocationResult::Failure();
  }
  HeapObject object =
      HeapObject::FromAddress(allocation_info_.IncrementTop(aligned_size));
  return filler_size > 0 ? AllocationResult::FromObject(
                               heap_->PrecedeWithFiller(object, filler_size))
                         : AllocationResult::FromObject(object);
}

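// Wraps a successful allocation of |size| bytes in a buffer spanning exactly
// that memory; a failed allocation yields an invalid buffer.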
LocalAllocationBuffer LocalAllocationBuffer::FromResult(Heap* heap,
                                                        AllocationResult result,
                                                        intptr_t size) {
  if (result.IsFailure()) return InvalidBuffer();
  HeapObject obj;
  bool ok = result.To(&obj);
  USE(ok);
  DCHECK(ok);
  Address top = HeapObject::cast(obj).address();
  return LocalAllocationBuffer(heap, LinearAllocationArea(top, top + size));
}

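// Merges |other| into this buffer, succeeding only if the two linear
// allocation areas are directly adjacent.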
bool LocalAllocationBuffer::TryMerge(LocalAllocationBuffer* other) {
  return allocation_info_.MergeIfAdjacent(other->allocation_info_);
}

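// Undoes the most recent allocation by lowering the top pointer, provided the
// buffer is valid and |object| is still at its top.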
bool LocalAllocationBuffer::TryFreeLast(HeapObject object, int object_size) {
  if (IsValid()) {
    const Address object_address = object.address();
    return allocation_info_.DecrementTopIfAdjacent(object_address, object_size);
  }
  return false;
}

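// MemoryChunkIterator visits every chunk of every space: when the current
// chunk list is exhausted, HasNext() pulls the first page of the next
// non-empty space, and Next() returns the current chunk while stepping to its
// list successor.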
bool MemoryChunkIterator::HasNext() {
  if (current_chunk_) return true;

  while (space_iterator_.HasNext()) {
    Space* space = space_iterator_.Next();
    current_chunk_ = space->first_page();
    if (current_chunk_) return true;
  }

  return false;
}

MemoryChunk* MemoryChunkIterator::Next() {
  MemoryChunk* chunk = current_chunk_;
  current_chunk_ = chunk->list_node().next();
  return chunk;
}

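// Fast-path bump allocation from the linear allocation area. The unaligned
// variant simply advances the top pointer; the aligned variant additionally
// precedes the object with a filler when the current top is not suitably
// aligned and reports the total number of bytes consumed.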
AllocationResult SpaceWithLinearArea::AllocateFastUnaligned(
    int size_in_bytes, AllocationOrigin origin) {
  if (!allocation_info_->CanIncrementTop(size_in_bytes)) {
    return AllocationResult::Failure();
  }
  HeapObject obj =
      HeapObject::FromAddress(allocation_info_->IncrementTop(size_in_bytes));

  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj.address(), size_in_bytes);

  return AllocationResult::FromObject(obj);
}

AllocationResult SpaceWithLinearArea::AllocateFastAligned(
    int size_in_bytes, int* result_aligned_size_in_bytes,
    AllocationAlignment alignment, AllocationOrigin origin) {
  Address top = allocation_info_->top();
  int filler_size = Heap::GetFillToAlign(top, alignment);
  int aligned_size_in_bytes = size_in_bytes + filler_size;

  if (!allocation_info_->CanIncrementTop(aligned_size_in_bytes)) {
    return AllocationResult::Failure();
  }
  HeapObject obj = HeapObject::FromAddress(
      allocation_info_->IncrementTop(aligned_size_in_bytes));
  if (result_aligned_size_in_bytes)
    *result_aligned_size_in_bytes = aligned_size_in_bytes;

  if (filler_size > 0) {
    obj = heap()->PrecedeWithFiller(obj, filler_size);
  }

  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj.address(), size_in_bytes);

  return AllocationResult::FromObject(obj);
}

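// Main allocation entry point: tries the fast path (aligned or unaligned,
// depending on the requested alignment) and falls back to the slow path when
// the linear area cannot satisfy the request.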
AllocationResult SpaceWithLinearArea::AllocateRaw(int size_in_bytes,
                                                  AllocationAlignment alignment,
                                                  AllocationOrigin origin) {
  DCHECK(!FLAG_enable_third_party_heap);

  AllocationResult result;

  if (USE_ALLOCATION_ALIGNMENT_BOOL && alignment != kTaggedAligned) {
    result = AllocateFastAligned(size_in_bytes, nullptr, alignment, origin);
  } else {
    result = AllocateFastUnaligned(size_in_bytes, origin);
  }

  return result.IsFailure() ? AllocateRawSlow(size_in_bytes, alignment, origin)
                            : result;
}

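// Slow-path unaligned allocation: refills the linear area via
// EnsureAllocation, bump-allocates, records the allocation origin when
// tracing is enabled, and notifies allocation observers.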
AllocationResult SpaceWithLinearArea::AllocateRawUnaligned(
    int size_in_bytes, AllocationOrigin origin) {
  DCHECK(!FLAG_enable_third_party_heap);
  int max_aligned_size;
  if (!EnsureAllocation(size_in_bytes, kTaggedAligned, origin,
                        &max_aligned_size)) {
    return AllocationResult::Failure();
  }

  DCHECK_EQ(max_aligned_size, size_in_bytes);
  DCHECK_LE(allocation_info_->start(), allocation_info_->top());

  AllocationResult result = AllocateFastUnaligned(size_in_bytes, origin);
  DCHECK(!result.IsFailure());

  if (FLAG_trace_allocations_origins) {
    UpdateAllocationOrigins(origin);
  }

  InvokeAllocationObservers(result.ToAddress(), size_in_bytes, size_in_bytes,
                            size_in_bytes);

  return result;
}

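// Slow-path aligned allocation: like the unaligned case, but EnsureAllocation
// provides an upper bound (|max_aligned_size|) on the bytes required, and the
// observers are notified with both the aligned size and that bound.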
AllocationResult SpaceWithLinearArea::AllocateRawAligned(
    int size_in_bytes, AllocationAlignment alignment, AllocationOrigin origin) {
  DCHECK(!FLAG_enable_third_party_heap);
  int max_aligned_size;
  if (!EnsureAllocation(size_in_bytes, alignment, origin, &max_aligned_size)) {
    return AllocationResult::Failure();
  }

  DCHECK_GE(max_aligned_size, size_in_bytes);
  DCHECK_LE(allocation_info_->start(), allocation_info_->top());

  int aligned_size_in_bytes;

  AllocationResult result = AllocateFastAligned(
      size_in_bytes, &aligned_size_in_bytes, alignment, origin);
  DCHECK_GE(max_aligned_size, aligned_size_in_bytes);
  DCHECK(!result.IsFailure());

  if (FLAG_trace_allocations_origins) {
    UpdateAllocationOrigins(origin);
  }

  InvokeAllocationObservers(result.ToAddress(), size_in_bytes,
                            aligned_size_in_bytes, max_aligned_size);

  return result;
}

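// Dispatches the slow path to the aligned or unaligned variant based on the
// requested alignment.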
AllocationResult SpaceWithLinearArea::AllocateRawSlow(
    int size_in_bytes, AllocationAlignment alignment, AllocationOrigin origin) {
  AllocationResult result =
      USE_ALLOCATION_ALIGNMENT_BOOL && alignment != kTaggedAligned
          ? AllocateRawAligned(size_in_bytes, alignment, origin)
          : AllocateRawUnaligned(size_in_bytes, origin);
  return result;
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_SPACES_INL_H_