// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_SPACES_INL_H_
#define V8_HEAP_SPACES_INL_H_

#include "src/base/atomic-utils.h"
#include "src/base/bounded-page-allocator.h"
#include "src/base/v8-fallthrough.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/spaces.h"
#include "src/msan.h"
#include "src/objects/code-inl.h"

namespace v8 {
namespace internal {

template <class PAGE_TYPE>
PageIteratorImpl<PAGE_TYPE>& PageIteratorImpl<PAGE_TYPE>::operator++() {
  p_ = p_->next_page();
  return *this;
}

template <class PAGE_TYPE>
PageIteratorImpl<PAGE_TYPE> PageIteratorImpl<PAGE_TYPE>::operator++(int) {
  PageIteratorImpl<PAGE_TYPE> tmp(*this);
  operator++();
  return tmp;
}

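// A PageRange covers the pages from the one containing |start| up to and
// including the page whose allocation area contains |limit|.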
PageRange::PageRange(Address start, Address limit)
    : begin_(Page::FromAddress(start)),
      end_(Page::FromAllocationAreaAddress(limit)->next_page()) {
#ifdef DEBUG
  if (begin_->InNewSpace()) {
    SemiSpace::AssertValidRange(start, limit);
  }
#endif  // DEBUG
}

// -----------------------------------------------------------------------------
// SemiSpaceIterator

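// Returns the next live object in the semispace, stepping over fillers and
// page boundaries; returns nullptr once |limit_| is reached.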
HeapObject* SemiSpaceIterator::Next() {
  while (current_ != limit_) {
    if (Page::IsAlignedToPageSize(current_)) {
      Page* page = Page::FromAllocationAreaAddress(current_);
      page = page->next_page();
      DCHECK(page);
      current_ = page->area_start();
      if (current_ == limit_) return nullptr;
    }
    HeapObject* object = HeapObject::FromAddress(current_);
    current_ += object->Size();
    if (!object->IsFiller()) {
      return object;
    }
  }
  return nullptr;
}

// -----------------------------------------------------------------------------
// HeapObjectIterator

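// Returns the next live object on the current page, advancing to the next
// page when the current one is exhausted; nullptr when the space is done.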
HeapObject* HeapObjectIterator::Next() {
  do {
    HeapObject* next_obj = FromCurrentPage();
    if (next_obj != nullptr) return next_obj;
  } while (AdvanceToNextPage());
  return nullptr;
}

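// Walks the current page, skipping fillers and jumping over the unused part
// of the linear allocation area (between top() and limit()).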
HeapObject* HeapObjectIterator::FromCurrentPage() {
  while (cur_addr_ != cur_end_) {
    if (cur_addr_ == space_->top() && cur_addr_ != space_->limit()) {
      cur_addr_ = space_->limit();
      continue;
    }
    HeapObject* obj = HeapObject::FromAddress(cur_addr_);
    const int obj_size = obj->Size();
    cur_addr_ += obj_size;
    DCHECK_LE(cur_addr_, cur_end_);
    if (!obj->IsFiller()) {
      if (obj->IsCode()) {
        DCHECK_EQ(space_, space_->heap()->code_space());
        DCHECK_CODEOBJECT_SIZE(obj_size, space_);
      } else {
        DCHECK_OBJECT_SIZE(obj_size);
      }
      return obj;
    }
  }
  return nullptr;
}

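// External backing store accounting: per-space counters are updated with
// checked arithmetic and mirrored in the heap-wide totals. Moving bytes
// between two spaces leaves the heap-wide totals unchanged.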
void Space::IncrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                               size_t amount) {
  base::CheckedIncrement(&external_backing_store_bytes_[type], amount);
  heap()->IncrementExternalBackingStoreBytes(type, amount);
}

void Space::DecrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                               size_t amount) {
  base::CheckedDecrement(&external_backing_store_bytes_[type], amount);
  heap()->DecrementExternalBackingStoreBytes(type, amount);
}

void Space::MoveExternalBackingStoreBytes(ExternalBackingStoreType type,
                                          Space* from, Space* to,
                                          size_t amount) {
  if (from == to) return;

  base::CheckedDecrement(&(from->external_backing_store_bytes_[type]), amount);
  base::CheckedIncrement(&(to->external_backing_store_bytes_[type]), amount);
}

// -----------------------------------------------------------------------------
// SemiSpace

bool SemiSpace::Contains(HeapObject* o) {
  return id_ == kToSpace
             ? MemoryChunk::FromAddress(o->address())->InToSpace()
             : MemoryChunk::FromAddress(o->address())->InFromSpace();
}

bool SemiSpace::Contains(Object* o) {
  return o->IsHeapObject() && Contains(HeapObject::cast(o));
}

bool SemiSpace::ContainsSlow(Address a) {
  for (Page* p : *this) {
    if (p == MemoryChunk::FromAddress(a)) return true;
  }
  return false;
}

// --------------------------------------------------------------------------
// NewSpace

bool NewSpace::Contains(HeapObject* o) {
  return MemoryChunk::FromAddress(o->address())->InNewSpace();
}

bool NewSpace::Contains(Object* o) {
  return o->IsHeapObject() && Contains(HeapObject::cast(o));
}

bool NewSpace::ContainsSlow(Address a) {
  return from_space_.ContainsSlow(a) || to_space_.ContainsSlow(a);
}

bool NewSpace::ToSpaceContainsSlow(Address a) {
  return to_space_.ContainsSlow(a);
}

bool NewSpace::ToSpaceContains(Object* o) { return to_space_.Contains(o); }
bool NewSpace::FromSpaceContains(Object* o) { return from_space_.Contains(o); }

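// Returns true if |addr| lies on a page owned by this space; addresses inside
// large objects never belong to a paged space.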
bool PagedSpace::Contains(Address addr) {
  if (heap()->IsWithinLargeObject(addr)) return false;
  return MemoryChunk::FromAnyPointerAddress(heap(), addr)->owner() == this;
}

bool PagedSpace::Contains(Object* o) {
  if (!o->IsHeapObject()) return false;
  return Page::FromAddress(HeapObject::cast(o)->address())->owner() == this;
}

bool PagedSpace::Contains(ObjectPtr o) {
  if (!o.IsHeapObject()) return false;
  return Page::FromAddress(o.ptr())->owner() == this;
}

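// Detaches the page's free-list categories from this space's free list.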
void PagedSpace::UnlinkFreeListCategories(Page* page) {
  DCHECK_EQ(this, page->owner());
  page->ForAllFreeListCategories([this](FreeListCategory* category) {
    DCHECK_EQ(free_list(), category->owner());
    category->set_free_list(nullptr);
    free_list()->RemoveCategory(category);
  });
}

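// Reattaches the page's free-list categories to this space's free list and
// returns the number of bytes that became available again.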
size_t PagedSpace::RelinkFreeListCategories(Page* page) {
  DCHECK_EQ(this, page->owner());
  size_t added = 0;
  page->ForAllFreeListCategories([this, &added](FreeListCategory* category) {
    category->set_free_list(&free_list_);
    added += category->available();
    category->Relink();
  });
  DCHECK_EQ(page->AvailableInFreeList(),
            page->AvailableInFreeListFromAllocatedBytes());
  return added;
}

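// If |object| was the most recent allocation from the linear allocation area,
// retract the top pointer so its memory can be reused immediately.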
bool PagedSpace::TryFreeLast(HeapObject* object, int object_size) {
  if (allocation_info_.top() != kNullAddress) {
    const Address object_address = object->address();
    if ((allocation_info_.top() - object_size) == object_address) {
      allocation_info_.set_top(object_address);
      return true;
    }
  }
  return false;
}

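// Maps an arbitrary interior pointer to its MemoryChunk: large-object pages
// are looked up explicitly, all other addresses are resolved by masking.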
MemoryChunk* MemoryChunk::FromAnyPointerAddress(Heap* heap, Address addr) {
  MemoryChunk* chunk = heap->lo_space()->FindPage(addr);
  if (chunk == nullptr) {
    chunk = MemoryChunk::FromAddress(addr);
  }
  return chunk;
}

void MemoryChunk::IncrementExternalBackingStoreBytes(
    ExternalBackingStoreType type, size_t amount) {
  base::CheckedIncrement(&external_backing_store_bytes_[type], amount);
  owner()->IncrementExternalBackingStoreBytes(type, amount);
}

void MemoryChunk::DecrementExternalBackingStoreBytes(
    ExternalBackingStoreType type, size_t amount) {
  base::CheckedDecrement(&external_backing_store_bytes_[type], amount);
  owner()->DecrementExternalBackingStoreBytes(type, amount);
}

void MemoryChunk::MoveExternalBackingStoreBytes(ExternalBackingStoreType type,
                                                MemoryChunk* from,
                                                MemoryChunk* to,
                                                size_t amount) {
  base::CheckedDecrement(&(from->external_backing_store_bytes_[type]), amount);
  base::CheckedIncrement(&(to->external_backing_store_bytes_[type]), amount);
  Space::MoveExternalBackingStoreBytes(type, from->owner(), to->owner(),
                                       amount);
}

bool MemoryChunk::IsInNewLargeObjectSpace() const {
  return owner()->identity() == NEW_LO_SPACE;
}

void Page::MarkNeverAllocateForTesting() {
  DCHECK(this->owner()->identity() != NEW_SPACE);
  DCHECK(!IsFlagSet(NEVER_ALLOCATE_ON_PAGE));
  SetFlag(NEVER_ALLOCATE_ON_PAGE);
  SetFlag(NEVER_EVACUATE);
  reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);
}

void Page::MarkEvacuationCandidate() {
  DCHECK(!IsFlagSet(NEVER_EVACUATE));
  DCHECK_NULL(slot_set<OLD_TO_OLD>());
  DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
  SetFlag(EVACUATION_CANDIDATE);
  reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);
}

void Page::ClearEvacuationCandidate() {
  if (!IsFlagSet(COMPACTION_WAS_ABORTED)) {
    DCHECK_NULL(slot_set<OLD_TO_OLD>());
    DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
  }
  ClearFlag(EVACUATION_CANDIDATE);
  InitializeFreeListCategories();
}

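// Iterates over all old-generation chunks: old space, map space, code space,
// then the regular and code large-object spaces.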
OldGenerationMemoryChunkIterator::OldGenerationMemoryChunkIterator(Heap* heap)
    : heap_(heap),
      state_(kOldSpaceState),
      old_iterator_(heap->old_space()->begin()),
      code_iterator_(heap->code_space()->begin()),
      map_iterator_(heap->map_space()->begin()),
      lo_iterator_(heap->lo_space()->begin()),
      code_lo_iterator_(heap->code_lo_space()->begin()) {}

MemoryChunk* OldGenerationMemoryChunkIterator::next() {
  switch (state_) {
    case kOldSpaceState: {
      if (old_iterator_ != heap_->old_space()->end()) return *(old_iterator_++);
      state_ = kMapState;
      V8_FALLTHROUGH;
    }
    case kMapState: {
      if (map_iterator_ != heap_->map_space()->end()) return *(map_iterator_++);
      state_ = kCodeState;
      V8_FALLTHROUGH;
    }
    case kCodeState: {
      if (code_iterator_ != heap_->code_space()->end())
        return *(code_iterator_++);
      state_ = kLargeObjectState;
      V8_FALLTHROUGH;
    }
    case kLargeObjectState: {
      if (lo_iterator_ != heap_->lo_space()->end()) return *(lo_iterator_++);
      state_ = kCodeLargeObjectState;
      V8_FALLTHROUGH;
    }
    case kCodeLargeObjectState: {
      if (code_lo_iterator_ != heap_->code_lo_space()->end())
        return *(code_lo_iterator_++);
      state_ = kFinishedState;
      V8_FALLTHROUGH;
    }
    case kFinishedState:
      return nullptr;
    default:
      break;
  }
  UNREACHABLE();
}

Page* FreeList::GetPageForCategoryType(FreeListCategoryType type) {
  return top(type) ? top(type)->page() : nullptr;
}

FreeList* FreeListCategory::owner() { return free_list_; }

bool FreeListCategory::is_linked() {
  return prev_ != nullptr || next_ != nullptr;
}

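// Bump-pointer allocation within the LAB; a filler object precedes the result
// when alignment padding is required. Returns a retry result when full.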
AllocationResult LocalAllocationBuffer::AllocateRawAligned(
    int size_in_bytes, AllocationAlignment alignment) {
  Address current_top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(current_top, alignment);

  Address new_top = current_top + filler_size + size_in_bytes;
  if (new_top > allocation_info_.limit()) return AllocationResult::Retry();

  allocation_info_.set_top(new_top);
  if (filler_size > 0) {
    return heap_->PrecedeWithFiller(HeapObject::FromAddress(current_top),
                                    filler_size);
  }

  return AllocationResult(HeapObject::FromAddress(current_top));
}

bool PagedSpace::EnsureLinearAllocationArea(int size_in_bytes) {
  if (allocation_info_.top() + size_in_bytes <= allocation_info_.limit()) {
    return true;
  }
  return SlowRefillLinearAllocationArea(size_in_bytes);
}

HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) {
  Address current_top = allocation_info_.top();
  Address new_top = current_top + size_in_bytes;
  DCHECK_LE(new_top, allocation_info_.limit());
  allocation_info_.set_top(new_top);
  return HeapObject::FromAddress(current_top);
}

HeapObject* PagedSpace::TryAllocateLinearlyAligned(
    int* size_in_bytes, AllocationAlignment alignment) {
  Address current_top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(current_top, alignment);

  Address new_top = current_top + filler_size + *size_in_bytes;
  if (new_top > allocation_info_.limit()) return nullptr;

  allocation_info_.set_top(new_top);
  if (filler_size > 0) {
    *size_in_bytes += filler_size;
    return heap()->PrecedeWithFiller(HeapObject::FromAddress(current_top),
                                     filler_size);
  }

  return HeapObject::FromAddress(current_top);
}

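// Unaligned allocation: refills the linear allocation area if needed and, for
// code space, keeps the skip list up to date.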
AllocationResult PagedSpace::AllocateRawUnaligned(
    int size_in_bytes, UpdateSkipList update_skip_list) {
  DCHECK_IMPLIES(identity() == RO_SPACE, heap()->CanAllocateInReadOnlySpace());
  if (!EnsureLinearAllocationArea(size_in_bytes)) {
    return AllocationResult::Retry(identity());
  }
  HeapObject* object = AllocateLinearly(size_in_bytes);
  DCHECK_NOT_NULL(object);
  if (update_skip_list == UPDATE_SKIP_LIST && identity() == CODE_SPACE) {
    SkipList::Update(object->address(), size_in_bytes);
  }
  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes);
  return object;
}


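// Aligned allocation (OLD_SPACE and RO_SPACE only): try the fast path first,
// then reserve room for the worst-case filler and retry.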
AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
                                                AllocationAlignment alignment) {
  DCHECK(identity() == OLD_SPACE || identity() == RO_SPACE);
  DCHECK_IMPLIES(identity() == RO_SPACE, heap()->CanAllocateInReadOnlySpace());
  int allocation_size = size_in_bytes;
  HeapObject* object = TryAllocateLinearlyAligned(&allocation_size, alignment);
  if (object == nullptr) {
    // We don't know exactly how much filler we need to align until space is
    // allocated, so assume the worst case.
    int filler_size = Heap::GetMaximumFillToAlign(alignment);
    allocation_size += filler_size;
    if (!EnsureLinearAllocationArea(allocation_size)) {
      return AllocationResult::Retry(identity());
    }
    allocation_size = size_in_bytes;
    object = TryAllocateLinearlyAligned(&allocation_size, alignment);
    DCHECK_NOT_NULL(object);
  }
  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes);
  return object;
}


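// Main allocation entry point for paged spaces: accounts for allocation
// folding done by generated code, dispatches to the (un)aligned path, and
// notifies allocation observers on success.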
AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
                                         AllocationAlignment alignment) {
  if (top_on_previous_step_ && top() < top_on_previous_step_ &&
      SupportsInlineAllocation()) {
    // Generated code decreased the top() pointer to do folded allocations.
    // The top_on_previous_step_ can be one byte beyond the current page.
    DCHECK_NE(top(), kNullAddress);
    DCHECK_EQ(Page::FromAllocationAreaAddress(top()),
              Page::FromAllocationAreaAddress(top_on_previous_step_ - 1));
    top_on_previous_step_ = top();
  }
  size_t bytes_since_last =
      top_on_previous_step_ ? top() - top_on_previous_step_ : 0;

  DCHECK_IMPLIES(!SupportsInlineAllocation(), bytes_since_last == 0);
#ifdef V8_HOST_ARCH_32_BIT
  AllocationResult result =
      alignment == kDoubleAligned
          ? AllocateRawAligned(size_in_bytes, kDoubleAligned)
          : AllocateRawUnaligned(size_in_bytes);
#else
  AllocationResult result = AllocateRawUnaligned(size_in_bytes);
#endif
  HeapObject* heap_obj = nullptr;
  if (!result.IsRetry() && result.To(&heap_obj) && !is_local()) {
    DCHECK_IMPLIES(
        heap()->incremental_marking()->black_allocation(),
        heap()->incremental_marking()->marking_state()->IsBlack(heap_obj));
    AllocationStep(static_cast<int>(size_in_bytes + bytes_since_last),
                   heap_obj->address(), size_in_bytes);
    StartNextInlineAllocationStep();
  }
  return result;
}


// -----------------------------------------------------------------------------
// NewSpace


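// Aligned new-space allocation: grows the allocation area via
// EnsureAllocation when the aligned request does not fit, then bump-allocates
// with an optional filler in front of the object.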
AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
                                              AllocationAlignment alignment) {
  Address top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(top, alignment);
  int aligned_size_in_bytes = size_in_bytes + filler_size;

  if (allocation_info_.limit() - top <
      static_cast<uintptr_t>(aligned_size_in_bytes)) {
    // See if we can create room.
    if (!EnsureAllocation(size_in_bytes, alignment)) {
      return AllocationResult::Retry();
    }

    top = allocation_info_.top();
    filler_size = Heap::GetFillToAlign(top, alignment);
    aligned_size_in_bytes = size_in_bytes + filler_size;
  }

  HeapObject* obj = HeapObject::FromAddress(top);
  allocation_info_.set_top(top + aligned_size_in_bytes);
  DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

  if (filler_size > 0) {
    obj = heap()->PrecedeWithFiller(obj, filler_size);
  }

  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);

  return obj;
}


AllocationResult NewSpace::AllocateRawUnaligned(int size_in_bytes) {
  Address top = allocation_info_.top();
  if (allocation_info_.limit() < top + size_in_bytes) {
    // See if we can create room.
    if (!EnsureAllocation(size_in_bytes, kWordAligned)) {
      return AllocationResult::Retry();
    }

    top = allocation_info_.top();
  }

  HeapObject* obj = HeapObject::FromAddress(top);
  allocation_info_.set_top(top + size_in_bytes);
  DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);

  return obj;
}


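// New-space entry point; double alignment is only requested on 32-bit hosts.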
AllocationResult NewSpace::AllocateRaw(int size_in_bytes,
                                       AllocationAlignment alignment) {
  if (top() < top_on_previous_step_) {
    // Generated code decreased the top() pointer to do folded allocations
    DCHECK_EQ(Page::FromAllocationAreaAddress(top()),
              Page::FromAllocationAreaAddress(top_on_previous_step_));
    top_on_previous_step_ = top();
  }
#ifdef V8_HOST_ARCH_32_BIT
  return alignment == kDoubleAligned
             ? AllocateRawAligned(size_in_bytes, kDoubleAligned)
             : AllocateRawUnaligned(size_in_bytes);
#else
  return AllocateRawUnaligned(size_in_bytes);
#endif
}

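// Variant of AllocateRaw that serializes callers on |mutex_|.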
V8_WARN_UNUSED_RESULT inline AllocationResult NewSpace::AllocateRawSynchronized(
    int size_in_bytes, AllocationAlignment alignment) {
  base::MutexGuard guard(&mutex_);
  return AllocateRaw(size_in_bytes, alignment);
}

LocalAllocationBuffer LocalAllocationBuffer::FromResult(Heap* heap,
                                                        AllocationResult result,
                                                        intptr_t size) {
  if (result.IsRetry()) return InvalidBuffer();
  HeapObject* obj = nullptr;
  bool ok = result.To(&obj);
  USE(ok);
  DCHECK(ok);
  Address top = HeapObject::cast(obj)->address();
  return LocalAllocationBuffer(heap, LinearAllocationArea(top, top + size));
}


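// Two LABs merge when this buffer's top meets the other buffer's limit; the
// other buffer is reset on success.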
bool LocalAllocationBuffer::TryMerge(LocalAllocationBuffer* other) {
  if (allocation_info_.top() == other->allocation_info_.limit()) {
    allocation_info_.set_top(other->allocation_info_.top());
    other->allocation_info_.Reset(kNullAddress, kNullAddress);
    return true;
  }
  return false;
}

bool LocalAllocationBuffer::TryFreeLast(HeapObject* object, int object_size) {
  if (IsValid()) {
    const Address object_address = object->address();
    if ((allocation_info_.top() - object_size) == object_address) {
      allocation_info_.set_top(object_address);
      return true;
    }
  }
  return false;
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_SPACES_INL_H_