// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_SPACES_INL_H_
#define V8_HEAP_SPACES_INL_H_

#include "src/base/atomic-utils.h"
#include "src/base/v8-fallthrough.h"
#include "src/common/globals.h"
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/large-spaces.h"
#include "src/heap/memory-chunk-inl.h"
#include "src/heap/new-spaces.h"
#include "src/heap/paged-spaces.h"
#include "src/heap/spaces.h"
#include "src/objects/code-inl.h"

namespace v8 {
namespace internal {

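// Advances the iterator to the next page in the space's page list; the
// postfix form returns the iterator's previous position.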
template <class PAGE_TYPE>
PageIteratorImpl<PAGE_TYPE>& PageIteratorImpl<PAGE_TYPE>::operator++() {
  p_ = p_->next_page();
  return *this;
}

template <class PAGE_TYPE>
PageIteratorImpl<PAGE_TYPE> PageIteratorImpl<PAGE_TYPE>::operator++(int) {
  PageIteratorImpl<PAGE_TYPE> tmp(*this);
  operator++();
  return tmp;
}

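// Constructs a range over all pages from `start` up to and including the page
// containing the allocation-area address `limit`.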
PageRange::PageRange(Address start, Address limit)
    : begin_(Page::FromAddress(start)),
      end_(Page::FromAllocationAreaAddress(limit)->next_page()) {
#ifdef DEBUG
  if (begin_->InNewSpace()) {
    SemiSpace::AssertValidRange(start, limit);
  }
#endif  // DEBUG
}

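// External backing store bytes (e.g. ArrayBuffer backing stores) are
// accounted both per space and globally on the heap, using atomic counters.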
void Space::IncrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                               size_t amount) {
  base::CheckedIncrement(&external_backing_store_bytes_[type], amount);
  heap()->IncrementExternalBackingStoreBytes(type, amount);
}

void Space::DecrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                               size_t amount) {
  base::CheckedDecrement(&external_backing_store_bytes_[type], amount);
  heap()->DecrementExternalBackingStoreBytes(type, amount);
}

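// Moving bytes between two spaces leaves the heap-wide total unchanged, so
// only the per-space counters are adjusted here.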
void Space::MoveExternalBackingStoreBytes(ExternalBackingStoreType type,
                                          Space* from, Space* to,
                                          size_t amount) {
  if (from == to) return;

  base::CheckedDecrement(&(from->external_backing_store_bytes_[type]), amount);
  base::CheckedIncrement(&(to->external_backing_store_bytes_[type]), amount);
}

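// Marks the page so that it is never used for allocation again and evicts its
// free list entries so the owning space cannot allocate into it.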
void Page::MarkNeverAllocateForTesting() {
  DCHECK(this->owner_identity() != NEW_SPACE);
  DCHECK(!IsFlagSet(NEVER_ALLOCATE_ON_PAGE));
  SetFlag(NEVER_ALLOCATE_ON_PAGE);
  SetFlag(NEVER_EVACUATE);
  reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);
}

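// Marks the page as a candidate for evacuation during compaction; its free
// list entries are evicted so no new allocation lands on the page.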
void Page::MarkEvacuationCandidate() {
  DCHECK(!IsFlagSet(NEVER_EVACUATE));
  DCHECK_NULL(slot_set<OLD_TO_OLD>());
  DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
  SetFlag(EVACUATION_CANDIDATE);
  reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);
}

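// Reverses MarkEvacuationCandidate. Unless compaction was aborted on this
// page, its old-to-old remembered sets must already be empty.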
void Page::ClearEvacuationCandidate() {
  if (!IsFlagSet(COMPACTION_WAS_ABORTED)) {
    DCHECK_NULL(slot_set<OLD_TO_OLD>());
    DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
  }
  ClearFlag(EVACUATION_CANDIDATE);
  InitializeFreeListCategories();
}

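// Iterates over all old-generation memory chunks: old, map and code space
// pages, followed by the regular and code large object spaces.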
OldGenerationMemoryChunkIterator::OldGenerationMemoryChunkIterator(Heap* heap)
    : heap_(heap),
      state_(kOldSpaceState),
      old_iterator_(heap->old_space()->begin()),
      code_iterator_(heap->code_space()->begin()),
      map_iterator_(heap->map_space()->begin()),
      lo_iterator_(heap->lo_space()->begin()),
      code_lo_iterator_(heap->code_lo_space()->begin()) {}

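// Returns the next chunk, falling through from one space to the next as each
// is exhausted; returns nullptr once every space has been visited.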
MemoryChunk* OldGenerationMemoryChunkIterator::next() {
  switch (state_) {
    case kOldSpaceState: {
      if (old_iterator_ != heap_->old_space()->end()) return *(old_iterator_++);
      state_ = kMapState;
      V8_FALLTHROUGH;
    }
    case kMapState: {
      if (map_iterator_ != heap_->map_space()->end()) return *(map_iterator_++);
      state_ = kCodeState;
      V8_FALLTHROUGH;
    }
    case kCodeState: {
      if (code_iterator_ != heap_->code_space()->end())
        return *(code_iterator_++);
      state_ = kLargeObjectState;
      V8_FALLTHROUGH;
    }
    case kLargeObjectState: {
      if (lo_iterator_ != heap_->lo_space()->end()) return *(lo_iterator_++);
      state_ = kCodeLargeObjectState;
      V8_FALLTHROUGH;
    }
    case kCodeLargeObjectState: {
      if (code_lo_iterator_ != heap_->code_lo_space()->end())
        return *(code_lo_iterator_++);
      state_ = kFinishedState;
      V8_FALLTHROUGH;
    }
    case kFinishedState:
      return nullptr;
    default:
      break;
  }
  UNREACHABLE();
}

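// Bump-pointer allocation from the buffer. Alignment padding, if needed, is
// written as a filler object preceding the allocation; the allocation fails
// with a retry result when top + filler + size would exceed the limit.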
AllocationResult LocalAllocationBuffer::AllocateRawAligned(
    int size_in_bytes, AllocationAlignment alignment) {
  Address current_top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(current_top, alignment);

  Address new_top = current_top + filler_size + size_in_bytes;
  if (new_top > allocation_info_.limit()) return AllocationResult::Retry();

  allocation_info_.set_top(new_top);
  if (filler_size > 0) {
    return Heap::PrecedeWithFiller(ReadOnlyRoots(heap_),
                                   HeapObject::FromAddress(current_top),
                                   filler_size);
  }

  return AllocationResult(HeapObject::FromAddress(current_top));
}

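// Wraps a successful allocation result of `size` bytes into a buffer covering
// [top, top + size); a retry result produces an invalid buffer.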
LocalAllocationBuffer LocalAllocationBuffer::FromResult(Heap* heap,
                                                        AllocationResult result,
                                                        intptr_t size) {
  if (result.IsRetry()) return InvalidBuffer();
  HeapObject obj;
  bool ok = result.To(&obj);
  USE(ok);
  DCHECK(ok);
  Address top = obj.address();
  return LocalAllocationBuffer(heap, LinearAllocationArea(top, top + size));
}
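
// A minimal usage sketch (hypothetical call site; `heap`, `result` and
// `kLabSize` stand in for caller-provided values, and the size and alignment
// arguments are illustrative):
//
//   LocalAllocationBuffer lab =
//       LocalAllocationBuffer::FromResult(heap, result, kLabSize);
//   if (lab.IsValid()) {
//     AllocationResult object = lab.AllocateRawAligned(kTaggedSize,
//                                                      kWordAligned);
//   }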

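// Merges `other` into this buffer when the two areas are contiguous, i.e.
// this buffer's top is the other buffer's limit; `other` is reset on success.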
bool LocalAllocationBuffer::TryMerge(LocalAllocationBuffer* other) {
  if (allocation_info_.top() == other->allocation_info_.limit()) {
    allocation_info_.set_top(other->allocation_info_.top());
    other->allocation_info_.Reset(kNullAddress, kNullAddress);
    return true;
  }
  return false;
}

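// Undoes the most recent allocation if `object` of `object_size` bytes is
// exactly the last object allocated from this buffer.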
bool LocalAllocationBuffer::TryFreeLast(HeapObject object, int object_size) {
  if (IsValid()) {
    const Address object_address = object.address();
    if ((allocation_info_.top() - object_size) == object_address) {
      allocation_info_.set_top(object_address);
      return true;
    }
  }
  return false;
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_SPACES_INL_H_