// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_SPACES_INL_H_
#define V8_HEAP_SPACES_INL_H_

#include "src/base/atomic-utils.h"
#include "src/base/v8-fallthrough.h"
#include "src/common/globals.h"
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/memory-chunk-inl.h"
#include "src/heap/new-spaces.h"
#include "src/heap/paged-spaces.h"
#include "src/heap/spaces.h"
#include "src/objects/code-inl.h"

namespace v8 {
namespace internal {

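// Advances the iterator to the next page in the space's list of pages.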
template <class PAGE_TYPE>
PageIteratorImpl<PAGE_TYPE>& PageIteratorImpl<PAGE_TYPE>::operator++() {
  p_ = p_->next_page();
  return *this;
}

template <class PAGE_TYPE>
PageIteratorImpl<PAGE_TYPE> PageIteratorImpl<PAGE_TYPE>::operator++(int) {
  PageIteratorImpl<PAGE_TYPE> tmp(*this);
  operator++();
  return tmp;
}

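// A PageRange covers every page from the one containing |start| up to and
// including the one containing |limit|; |end_| is the one-past-the-end
// sentinel page.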
PageRange::PageRange(Address start, Address limit)
    : begin_(Page::FromAddress(start)),
      end_(Page::FromAllocationAreaAddress(limit)->next_page()) {
#ifdef DEBUG
  if (begin_->InNewSpace()) {
    SemiSpace::AssertValidRange(start, limit);
  }
#endif  // DEBUG
}

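// Per-space accounting of external (off-heap) backing store bytes. Increments
// and decrements are mirrored into the heap-wide counters; moving bytes
// between two spaces leaves the heap total unchanged, so only the per-space
// counters are adjusted.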
void Space::IncrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                               size_t amount) {
  base::CheckedIncrement(&external_backing_store_bytes_[type], amount);
  heap()->IncrementExternalBackingStoreBytes(type, amount);
}

void Space::DecrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                               size_t amount) {
  base::CheckedDecrement(&external_backing_store_bytes_[type], amount);
  heap()->DecrementExternalBackingStoreBytes(type, amount);
}

void Space::MoveExternalBackingStoreBytes(ExternalBackingStoreType type,
                                          Space* from, Space* to,
                                          size_t amount) {
  if (from == to) return;

  base::CheckedDecrement(&(from->external_backing_store_bytes_[type]), amount);
  base::CheckedIncrement(&(to->external_backing_store_bytes_[type]), amount);
}

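// Testing-only: forbids any further allocation on this page and excludes it
// from evacuation; the owning space's free-list entries for the page are
// evicted. Must not be used on new-space pages.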
void Page::MarkNeverAllocateForTesting() {
  DCHECK(this->owner_identity() != NEW_SPACE);
  DCHECK(!IsFlagSet(NEVER_ALLOCATE_ON_PAGE));
  SetFlag(NEVER_ALLOCATE_ON_PAGE);
  SetFlag(NEVER_EVACUATE);
  reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);
}

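// Marks the page as a candidate for evacuation by the mark-compact collector
// and evicts its free-list entries so the page receives no new allocations.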
void Page::MarkEvacuationCandidate() {
  DCHECK(!IsFlagSet(NEVER_EVACUATE));
  DCHECK_NULL(slot_set<OLD_TO_OLD>());
  DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
  SetFlag(EVACUATION_CANDIDATE);
  reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);
}

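// Reverts evacuation candidacy and rebuilds the page's free-list categories.
// Unless compaction on the page was aborted, its OLD_TO_OLD slot sets must
// already have been released.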
void Page::ClearEvacuationCandidate() {
  if (!IsFlagSet(COMPACTION_WAS_ABORTED)) {
    DCHECK_NULL(slot_set<OLD_TO_OLD>());
    DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
  }
  ClearFlag(EVACUATION_CANDIDATE);
  InitializeFreeListCategories();
}

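// Walks all old-generation chunks: old-space, map-space and code-space pages,
// followed by the regular and code large-object spaces.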
OldGenerationMemoryChunkIterator::OldGenerationMemoryChunkIterator(Heap* heap)
    : heap_(heap),
      state_(kOldSpaceState),
      old_iterator_(heap->old_space()->begin()),
      code_iterator_(heap->code_space()->begin()),
      map_iterator_(heap->map_space()->begin()),
      lo_iterator_(heap->lo_space()->begin()),
      code_lo_iterator_(heap->code_lo_space()->begin()) {}

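// Returns the next chunk, or nullptr once every space has been exhausted.
// When one space's iterator is done, control falls through to the next state.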
MemoryChunk* OldGenerationMemoryChunkIterator::next() {
  switch (state_) {
    case kOldSpaceState: {
      if (old_iterator_ != heap_->old_space()->end()) return *(old_iterator_++);
      state_ = kMapState;
      V8_FALLTHROUGH;
    }
    case kMapState: {
      if (map_iterator_ != heap_->map_space()->end()) return *(map_iterator_++);
      state_ = kCodeState;
      V8_FALLTHROUGH;
    }
    case kCodeState: {
      if (code_iterator_ != heap_->code_space()->end())
        return *(code_iterator_++);
      state_ = kLargeObjectState;
      V8_FALLTHROUGH;
    }
    case kLargeObjectState: {
      if (lo_iterator_ != heap_->lo_space()->end()) return *(lo_iterator_++);
      state_ = kCodeLargeObjectState;
      V8_FALLTHROUGH;
    }
    case kCodeLargeObjectState: {
      if (code_lo_iterator_ != heap_->code_lo_space()->end())
        return *(code_lo_iterator_++);
      state_ = kFinishedState;
      V8_FALLTHROUGH;
    }
    case kFinishedState:
      return nullptr;
    default:
      break;
  }
  UNREACHABLE();
}

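// Bump-pointer allocation within the buffer. If |alignment| requires padding,
// a filler object is placed before the returned object; the allocation fails
// with a retry result when the remaining buffer space is too small.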
AllocationResult LocalAllocationBuffer::AllocateRawAligned(
    int size_in_bytes, AllocationAlignment alignment) {
  Address current_top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(current_top, alignment);

  Address new_top = current_top + filler_size + size_in_bytes;
  if (new_top > allocation_info_.limit()) return AllocationResult::Retry();

  allocation_info_.set_top(new_top);
  if (filler_size > 0) {
    return Heap::PrecedeWithFiller(ReadOnlyRoots(heap_),
                                   HeapObject::FromAddress(current_top),
                                   filler_size);
  }

  return AllocationResult(HeapObject::FromAddress(current_top));
}

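// Wraps a successful allocation of |size| bytes in a buffer spanning the
// allocated region; a retry result produces an invalid buffer.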
LocalAllocationBuffer LocalAllocationBuffer::FromResult(Heap* heap,
                                                        AllocationResult result,
                                                        intptr_t size) {
  if (result.IsRetry()) return InvalidBuffer();
  HeapObject obj;
  bool ok = result.To(&obj);
  USE(ok);
  DCHECK(ok);
  Address top = HeapObject::cast(obj).address();
  return LocalAllocationBuffer(heap, LinearAllocationArea(top, top + size));
}

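// Merges the unused region of |other| into this buffer if the two are
// contiguous, i.e. this buffer's top equals |other|'s limit; on success
// |other| is reset and becomes invalid.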
bool LocalAllocationBuffer::TryMerge(LocalAllocationBuffer* other) {
  if (allocation_info_.top() == other->allocation_info_.limit()) {
    allocation_info_.set_top(other->allocation_info_.top());
    other->allocation_info_.Reset(kNullAddress, kNullAddress);
    return true;
  }
  return false;
}

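// Undoes the most recent allocation: if |object| of |object_size| bytes ends
// exactly at the buffer's top, the top is lowered to the object's address.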
bool LocalAllocationBuffer::TryFreeLast(HeapObject object, int object_size) {
  if (IsValid()) {
    const Address object_address = object.address();
    if ((allocation_info_.top() - object_size) == object_address) {
      allocation_info_.set_top(object_address);
      return true;
    }
  }
  return false;
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_SPACES_INL_H_