// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "test/cctest/heap/heap-utils.h"

#include "src/execution/isolate.h"
#include "src/heap/factory.h"
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact.h"
#include "src/heap/memory-chunk.h"
#include "src/heap/safepoint.h"
#include "test/cctest/cctest.h"

namespace v8 {
namespace internal {
namespace heap {

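// Helpers that trigger a young-generation GC and a full mark-compact GC,
// respectively, via the cctest harness.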
void InvokeScavenge() { CcTest::CollectGarbage(i::NEW_SPACE); }

void InvokeMarkSweep() { CcTest::CollectAllGarbage(); }

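// Runs two full GCs, finishes sweeping, and marks all existing old-space pages
// as never-allocate, so that subsequent old-space allocations land on fresh
// pages.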
void SealCurrentObjects(Heap* heap) {
  CcTest::CollectAllGarbage();
  CcTest::CollectAllGarbage();
  heap->mark_compact_collector()->EnsureSweepingCompleted();
  heap->old_space()->FreeLinearAllocationArea();
  for (Page* page : *heap->old_space()) {
    page->MarkNeverAllocateForTesting();
  }
}

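// Returns the length of a FixedArray whose total object size is `size` bytes,
// capped at FixedArray::kMaxRegularLength.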
int FixedArrayLenFromSize(int size) {
  return Min((size - FixedArray::kHeaderSize) / kTaggedSize,
             FixedArray::kMaxRegularLength);
}

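// Fills a fresh old-space page with 128-byte FixedArrays, sizing the last
// array so that `remainder` bytes of the page stay unallocated. Returns
// handles that keep the arrays alive.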
std::vector<Handle<FixedArray>> FillOldSpacePageWithFixedArrays(Heap* heap,
                                                                int remainder) {
  PauseAllocationObserversScope pause_observers(heap);
  std::vector<Handle<FixedArray>> handles;
  Isolate* isolate = heap->isolate();
  const int kArraySize = 128;
  const int kArrayLen = heap::FixedArrayLenFromSize(kArraySize);
  Handle<FixedArray> array;
  int allocated = 0;
  do {
    if (allocated + kArraySize * 2 >
        static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage())) {
      int size =
          kArraySize * 2 -
          ((allocated + kArraySize * 2) -
           static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage())) -
          remainder;
      int last_array_len = heap::FixedArrayLenFromSize(size);
      array = isolate->factory()->NewFixedArray(last_array_len,
                                                AllocationType::kOld);
      CHECK_EQ(size, array->Size());
      allocated += array->Size() + remainder;
    } else {
      array =
          isolate->factory()->NewFixedArray(kArrayLen, AllocationType::kOld);
      allocated += array->Size();
      CHECK_EQ(kArraySize, array->Size());
    }
    if (handles.empty()) {
      // Check that allocations started on a new page.
      CHECK_EQ(array->address(), Page::FromHeapObject(*array)->area_start());
    }
    handles.push_back(array);
  } while (allocated <
           static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage()));
  return handles;
}

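// Consumes `padding_size` bytes of the linear allocation area of the given
// space by allocating FixedArrays of at most `object_size` bytes; a filler
// object covers any tail too small for another array.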
std::vector<Handle<FixedArray>> CreatePadding(Heap* heap, int padding_size,
                                              AllocationType allocation,
                                              int object_size) {
  std::vector<Handle<FixedArray>> handles;
  Isolate* isolate = heap->isolate();
  int allocate_memory;
  int length;
  int free_memory = padding_size;
  if (allocation == i::AllocationType::kOld) {
    heap->old_space()->FreeLinearAllocationArea();
    int overall_free_memory = static_cast<int>(heap->old_space()->Available());
    CHECK(padding_size <= overall_free_memory || overall_free_memory == 0);
  } else {
    int overall_free_memory =
        static_cast<int>(*heap->new_space()->allocation_limit_address() -
                         *heap->new_space()->allocation_top_address());
    CHECK(padding_size <= overall_free_memory || overall_free_memory == 0);
  }
  while (free_memory > 0) {
    if (free_memory > object_size) {
      allocate_memory = object_size;
      length = FixedArrayLenFromSize(allocate_memory);
    } else {
      allocate_memory = free_memory;
      length = FixedArrayLenFromSize(allocate_memory);
      if (length <= 0) {
        // Not enough room to create another FixedArray, so create a filler.
        if (allocation == i::AllocationType::kOld) {
          heap->CreateFillerObjectAt(
              *heap->old_space()->allocation_top_address(), free_memory,
              ClearRecordedSlots::kNo);
        } else {
          heap->CreateFillerObjectAt(
              *heap->new_space()->allocation_top_address(), free_memory,
              ClearRecordedSlots::kNo);
        }
        break;
      }
    }
    handles.push_back(isolate->factory()->NewFixedArray(length, allocation));
    CHECK((allocation == AllocationType::kYoung &&
           heap->new_space()->Contains(*handles.back())) ||
          (allocation == AllocationType::kOld &&
           heap->InOldSpace(*handles.back())));
    free_memory -= handles.back()->Size();
  }
  return handles;
}

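// Fills the remainder of the current new-space page with FixedArrays. Returns
// false if the page was already full.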
bool FillCurrentPage(v8::internal::NewSpace* space,
                     std::vector<Handle<FixedArray>>* out_handles) {
  return heap::FillCurrentPageButNBytes(space, 0, out_handles);
}

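// Fills the current new-space page except for `extra_bytes` bytes. Returns
// false if there was nothing left to allocate.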
bool FillCurrentPageButNBytes(v8::internal::NewSpace* space, int extra_bytes,
                              std::vector<Handle<FixedArray>>* out_handles) {
  PauseAllocationObserversScope pause_observers(space->heap());
  // We cannot rely on `space->limit()` to point to the end of the current page
  // in the case where inline allocations are disabled; it actually points to
  // the current allocation pointer.
  DCHECK_IMPLIES(space->heap()->inline_allocation_disabled(),
                 space->limit() == space->top());
  int space_remaining =
      static_cast<int>(space->to_space().page_high() - space->top());
  CHECK(space_remaining >= extra_bytes);
  int new_linear_size = space_remaining - extra_bytes;
  if (new_linear_size == 0) return false;
  std::vector<Handle<FixedArray>> handles = heap::CreatePadding(
      space->heap(), new_linear_size, i::AllocationType::kYoung);
  if (out_handles != nullptr) {
    out_handles->insert(out_handles->end(), handles.begin(), handles.end());
  }
  return true;
}

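// Fills new space completely: keeps filling the current page and adding fresh
// pages until no more pages can be added.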
void SimulateFullSpace(v8::internal::NewSpace* space,
                       std::vector<Handle<FixedArray>>* out_handles) {
  while (heap::FillCurrentPage(space, out_handles) || space->AddFreshPage()) {
  }
}

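// Finishes any pending sweeping, starts incremental marking if it is not
// already running, and, when `force_completion` is set, steps marking until it
// is complete.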
void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) {
  const double kStepSizeInMs = 100;
  CHECK(FLAG_incremental_marking);
  i::IncrementalMarking* marking = heap->incremental_marking();
  i::MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  if (marking->IsSweeping()) {
    marking->FinalizeSweeping();
  }
  CHECK(marking->IsMarking() || marking->IsStopped() || marking->IsComplete());
  if (marking->IsStopped()) {
    heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                  i::GarbageCollectionReason::kTesting);
  }
  CHECK(marking->IsMarking() || marking->IsComplete());
  if (!force_completion) return;

  while (!marking->IsComplete()) {
    marking->Step(kStepSizeInMs, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                  i::StepOrigin::kV8);
    if (marking->IsReadyToOverApproximateWeakClosure()) {
      SafepointScope scope(heap);
      marking->FinalizeIncrementally();
    }
  }
  CHECK(marking->IsComplete());
}

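// Makes the paged space look full by finishing any pending sweeping, dropping
// the linear allocation area, and resetting the free list.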
void SimulateFullSpace(v8::internal::PagedSpace* space) {
  CodeSpaceMemoryModificationScope modification_scope(space->heap());
  i::MarkCompactCollector* collector = space->heap()->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  space->FreeLinearAllocationArea();
  space->ResetFreeList();
}

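// Prevents reuse of the space's currently free memory: drops the linear
// allocation area and marks every existing page as never-allocate.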
void AbandonCurrentlyFreeMemory(PagedSpace* space) {
  space->FreeLinearAllocationArea();
  for (Page* page : *space) {
    page->MarkNeverAllocateForTesting();
  }
}

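// Collects garbage in the given space and waits for any concurrent sweeping to
// finish.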
void GcAndSweep(Heap* heap, AllocationSpace space) {
  heap->CollectGarbage(space, GarbageCollectionReason::kTesting);
  if (heap->mark_compact_collector()->sweeping_in_progress()) {
    heap->mark_compact_collector()->EnsureSweepingCompleted();
  }
}

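// Flags `page` as an evacuation candidate for testing. If the owning space's
// linear allocation area currently lies on the page, it is closed off with a
// filler and released so the page stays iterable.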
void ForceEvacuationCandidate(Page* page) {
  CHECK(FLAG_manual_evacuation_candidates_selection);
  page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
  PagedSpace* space = static_cast<PagedSpace*>(page->owner());
  DCHECK_NOT_NULL(space);
  Address top = space->top();
  Address limit = space->limit();
  if (top < limit && Page::FromAllocationAreaAddress(top) == page) {
    // Create filler object to keep page iterable if it was iterable.
    int remaining = static_cast<int>(limit - top);
    space->heap()->CreateFillerObjectAt(top, remaining,
                                        ClearRecordedSlots::kNo);
    space->FreeLinearAllocationArea();
  }
}

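// Checks that `object` sits in the generation where new allocations are
// expected: the old generation under --single-generation, the young generation
// otherwise.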
bool InCorrectGeneration(HeapObject object) {
  return FLAG_single_generation ? !i::Heap::InYoungGeneration(object)
                                : i::Heap::InYoungGeneration(object);
}

}  // namespace heap
}  // namespace internal
}  // namespace v8