Commit fdd9f6b9 authored by mlippautz, committed by Commit bot

[heap] Harden heap-related cctests

- Move usable functions into proper heap-utils.h/.cc files and remove the
  utils-inl.h file
- Fix assumptions across the board that rely on behavior that is not
  invariant

This is a requirement for modifying page size.

BUG=chromium:581412
LOG=N
R=ulan@chromium.org

Review-Url: https://codereview.chromium.org/1999753002
Cr-Commit-Position: refs/heads/master@{#36410}
parent a19404f0
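For context, the relocated helpers now live in the v8::internal::heap namespace, so callers include heap-utils.h and qualify each call. A minimal sketch of the new call pattern (the test name and body are hypothetical, not part of this commit):

#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-utils.h"

// Hypothetical test: seal the existing heap contents, then exhaust old
// space so the next old-space allocation must expand the space or fail.
TEST(ExampleUsesHeapUtils) {
  CcTest::InitializeVM();
  i::Heap* heap = CcTest::heap();
  i::heap::SealCurrentObjects(heap);
  i::heap::SimulateFullSpace(heap->old_space());
}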
......@@ -135,6 +135,26 @@ GCTracer::GCTracer(Heap* heap)
previous_ = previous_incremental_mark_compactor_event_ = current_;
}
void GCTracer::ResetForTesting() {
cumulative_incremental_marking_steps_ = 0.0;
cumulative_incremental_marking_bytes_ = 0.0;
cumulative_incremental_marking_duration_ = 0.0;
cumulative_pure_incremental_marking_duration_ = 0.0;
longest_incremental_marking_step_ = 0.0;
cumulative_incremental_marking_finalization_steps_ = 0.0;
cumulative_incremental_marking_finalization_duration_ = 0.0;
longest_incremental_marking_finalization_step_ = 0.0;
cumulative_marking_duration_ = 0.0;
cumulative_sweeping_duration_ = 0.0;
allocation_time_ms_ = 0.0;
new_space_allocation_counter_bytes_ = 0.0;
old_generation_allocation_counter_bytes_ = 0.0;
allocation_duration_since_gc_ = 0.0;
new_space_allocation_in_bytes_since_gc_ = 0.0;
old_generation_allocation_in_bytes_since_gc_ = 0.0;
combined_mark_compact_speed_cache_ = 0.0;
start_counter_ = 0;
}
void GCTracer::Start(GarbageCollector collector, const char* gc_reason,
const char* collector_reason) {
......
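ResetForTesting is made public in the header hunk below so cctests can clear accumulated tracer statistics between scenarios. A minimal sketch of the intended use, assuming Heap exposes its tracer via Heap::tracer() as in this revision (the helper name is hypothetical):

// Hypothetical helper: start a test scenario from clean GC statistics.
static void StartFromCleanTracerState(i::Heap* heap) {
  heap->tracer()->ResetForTesting();
}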
......@@ -372,6 +372,8 @@ class GCTracer {
static double AverageSpeed(const RingBuffer<BytesAndDuration>& buffer,
const BytesAndDuration& initial, double time_ms);
void ResetForTesting();
private:
// Print one detailed trace line in name=value format.
// TODO(ernstm): Move to Heap.
......
......@@ -382,6 +382,7 @@ Page* Page::FromAnyPointerAddress(Heap* heap, Address addr) {
}
void Page::MarkNeverAllocateForTesting() {
DCHECK(this->owner()->identity() != NEW_SPACE);
DCHECK(!IsFlagSet(NEVER_ALLOCATE_ON_PAGE));
SetFlag(NEVER_ALLOCATE_ON_PAGE);
reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);
......
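The new DCHECK guards against marking a page NEVER_ALLOCATE_ON_PAGE twice. Together with the free-list eviction, this lets heap::SealCurrentObjects (added in heap-utils.cc below) replace the page loop that tests previously inlined; the pattern that disappears from the tests looks like this:

// Old inlined pattern, now wrapped by heap::SealCurrentObjects(heap),
// which additionally runs two full GCs and finishes sweeping first:
PageIterator it(heap->old_space());
while (it.has_next()) {
  it.next()->MarkNeverAllocateForTesting();
}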
......@@ -99,6 +99,8 @@
'gay-precision.cc',
'gay-shortest.cc',
'heap/heap-tester.h',
'heap/heap-utils.cc',
'heap/heap-utils.h',
'heap/test-alloc.cc',
'heap/test-compaction.cc',
'heap/test-heap.cc',
......@@ -106,7 +108,6 @@
'heap/test-lab.cc',
'heap/test-mark-compact.cc',
'heap/test-spaces.cc',
'heap/utils-inl.h',
'print-extension.cc',
'profiler-extension.cc',
'test-accessors.cc',
......
// Copyright 2015 the V8 project authors. All rights reserved.
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef HEAP_UTILS_H_
#define HEAP_UTILS_H_
#include "test/cctest/heap/heap-utils.h"
#include "src/factory.h"
#include "src/heap/heap-inl.h"
......@@ -11,18 +10,29 @@
#include "src/heap/mark-compact.h"
#include "src/isolate.h"
namespace v8 {
namespace internal {
namespace heap {
void SealCurrentObjects(Heap* heap) {
heap->CollectAllGarbage();
heap->CollectAllGarbage();
heap->mark_compact_collector()->EnsureSweepingCompleted();
PageIterator it(heap->old_space());
heap->old_space()->EmptyAllocationInfo();
while (it.has_next()) {
Page* page = it.next();
page->MarkNeverAllocateForTesting();
}
}
static int LenFromSize(int size) {
int FixedArrayLenFromSize(int size) {
return (size - FixedArray::kHeaderSize) / kPointerSize;
}
static inline std::vector<Handle<FixedArray>> CreatePadding(
Heap* heap, int padding_size, PretenureFlag tenure,
int object_size = Page::kMaxRegularHeapObjectSize) {
std::vector<Handle<FixedArray>> CreatePadding(Heap* heap, int padding_size,
PretenureFlag tenure,
int object_size) {
std::vector<Handle<FixedArray>> handles;
Isolate* isolate = heap->isolate();
int allocate_memory;
......@@ -42,14 +52,17 @@ static inline std::vector<Handle<FixedArray>> CreatePadding(
while (free_memory > 0) {
if (free_memory > object_size) {
allocate_memory = object_size;
length = LenFromSize(allocate_memory);
length = FixedArrayLenFromSize(allocate_memory);
} else {
allocate_memory = free_memory;
length = LenFromSize(allocate_memory);
length = FixedArrayLenFromSize(allocate_memory);
if (length <= 0) {
// Not enough room to create another fixed array. Let's create a filler.
heap->CreateFillerObjectAt(*heap->old_space()->allocation_top_address(),
free_memory, ClearRecordedSlots::kNo);
if (free_memory > (2 * kPointerSize)) {
heap->CreateFillerObjectAt(
*heap->old_space()->allocation_top_address(), free_memory,
ClearRecordedSlots::kNo);
}
break;
}
}
......@@ -61,66 +74,46 @@ static inline std::vector<Handle<FixedArray>> CreatePadding(
return handles;
}
// Helper function that simulates a full new-space in the heap.
static inline bool FillUpOnePage(
v8::internal::NewSpace* space,
std::vector<Handle<FixedArray>>* out_handles = nullptr) {
void AllocateAllButNBytes(v8::internal::NewSpace* space, int extra_bytes,
std::vector<Handle<FixedArray>>* out_handles) {
space->DisableInlineAllocationSteps();
int space_remaining = static_cast<int>(*space->allocation_limit_address() -
*space->allocation_top_address());
if (space_remaining == 0) return false;
CHECK(space_remaining >= extra_bytes);
int new_linear_size = space_remaining - extra_bytes;
if (new_linear_size == 0) return;
std::vector<Handle<FixedArray>> handles =
CreatePadding(space->heap(), space_remaining, i::NOT_TENURED);
heap::CreatePadding(space->heap(), new_linear_size, i::NOT_TENURED);
if (out_handles != nullptr)
out_handles->insert(out_handles->end(), handles.begin(), handles.end());
return true;
}
void FillCurrentPage(v8::internal::NewSpace* space,
std::vector<Handle<FixedArray>>* out_handles) {
heap::AllocateAllButNBytes(space, 0, out_handles);
}
// Helper function that simulates filling up the new space in the heap.
static inline void AllocateAllButNBytes(
v8::internal::NewSpace* space, int extra_bytes,
std::vector<Handle<FixedArray>>* out_handles = nullptr) {
bool FillUpOnePage(v8::internal::NewSpace* space,
std::vector<Handle<FixedArray>>* out_handles) {
space->DisableInlineAllocationSteps();
int space_remaining = static_cast<int>(*space->allocation_limit_address() -
*space->allocation_top_address());
CHECK(space_remaining >= extra_bytes);
int new_linear_size = space_remaining - extra_bytes;
if (new_linear_size == 0) return;
if (space_remaining == 0) return false;
std::vector<Handle<FixedArray>> handles =
CreatePadding(space->heap(), new_linear_size, i::NOT_TENURED);
heap::CreatePadding(space->heap(), space_remaining, i::NOT_TENURED);
if (out_handles != nullptr)
out_handles->insert(out_handles->end(), handles.begin(), handles.end());
return true;
}
static inline void FillCurrentPage(
v8::internal::NewSpace* space,
std::vector<Handle<FixedArray>>* out_handles = nullptr) {
AllocateAllButNBytes(space, 0, out_handles);
}
static inline void SimulateFullSpace(
v8::internal::NewSpace* space,
std::vector<Handle<FixedArray>>* out_handles = nullptr) {
FillCurrentPage(space, out_handles);
while (FillUpOnePage(space, out_handles) || space->AddFreshPage()) {
void SimulateFullSpace(v8::internal::NewSpace* space,
std::vector<Handle<FixedArray>>* out_handles) {
heap::FillCurrentPage(space, out_handles);
while (heap::FillUpOnePage(space, out_handles) || space->AddFreshPage()) {
}
}
// Helper function that simulates a full old-space in the heap.
static inline void SimulateFullSpace(v8::internal::PagedSpace* space) {
space->EmptyAllocationInfo();
space->ResetFreeList();
space->ClearStats();
}
// Helper function that simulates many incremental marking steps until
// marking is completed.
static inline void SimulateIncrementalMarking(i::Heap* heap,
bool force_completion = true) {
void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) {
i::MarkCompactCollector* collector = heap->mark_compact_collector();
i::IncrementalMarking* marking = heap->incremental_marking();
if (collector->sweeping_in_progress()) {
......@@ -142,7 +135,12 @@ static inline void SimulateIncrementalMarking(i::Heap* heap,
CHECK(marking->IsComplete());
}
void SimulateFullSpace(v8::internal::PagedSpace* space) {
space->EmptyAllocationInfo();
space->ResetFreeList();
space->ClearStats();
}
} // namespace heap
} // namespace internal
} // namespace v8
#endif // HEAP_UTILS_H_
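The new-space SimulateFullSpace above works page by page: FillCurrentPage pads out the current linear allocation area, then the loop keeps calling FillUpOnePage (which returns false once the page has no room left) and AddFreshPage until neither can make progress. A hedged usage sketch (variable names are illustrative):

// Force the next new-space allocation to trigger a scavenge; the padding
// arrays stay alive through out_handles for the test's lifetime.
std::vector<i::Handle<i::FixedArray>> padding;
i::heap::SimulateFullSpace(CcTest::heap()->new_space(), &padding);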
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef HEAP_HEAP_UTILS_H_
#define HEAP_HEAP_UTILS_H_
#include "src/heap/heap.h"
namespace v8 {
namespace internal {
namespace heap {
void SealCurrentObjects(Heap* heap);
int FixedArrayLenFromSize(int size);
std::vector<Handle<FixedArray>> CreatePadding(
Heap* heap, int padding_size, PretenureFlag tenure,
int object_size = Page::kMaxRegularHeapObjectSize);
void AllocateAllButNBytes(
v8::internal::NewSpace* space, int extra_bytes,
std::vector<Handle<FixedArray>>* out_handles = nullptr);
void FillCurrentPage(v8::internal::NewSpace* space,
std::vector<Handle<FixedArray>>* out_handles = nullptr);
// Helper function that simulates a full new-space in the heap.
bool FillUpOnePage(v8::internal::NewSpace* space,
std::vector<Handle<FixedArray>>* out_handles = nullptr);
void SimulateFullSpace(v8::internal::NewSpace* space,
std::vector<Handle<FixedArray>>* out_handles = nullptr);
// Helper function that simulates many incremental marking steps until
// marking is completed.
void SimulateIncrementalMarking(i::Heap* heap, bool force_completion = true);
// Helper function that simulates a full old-space in the heap.
void SimulateFullSpace(v8::internal::PagedSpace* space);
} // namespace heap
} // namespace internal
} // namespace v8
#endif // HEAP_HEAP_UTILS_H_
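As a worked example of the size arithmetic (assuming a 64-bit build where kPointerSize is 8 and FixedArray::kHeaderSize is two words, 16 bytes): FixedArrayLenFromSize(4096) yields (4096 - 16) / 8 = 510 element slots, and CreatePadding carves padding_size bytes into FixedArrays of at most object_size bytes each.

// Illustrative only; the result depends on the build configuration.
int len = i::heap::FixedArrayLenFromSize(4096);  // (4096 - 16) / 8 == 510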
......@@ -31,7 +31,7 @@
#include "src/accessors.h"
#include "src/api.h"
#include "test/cctest/heap/heap-tester.h"
#include "test/cctest/heap/utils-inl.h"
#include "test/cctest/heap/heap-utils.h"
using namespace v8::internal;
......@@ -52,11 +52,11 @@ AllocationResult v8::internal::HeapTester::AllocateAfterFailures() {
heap->CopyJSObject(JSObject::cast(object)).ToObjectChecked();
// Old data space.
SimulateFullSpace(heap->old_space());
heap::SimulateFullSpace(heap->old_space());
heap->AllocateByteArray(100, TENURED).ToObjectChecked();
// Old pointer space.
SimulateFullSpace(heap->old_space());
heap::SimulateFullSpace(heap->old_space());
heap->AllocateFixedArray(10000, TENURED).ToObjectChecked();
// Large object space.
......@@ -72,12 +72,12 @@ AllocationResult v8::internal::HeapTester::AllocateAfterFailures() {
kLargeObjectSpaceFillerLength, TENURED).ToObjectChecked();
// Map space.
SimulateFullSpace(heap->map_space());
heap::SimulateFullSpace(heap->map_space());
int instance_size = JSObject::kHeaderSize;
heap->AllocateMap(JS_OBJECT_TYPE, instance_size).ToObjectChecked();
// Test that we can allocate in old pointer space and code space.
SimulateFullSpace(heap->code_space());
heap::SimulateFullSpace(heap->code_space());
heap->AllocateFixedArray(100, TENURED).ToObjectChecked();
heap->CopyCode(CcTest::i_isolate()->builtins()->builtin(
Builtins::kIllegal)).ToObjectChecked();
......
......@@ -4,12 +4,14 @@
#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-tester.h"
#include "test/cctest/heap/utils-inl.h"
#include "test/cctest/heap/heap-utils.h"
namespace v8 {
namespace internal {
static void CheckInvariantsOfAbortedPage(Page* page) {
namespace {
void CheckInvariantsOfAbortedPage(Page* page) {
// Check invariants:
// 1) Markbits are cleared
// 2) The page is not marked as evacuation candidate anymore
......@@ -19,6 +21,14 @@ static void CheckInvariantsOfAbortedPage(Page* page) {
CHECK(!page->IsFlagSet(Page::COMPACTION_WAS_ABORTED));
}
void CheckAllObjectsOnPage(std::vector<Handle<FixedArray>>& handles,
Page* page) {
for (auto& fixed_array : handles) {
CHECK(Page::FromAddress(fixed_array->address()) == page);
}
}
} // namespace
HEAP_TEST(CompactionFullAbortedPage) {
// Test the scenario where we reach OOM during compaction and the whole page
......@@ -33,20 +43,19 @@ HEAP_TEST(CompactionFullAbortedPage) {
Heap* heap = isolate->heap();
{
HandleScope scope1(isolate);
PageIterator it(heap->old_space());
while (it.has_next()) {
it.next()->MarkNeverAllocateForTesting();
}
heap::SealCurrentObjects(heap);
{
HandleScope scope2(isolate);
CHECK(heap->old_space()->Expand());
auto compaction_page_handles =
CreatePadding(heap, Page::kAllocatableMemory, TENURED);
heap::CreatePadding(heap, Page::kAllocatableMemory, TENURED);
Page* to_be_aborted_page =
Page::FromAddress(compaction_page_handles.front()->address());
to_be_aborted_page->SetFlag(
MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
CheckAllObjectsOnPage(compaction_page_handles, to_be_aborted_page);
heap->set_force_oom(true);
heap->CollectAllGarbage();
......@@ -72,29 +81,29 @@ HEAP_TEST(CompactionPartiallyAbortedPage) {
FLAG_concurrent_sweeping = false;
FLAG_manual_evacuation_candidates_selection = true;
const int object_size = 128 * KB;
const int objects_per_page = 10;
const int object_size = Page::kAllocatableMemory / objects_per_page;
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();
{
HandleScope scope1(isolate);
PageIterator it(heap->old_space());
while (it.has_next()) {
it.next()->MarkNeverAllocateForTesting();
}
heap::SealCurrentObjects(heap);
{
HandleScope scope2(isolate);
// Fill another page with objects of size {object_size} (last one is
// properly adjusted).
CHECK(heap->old_space()->Expand());
auto compaction_page_handles =
CreatePadding(heap, Page::kAllocatableMemory, TENURED, object_size);
auto compaction_page_handles = heap::CreatePadding(
heap, Page::kAllocatableMemory, TENURED, object_size);
Page* to_be_aborted_page =
Page::FromAddress(compaction_page_handles.front()->address());
to_be_aborted_page->SetFlag(
MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
CheckAllObjectsOnPage(compaction_page_handles, to_be_aborted_page);
{
// Add another page that is filled with {num_objects} objects of size
......@@ -102,8 +111,9 @@ HEAP_TEST(CompactionPartiallyAbortedPage) {
HandleScope scope3(isolate);
CHECK(heap->old_space()->Expand());
const int num_objects = 3;
std::vector<Handle<FixedArray>> page_to_fill_handles = CreatePadding(
heap, object_size * num_objects, TENURED, object_size);
std::vector<Handle<FixedArray>> page_to_fill_handles =
heap::CreatePadding(heap, object_size * num_objects, TENURED,
object_size);
Page* page_to_fill =
Page::FromAddress(page_to_fill_handles.front()->address());
......@@ -145,7 +155,8 @@ HEAP_TEST(CompactionPartiallyAbortedPageIntraAbortedPointers) {
FLAG_concurrent_sweeping = false;
FLAG_manual_evacuation_candidates_selection = true;
const int object_size = 128 * KB;
const int objects_per_page = 10;
const int object_size = Page::kAllocatableMemory / objects_per_page;
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
......@@ -155,10 +166,7 @@ HEAP_TEST(CompactionPartiallyAbortedPageIntraAbortedPointers) {
Handle<FixedArray> root_array =
isolate->factory()->NewFixedArray(10, TENURED);
PageIterator it(heap->old_space());
while (it.has_next()) {
it.next()->MarkNeverAllocateForTesting();
}
heap::SealCurrentObjects(heap);
Page* to_be_aborted_page = nullptr;
{
......@@ -167,7 +175,8 @@ HEAP_TEST(CompactionPartiallyAbortedPageIntraAbortedPointers) {
// properly adjusted).
CHECK(heap->old_space()->Expand());
std::vector<Handle<FixedArray>> compaction_page_handles =
CreatePadding(heap, Page::kAllocatableMemory, TENURED, object_size);
heap::CreatePadding(heap, Page::kAllocatableMemory, TENURED,
object_size);
to_be_aborted_page =
Page::FromAddress(compaction_page_handles.front()->address());
to_be_aborted_page->SetFlag(
......@@ -176,8 +185,8 @@ HEAP_TEST(CompactionPartiallyAbortedPageIntraAbortedPointers) {
compaction_page_handles[i]->set(0, *compaction_page_handles[i - 1]);
}
root_array->set(0, *compaction_page_handles.back());
CheckAllObjectsOnPage(compaction_page_handles, to_be_aborted_page);
}
{
// Add another page that is filled with {num_objects} objects of size
// {object_size}.
......@@ -186,7 +195,7 @@ HEAP_TEST(CompactionPartiallyAbortedPageIntraAbortedPointers) {
const int num_objects = 2;
int used_memory = object_size * num_objects;
std::vector<Handle<FixedArray>> page_to_fill_handles =
CreatePadding(heap, used_memory, TENURED, object_size);
heap::CreatePadding(heap, used_memory, TENURED, object_size);
Page* page_to_fill =
Page::FromAddress(page_to_fill_handles.front()->address());
......@@ -233,7 +242,8 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithStoreBufferEntries) {
FLAG_concurrent_sweeping = false;
FLAG_manual_evacuation_candidates_selection = true;
const int object_size = 128 * KB;
const int objects_per_page = 10;
const int object_size = Page::kAllocatableMemory / objects_per_page;
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
......@@ -242,10 +252,7 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithStoreBufferEntries) {
HandleScope scope1(isolate);
Handle<FixedArray> root_array =
isolate->factory()->NewFixedArray(10, TENURED);
PageIterator it(heap->old_space());
while (it.has_next()) {
it.next()->MarkNeverAllocateForTesting();
}
heap::SealCurrentObjects(heap);
Page* to_be_aborted_page = nullptr;
{
......@@ -253,8 +260,8 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithStoreBufferEntries) {
// Fill another page with objects of size {object_size} (last one is
// properly adjusted).
CHECK(heap->old_space()->Expand());
auto compaction_page_handles =
CreatePadding(heap, Page::kAllocatableMemory, TENURED, object_size);
auto compaction_page_handles = heap::CreatePadding(
heap, Page::kAllocatableMemory, TENURED, object_size);
// Sanity check that we have enough space for linking up arrays.
CHECK_GE(compaction_page_handles.front()->length(), 2);
to_be_aborted_page =
......@@ -270,6 +277,7 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithStoreBufferEntries) {
isolate->factory()->NewFixedArray(1, NOT_TENURED);
CHECK(heap->InNewSpace(*new_space_array));
compaction_page_handles.front()->set(1, *new_space_array);
CheckAllObjectsOnPage(compaction_page_handles, to_be_aborted_page);
}
{
......@@ -280,7 +288,7 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithStoreBufferEntries) {
const int num_objects = 2;
int used_memory = object_size * num_objects;
std::vector<Handle<FixedArray>> page_to_fill_handles =
CreatePadding(heap, used_memory, TENURED, object_size);
heap::CreatePadding(heap, used_memory, TENURED, object_size);
Page* page_to_fill =
Page::FromAddress(page_to_fill_handles.front()->address());
......
This diff is collapsed.
......@@ -19,8 +19,7 @@
#include "src/full-codegen/full-codegen.h"
#include "src/global-handles.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/utils-inl.h"
#include "test/cctest/heap/heap-utils.h"
using v8::IdleTask;
using v8::Task;
......@@ -120,7 +119,7 @@ TEST(IncrementalMarkingUsingIdleTasks) {
v8::Platform* old_platform = i::V8::GetCurrentPlatform();
MockPlatform platform(old_platform);
i::V8::SetPlatformForTesting(&platform);
SimulateFullSpace(CcTest::heap()->old_space());
i::heap::SimulateFullSpace(CcTest::heap()->old_space());
i::IncrementalMarking* marking = CcTest::heap()->incremental_marking();
marking->Stop();
marking->Start();
......@@ -145,7 +144,7 @@ TEST(IncrementalMarkingUsingIdleTasksAfterGC) {
v8::Platform* old_platform = i::V8::GetCurrentPlatform();
MockPlatform platform(old_platform);
i::V8::SetPlatformForTesting(&platform);
SimulateFullSpace(CcTest::heap()->old_space());
i::heap::SimulateFullSpace(CcTest::heap()->old_space());
CcTest::heap()->CollectAllGarbage();
i::IncrementalMarking* marking = CcTest::heap()->incremental_marking();
marking->Stop();
......@@ -171,7 +170,7 @@ TEST(IncrementalMarkingUsingDelayedTasks) {
v8::Platform* old_platform = i::V8::GetCurrentPlatform();
MockPlatform platform(old_platform);
i::V8::SetPlatformForTesting(&platform);
SimulateFullSpace(CcTest::heap()->old_space());
i::heap::SimulateFullSpace(CcTest::heap()->old_space());
i::IncrementalMarking* marking = CcTest::heap()->incremental_marking();
marking->Stop();
marking->Start();
......
......@@ -43,8 +43,7 @@
#include "src/global-handles.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-tester.h"
#include "test/cctest/heap/utils-inl.h"
#include "test/cctest/heap/heap-utils.h"
using namespace v8::internal;
using v8::Just;
......@@ -76,58 +75,49 @@ TEST(MarkingDeque) {
DeleteArray(mem);
}
HEAP_TEST(Promotion) {
TEST(Promotion) {
CcTest::InitializeVM();
Heap* heap = CcTest::heap();
heap->ConfigureHeap(1, 1, 1, 0);
v8::HandleScope sc(CcTest::isolate());
// Allocate a fixed array in the new space.
int array_length =
(Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) /
(4 * kPointerSize);
Object* obj = heap->AllocateFixedArray(array_length).ToObjectChecked();
Handle<FixedArray> array(FixedArray::cast(obj));
Isolate* isolate = CcTest::i_isolate();
{
v8::HandleScope sc(CcTest::isolate());
Heap* heap = isolate->heap();
// Array should be in the new space.
CHECK(heap->InSpace(*array, NEW_SPACE));
heap::SealCurrentObjects(heap);
// Call mark compact GC, so array becomes an old object.
heap->CollectAllGarbage();
heap->CollectAllGarbage();
int array_length =
heap::FixedArrayLenFromSize(Page::kMaxRegularHeapObjectSize);
Handle<FixedArray> array = isolate->factory()->NewFixedArray(array_length);
// Array now sits in the old space
CHECK(heap->InSpace(*array, OLD_SPACE));
// Array should be in the new space.
CHECK(heap->InSpace(*array, NEW_SPACE));
heap->CollectAllGarbage();
heap->CollectAllGarbage();
CHECK(heap->InSpace(*array, OLD_SPACE));
}
}
HEAP_TEST(NoPromotion) {
CcTest::InitializeVM();
Heap* heap = CcTest::heap();
heap->ConfigureHeap(1, 1, 1, 0);
v8::HandleScope sc(CcTest::isolate());
// Allocate a big fixed array in the new space.
int array_length =
(Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) /
(2 * kPointerSize);
Object* obj = heap->AllocateFixedArray(array_length).ToObjectChecked();
Handle<FixedArray> array(FixedArray::cast(obj));
Isolate* isolate = CcTest::i_isolate();
{
v8::HandleScope sc(CcTest::isolate());
Heap* heap = isolate->heap();
// Array should be in the new space.
CHECK(heap->InSpace(*array, NEW_SPACE));
heap::SealCurrentObjects(heap);
// Simulate a full old space to make promotion fail.
SimulateFullSpace(heap->old_space());
int array_length =
heap::FixedArrayLenFromSize(Page::kMaxRegularHeapObjectSize);
Handle<FixedArray> array = isolate->factory()->NewFixedArray(array_length);
// Call mark compact GC, and it should pass.
heap->CollectGarbage(OLD_SPACE);
heap->set_force_oom(true);
// Array should be in the new space.
CHECK(heap->InSpace(*array, NEW_SPACE));
heap->CollectAllGarbage();
heap->CollectAllGarbage();
CHECK(heap->InSpace(*array, NEW_SPACE));
}
}
HEAP_TEST(MarkCompactCollector) {
FLAG_incremental_marking = false;
FLAG_retain_maps_for_n_gc = 0;
......
......@@ -32,7 +32,6 @@
#include "src/v8.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-tester.h"
#include "test/cctest/heap/utils-inl.h"
namespace v8 {
namespace internal {
......@@ -208,14 +207,15 @@ TEST(Regress3540) {
0));
TestMemoryAllocatorScope test_allocator_scope(isolate, memory_allocator);
CodeRange* code_range = new CodeRange(isolate);
const size_t code_range_size = 4 * Page::kPageSize;
size_t code_range_size =
kMinimumCodeRangeSize > 0 ? kMinimumCodeRangeSize : 3 * MB;
if (!code_range->SetUp(code_range_size)) {
return;
}
Address address;
size_t size;
size_t request_size = code_range_size - 2 * Page::kPageSize;
size_t request_size = code_range_size - Page::kPageSize;
address = code_range->AllocateRawMemory(
request_size, request_size - (2 * MemoryAllocator::CodePageGuardSize()),
&size);
......
......@@ -51,7 +51,7 @@
#include "src/utils.h"
#include "src/vm-state.h"
#include "test/cctest/heap/heap-tester.h"
#include "test/cctest/heap/utils-inl.h"
#include "test/cctest/heap/heap-utils.h"
static const bool kLogThreading = false;
......@@ -634,7 +634,7 @@ TEST(MakingExternalUnalignedOneByteString) {
"slice('abcdefghijklmnopqrstuvwxyz');"));
// Trigger GCs so that the newly allocated string moves to old gen.
SimulateFullSpace(CcTest::heap()->old_space());
i::heap::SimulateFullSpace(CcTest::heap()->old_space());
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in survivor space now
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in old gen now
......@@ -14798,8 +14798,8 @@ UNINITIALIZED_TEST(SetJitCodeEventHandler) {
for (int i = 0; i < kIterations; ++i) {
LocalContext env(isolate);
i::AlwaysAllocateScope always_allocate(i_isolate);
SimulateFullSpace(i::FLAG_ignition ? heap->old_space()
: heap->code_space());
i::heap::SimulateFullSpace(i::FLAG_ignition ? heap->old_space()
: heap->code_space());
CompileRun(script);
// Keep a strong reference to the code object in the handle scope.
......@@ -19001,7 +19001,7 @@ void PrologueCallbackAlloc(v8::Isolate* isolate,
++prologue_call_count_alloc;
// Simulate full heap to see if we will reenter this callback
SimulateFullSpace(CcTest::heap()->new_space());
i::heap::SimulateFullSpace(CcTest::heap()->new_space());
Local<Object> obj = Object::New(isolate);
CHECK(!obj.IsEmpty());
......@@ -19021,7 +19021,7 @@ void EpilogueCallbackAlloc(v8::Isolate* isolate,
++epilogue_call_count_alloc;
// Simulate full heap to see if we will reenter this callback
SimulateFullSpace(CcTest::heap()->new_space());
i::heap::SimulateFullSpace(CcTest::heap()->new_space());
Local<Object> obj = Object::New(isolate);
CHECK(!obj.IsEmpty());
......
......@@ -35,7 +35,7 @@
#include "src/global-handles.h"
#include "src/macro-assembler.h"
#include "src/objects.h"
#include "test/cctest/heap/utils-inl.h"
#include "test/cctest/heap/heap-utils.h"
using namespace v8::internal;
......@@ -175,8 +175,8 @@ static void TestHashSetCausesGC(Handle<HashSet> table) {
// Simulate a full heap so that generating an identity hash code
// in subsequent calls will request GC.
SimulateFullSpace(CcTest::heap()->new_space());
SimulateFullSpace(CcTest::heap()->old_space());
heap::SimulateFullSpace(CcTest::heap()->new_space());
heap::SimulateFullSpace(CcTest::heap()->old_space());
// Calling Contains() should not cause GC ever.
int gc_count = isolate->heap()->gc_count();
......@@ -206,8 +206,8 @@ static void TestHashMapCausesGC(Handle<HashMap> table) {
// Simulate a full heap so that generating an identity hash code
// in subsequent calls will request GC.
SimulateFullSpace(CcTest::heap()->new_space());
SimulateFullSpace(CcTest::heap()->old_space());
heap::SimulateFullSpace(CcTest::heap()->new_space());
heap::SimulateFullSpace(CcTest::heap()->old_space());
// Calling Lookup() should not cause GC ever.
CHECK(table->Lookup(key)->IsTheHole());
......
......@@ -47,7 +47,7 @@
#include "src/snapshot/snapshot.h"
#include "src/snapshot/startup-serializer.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/utils-inl.h"
#include "test/cctest/heap/heap-utils.h"
using namespace v8::internal;
......@@ -1828,7 +1828,7 @@ TEST(Regress503552) {
false);
delete script_data;
SimulateIncrementalMarking(isolate->heap());
heap::SimulateIncrementalMarking(isolate->heap());
script_data = CodeSerializer::Serialize(isolate, shared, source);
delete script_data;
......
......@@ -4,7 +4,7 @@
#include "src/heap/slots-buffer.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/utils-inl.h"
#include "test/cctest/heap/heap-utils.h"
namespace v8 {
namespace internal {
......@@ -101,7 +101,7 @@ TEST(FilterInvalidSlotsBufferEntries) {
// Write an old space reference into field 4 which points to an object on an
// evacuation candidate.
SimulateFullSpace(heap->old_space());
heap::SimulateFullSpace(heap->old_space());
Handle<FixedArray> valid_object =
isolate->factory()->NewFixedArray(23, TENURED);
Page* page = Page::FromAddress(valid_object->address());
......
......@@ -15,7 +15,7 @@
#include "src/ic/ic.h"
#include "src/macro-assembler.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/utils-inl.h"
#include "test/cctest/heap/heap-utils.h"
using namespace v8::base;
using namespace v8::internal;
......@@ -1112,7 +1112,7 @@ TEST(DoScavengeWithIncrementalWriteBarrier) {
{
AlwaysAllocateScope always_allocate(isolate);
// Make sure |obj_value| is placed on an old-space evacuation candidate.
SimulateFullSpace(old_space);
heap::SimulateFullSpace(old_space);
obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
ec_page = Page::FromAddress(obj_value->address());
}
......@@ -1142,7 +1142,7 @@ TEST(DoScavengeWithIncrementalWriteBarrier) {
FLAG_stress_compaction = true;
FLAG_manual_evacuation_candidates_selection = true;
ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
SimulateIncrementalMarking(heap);
heap::SimulateIncrementalMarking(heap);
// Disable stress compaction mode in order to let GC do scavenge.
FLAG_stress_compaction = false;
......@@ -1451,7 +1451,7 @@ static void TestIncrementalWriteBarrier(Handle<Map> map, Handle<Map> new_map,
CHECK(old_space->Contains(*obj));
// Make sure |obj_value| is placed on an old-space evacuation candidate.
SimulateFullSpace(old_space);
heap::SimulateFullSpace(old_space);
obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
ec_page = Page::FromAddress(obj_value->address());
CHECK_NE(ec_page, Page::FromAddress(obj->address()));
......@@ -1460,7 +1460,7 @@ static void TestIncrementalWriteBarrier(Handle<Map> map, Handle<Map> new_map,
// Heap is ready, force |ec_page| to become an evacuation candidate and
// simulate incremental marking.
ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
SimulateIncrementalMarking(heap);
heap::SimulateIncrementalMarking(heap);
// Check that everything is ready for triggering incremental write barrier
// (i.e. that both |obj| and |obj_value| are black and the marking phase is
......
......@@ -31,7 +31,7 @@
#include "src/global-handles.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/utils-inl.h"
#include "test/cctest/heap/heap-utils.h"
using namespace v8::internal;
......@@ -177,7 +177,7 @@ TEST(Regress2060a) {
// Start second old-space page so that values land on evacuation candidate.
Page* first_page = heap->old_space()->anchor()->next_page();
SimulateFullSpace(heap->old_space());
heap::SimulateFullSpace(heap->old_space());
// Fill up weak map with values on an evacuation candidate.
{
......@@ -216,7 +216,7 @@ TEST(Regress2060b) {
// Start second old-space page so that keys land on evacuation candidate.
Page* first_page = heap->old_space()->anchor()->next_page();
SimulateFullSpace(heap->old_space());
heap::SimulateFullSpace(heap->old_space());
// Fill up weak map with keys on an evacuation candidate.
Handle<JSObject> keys[32];
......@@ -249,7 +249,7 @@ TEST(Regress399527) {
{
HandleScope scope(isolate);
AllocateJSWeakMap(isolate);
SimulateIncrementalMarking(heap);
heap::SimulateIncrementalMarking(heap);
}
// The weak map is marked black here but leaving the handle scope will make
// the object unreachable. Aborting incremental marking will clear all the
......
......@@ -31,7 +31,7 @@
#include "src/global-handles.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/utils-inl.h"
#include "test/cctest/heap/heap-utils.h"
using namespace v8::internal;
......@@ -176,7 +176,7 @@ TEST(WeakSet_Regress2060a) {
// Start second old-space page so that values land on evacuation candidate.
Page* first_page = heap->old_space()->anchor()->next_page();
SimulateFullSpace(heap->old_space());
heap::SimulateFullSpace(heap->old_space());
// Fill up weak set with values on an evacuation candidate.
{
......@@ -215,7 +215,7 @@ TEST(WeakSet_Regress2060b) {
// Start second old-space page so that keys land on evacuation candidate.
Page* first_page = heap->old_space()->anchor()->next_page();
SimulateFullSpace(heap->old_space());
heap::SimulateFullSpace(heap->old_space());
// Fill up weak set with keys on an evacuation candidate.
Handle<JSObject> keys[32];
......