Commit 161a0e00 authored by mlippautz, committed by Commit bot

[cctest] Add tests for aborting compaction of pages

Tests for
* aborting a full page.
* partially aborting a page.
* partially aborting a page with pointers between aborted pages.
* partially aborting a page with store buffer entries.

Also introduces ShouldForceOOM() which prohibits a PagedSpace from expanding.
Compaction spaces refer to the corresponding actual space.

BUG=chromium:524425
LOG=N

Review URL: https://codereview.chromium.org/1511933002

Cr-Commit-Position: refs/heads/master@{#32783}
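
For orientation, here is a minimal sketch of how a cctest could combine the pieces below to provoke an aborted compaction. It is an illustrative assumption, not the contents of the new (collapsed) heap/test-compaction.cc: the test name is made up and would also need an entry in HEAP_TEST_METHODS; set_force_oom() comes from this change, while HEAP_TEST, CcTest, and FORCE_EVACUATION_CANDIDATE_FOR_TESTING are the usual cctest/heap helpers.

// Sketch only: force the old space to refuse expansion so that evacuation of
// a candidate page has no target page and must be aborted instead of crashing.
HEAP_TEST(CompactionAbortSketch) {
  FLAG_manual_evacuation_candidates_selection = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Put an object into old space and mark its page as an evacuation candidate.
  Handle<FixedArray> object = isolate->factory()->NewFixedArray(128, TENURED);
  Page* page = Page::FromAddress(object->address());
  page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);

  // Prohibit expansion of the (compaction) space, then run a full GC.
  heap->set_force_oom(true);
  heap->CollectAllGarbage();

  // The object must survive the aborted compaction; the real tests also check
  // that aborted objects stay on their original page.
  CHECK(object->IsFixedArray());
  CHECK(heap->InOldSpace(*object));
}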
parent 7e5ff19e
@@ -164,7 +164,8 @@ Heap::Heap()
       deserialization_complete_(false),
       concurrent_sweeping_enabled_(false),
       strong_roots_list_(NULL),
-      array_buffer_tracker_(NULL) {
+      array_buffer_tracker_(NULL),
+      force_oom_(false) {
   // Allow build-time customization of the max semispace size. Building
   // V8 with snapshots and a non-default max semispace size is much
   // easier if you can define it as part of the build environment.
......
@@ -817,6 +817,7 @@ class Heap {
   // TODO(hpayer): There is still a mismatch between capacity and actual
   // committed memory size.
   bool CanExpandOldGeneration(int size) {
+    if (force_oom_) return false;
     return (CommittedOldGenerationMemory() + size) < MaxOldGenerationSize();
   }
@@ -2119,6 +2120,8 @@ class Heap {
   MUST_USE_RESULT AllocationResult InternalizeString(String* str);

+  void set_force_oom(bool value) { force_oom_ = value; }
+
   // The amount of external memory registered through the API kept alive
   // by global handles
   int64_t amount_of_external_allocated_memory_;
@@ -2369,6 +2372,9 @@ class Heap {
   ArrayBufferTracker* array_buffer_tracker_;

+  // Used for testing purposes.
+  bool force_oom_;
+
   // Classes in "heap" can be friends.
   friend class AlwaysAllocateScope;
   friend class GCCallbacksScope;
......
@@ -96,6 +96,7 @@
         'gay-shortest.cc',
         'heap/heap-tester.h',
         'heap/test-alloc.cc',
+        'heap/test-compaction.cc',
         'heap/test-heap.cc',
         'heap/test-incremental-marking.cc',
         'heap/test-mark-compact.cc',
......
@@ -10,20 +10,24 @@
 // Tests that should have access to private methods of {v8::internal::Heap}.
 // Those tests need to be defined using HEAP_TEST(Name) { ... }.
-#define HEAP_TEST_METHODS(V) \
-  V(CompactionSpaceDivideMultiplePages) \
-  V(CompactionSpaceDivideSinglePage) \
-  V(GCFlags) \
-  V(MarkCompactCollector) \
-  V(NoPromotion) \
-  V(NumberStringCacheSize) \
-  V(ObjectGroups) \
-  V(Promotion) \
-  V(Regression39128) \
-  V(ResetWeakHandle) \
-  V(StressHandles) \
-  V(TestMemoryReducerSampleJsCalls) \
-  V(TestSizeOfObjects) \
+#define HEAP_TEST_METHODS(V) \
+  V(CompactionFullAbortedPage) \
+  V(CompactionPartiallyAbortedPage) \
+  V(CompactionPartiallyAbortedPageIntraAbortedPointers) \
+  V(CompactionPartiallyAbortedPageWithStoreBufferEntries) \
+  V(CompactionSpaceDivideMultiplePages) \
+  V(CompactionSpaceDivideSinglePage) \
+  V(GCFlags) \
+  V(MarkCompactCollector) \
+  V(NoPromotion) \
+  V(NumberStringCacheSize) \
+  V(ObjectGroups) \
+  V(Promotion) \
+  V(Regression39128) \
+  V(ResetWeakHandle) \
+  V(StressHandles) \
+  V(TestMemoryReducerSampleJsCalls) \
+  V(TestSizeOfObjects) \
   V(WriteBarriersInCopyJSObject)
......
(The diff for the new test file heap/test-compaction.cc is collapsed.)
@@ -16,16 +16,15 @@ namespace v8 {
 namespace internal {

 static int LenFromSize(int size) {
-  return (size - i::FixedArray::kHeaderSize) / i::kPointerSize;
+  return (size - FixedArray::kHeaderSize) / kPointerSize;
 }

-static inline void CreatePadding(i::Heap* heap, int padding_size,
-                                 i::PretenureFlag tenure) {
-  const int max_number_of_objects = 20;
-  v8::internal::Handle<v8::internal::FixedArray>
-      big_objects[max_number_of_objects];
-  i::Isolate* isolate = heap->isolate();
+static inline std::vector<Handle<FixedArray>> CreatePadding(
+    Heap* heap, int padding_size, PretenureFlag tenure,
+    int object_size = Page::kMaxRegularHeapObjectSize) {
+  std::vector<Handle<FixedArray>> handles;
+  Isolate* isolate = heap->isolate();
   int allocate_memory;
   int length;
   int free_memory = padding_size;
@@ -41,9 +40,10 @@ static inline void CreatePadding(i::Heap* heap, int padding_size,
         *heap->new_space()->allocation_top_address());
     CHECK(padding_size <= current_free_memory || current_free_memory == 0);
   }
-  for (int i = 0; i < max_number_of_objects && free_memory > 0; i++) {
-    if (free_memory > i::Page::kMaxRegularHeapObjectSize) {
-      allocate_memory = i::Page::kMaxRegularHeapObjectSize;
+  while (free_memory > 0) {
+    // for (int i = 0; i < max_number_of_objects && free_memory > 0; i++) {
+    if (free_memory > object_size) {
+      allocate_memory = object_size;
       length = LenFromSize(allocate_memory);
     } else {
       allocate_memory = free_memory;
@@ -55,11 +55,12 @@ static inline void CreatePadding(i::Heap* heap, int padding_size,
         break;
       }
     }
-    big_objects[i] = isolate->factory()->NewFixedArray(length, tenure);
-    CHECK((tenure == i::NOT_TENURED && heap->InNewSpace(*big_objects[i])) ||
-          (tenure == i::TENURED && heap->InOldSpace(*big_objects[i])));
+    handles.push_back(isolate->factory()->NewFixedArray(length, tenure));
+    CHECK((tenure == NOT_TENURED && heap->InNewSpace(*handles.back())) ||
+          (tenure == TENURED && heap->InOldSpace(*handles.back())));
     free_memory -= allocate_memory;
   }
+  return handles;
 }
......
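
As a usage note on the reworked CreatePadding(): the returned vector of handles keeps the padding objects alive in the caller, and the new object_size parameter lets a test fill memory with many small arrays rather than a few page-sized ones. The snippet below is a hedged sketch under those assumptions; the test name and the KB-sized arrays are made up for illustration, and it uses only the public Heap API, so a plain TEST() suffices.

// Sketch only: pad old space with ~1 KB FixedArrays and keep them alive via
// the returned handles; CreatePadding() already checks the tenure of each one.
TEST(CreatePaddingObjectSizeSketch) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  const int object_size = KB;  // illustrative; must not exceed kMaxRegularHeapObjectSize
  std::vector<Handle<FixedArray>> handles =
      CreatePadding(heap, Page::kMaxRegularHeapObjectSize, TENURED, object_size);

  // Every padding array was tenured, so all of them live in old space.
  for (Handle<FixedArray>& object : handles) {
    CHECK(heap->InOldSpace(*object));
  }
}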