Commit 7f8f2833 authored by Ulan Degenbaev, committed by Commit Bot

[heap] Fix top_on_previous_step_ check in PagedSpace::AllocateRaw.

Both the top_ pointer and the top_on_previous_step_ pointer can be one
byte beyond the current page. Page::FromAddress call should take that
into account.

Bug: chromium:777177
Change-Id: I9cbb5bc6eab932afc6d0c915fd70a9a7b20ba62c
Reviewed-on: https://chromium-review.googlesource.com/738204
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#48962}
parent 7876d4e0
...@@ -370,10 +370,13 @@ AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes, ...@@ -370,10 +370,13 @@ AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
AllocationResult PagedSpace::AllocateRaw(int size_in_bytes, AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
AllocationAlignment alignment) { AllocationAlignment alignment) {
if (top() < top_on_previous_step_) { if (top_on_previous_step_ && top() < top_on_previous_step_ &&
// Generated code decreased the top() pointer to do folded allocations SupportsInlineAllocation()) {
DCHECK_EQ(Page::FromAddress(top()), // Generated code decreased the top() pointer to do folded allocations.
Page::FromAddress(top_on_previous_step_)); // The top_on_previous_step_ can be one byte beyond the current page.
DCHECK_NOT_NULL(top());
DCHECK_EQ(Page::FromAllocationAreaAddress(top()),
Page::FromAllocationAreaAddress(top_on_previous_step_ - 1));
top_on_previous_step_ = top(); top_on_previous_step_ = top();
} }
size_t bytes_since_last = size_t bytes_since_last =
......
...@@ -40,6 +40,7 @@ ...@@ -40,6 +40,7 @@
V(Regress658718) \ V(Regress658718) \
V(Regress670675) \ V(Regress670675) \
V(Regress5831) \ V(Regress5831) \
V(Regress777177) \
V(RegressMissingWriteBarrierInAllocate) \ V(RegressMissingWriteBarrierInAllocate) \
V(WriteBarriersInCopyJSObject) V(WriteBarriersInCopyJSObject)
......
...@@ -80,8 +80,6 @@ class TestCodeRangeScope { ...@@ -80,8 +80,6 @@ class TestCodeRangeScope {
DISALLOW_COPY_AND_ASSIGN(TestCodeRangeScope); DISALLOW_COPY_AND_ASSIGN(TestCodeRangeScope);
}; };
namespace test_spaces {
static void VerifyMemoryChunk(Isolate* isolate, static void VerifyMemoryChunk(Isolate* isolate,
Heap* heap, Heap* heap,
CodeRange* code_range, CodeRange* code_range,
...@@ -136,7 +134,6 @@ static void VerifyMemoryChunk(Isolate* isolate, ...@@ -136,7 +134,6 @@ static void VerifyMemoryChunk(Isolate* isolate,
delete memory_allocator; delete memory_allocator;
} }
TEST(Regress3540) { TEST(Regress3540) {
Isolate* isolate = CcTest::i_isolate(); Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap(); Heap* heap = isolate->heap();
...@@ -562,7 +559,6 @@ UNINITIALIZED_TEST(AllocationObserver) { ...@@ -562,7 +559,6 @@ UNINITIALIZED_TEST(AllocationObserver) {
isolate->Dispose(); isolate->Dispose();
} }
UNINITIALIZED_TEST(InlineAllocationObserverCadence) { UNINITIALIZED_TEST(InlineAllocationObserverCadence) {
v8::Isolate::CreateParams create_params; v8::Isolate::CreateParams create_params;
create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
...@@ -600,6 +596,49 @@ UNINITIALIZED_TEST(InlineAllocationObserverCadence) { ...@@ -600,6 +596,49 @@ UNINITIALIZED_TEST(InlineAllocationObserverCadence) {
isolate->Dispose(); isolate->Dispose();
} }
// Regression test for chromium:777177: PagedSpace::AllocateRaw used to
// DCHECK that top() and top_on_previous_step_ lie on the same page, but
// top_on_previous_step_ can legitimately point one byte past the end of
// the current page, which made Page::FromAddress resolve to the wrong page.
HEAP_TEST(Regress777177) {
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();
HandleScope scope(isolate);
PagedSpace* old_space = heap->old_space();
// An allocation observer is required so that AllocateRaw takes the
// top_on_previous_step_ bookkeeping path being tested.
Observer observer(128);
old_space->AddAllocationObserver(&observer);
int area_size = old_space->AreaSize();
int max_object_size = kMaxRegularHeapObjectSize;
// filler_size + max_object_size == area_size, so the two allocations
// below together consume exactly one page's usable area.
int filler_size = area_size - max_object_size;
{
// Ensure a new linear allocation area on a fresh page.
AlwaysAllocateScope always_allocate(isolate);
heap::SimulateFullSpace(old_space);
AllocationResult result = old_space->AllocateRaw(filler_size, kWordAligned);
HeapObject* obj = result.ToObjectChecked();
// Turn the raw allocation into a valid filler so the heap stays iterable.
heap->CreateFillerObjectAt(obj->address(), filler_size,
ClearRecordedSlots::kNo);
}
{
// Allocate all bytes of the linear allocation area. This moves top_ and
// top_on_previous_step_ to the next page.
AllocationResult result =
old_space->AllocateRaw(max_object_size, kWordAligned);
HeapObject* obj = result.ToObjectChecked();
// Simulate allocation folding moving the top pointer back.
// After this, top() is on the old page while top_on_previous_step_
// is one byte past that page's end — the exact condition of the bug.
old_space->SetTopAndLimit(obj->address(), old_space->limit());
}
{
// This triggers assert in crbug.com/777177 (the DCHECK comparing the
// pages of top() and top_on_previous_step_) on unfixed builds.
AllocationResult result = old_space->AllocateRaw(filler_size, kWordAligned);
HeapObject* obj = result.ToObjectChecked();
heap->CreateFillerObjectAt(obj->address(), filler_size,
ClearRecordedSlots::kNo);
}
// Detach the observer so it does not outlive this test's scope.
old_space->RemoveAllocationObserver(&observer);
}
TEST(ShrinkPageToHighWaterMarkFreeSpaceEnd) { TEST(ShrinkPageToHighWaterMarkFreeSpaceEnd) {
FLAG_stress_incremental_marking = false; FLAG_stress_incremental_marking = false;
CcTest::InitializeVM(); CcTest::InitializeVM();
...@@ -704,7 +743,6 @@ TEST(ShrinkPageToHighWaterMarkTwoWordFiller) { ...@@ -704,7 +743,6 @@ TEST(ShrinkPageToHighWaterMarkTwoWordFiller) {
CHECK_EQ(0u, shrunk); CHECK_EQ(0u, shrunk);
} }
} // namespace test_spaces
} // namespace heap } // namespace heap
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment