Commit d56da546 authored by Ulan Degenbaev, committed by Commit Bot

[heap] Small fixes for young large objects

This replaces Heap::InNewSpace with Heap::InYoungGeneration and
fixes tests that are sensitive to page size.

Bug: chromium:852420
Change-Id: I32b1eafb45813ea3bdcbda075f9e6156aaf4c5e3
Reviewed-on: https://chromium-review.googlesource.com/c/1475766
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#59647}
parent ec68d97d
...@@ -316,34 +316,6 @@ void Heap::FinalizeExternalString(String string) { ...@@ -316,34 +316,6 @@ void Heap::FinalizeExternalString(String string) {
Address Heap::NewSpaceTop() { return new_space_->top(); } Address Heap::NewSpaceTop() { return new_space_->top(); }
// static
// Predicate: does |object| denote a heap object allocated in new space?
// Smis (and anything that is not a heap object) are never in new space.
bool Heap::InNewSpace(Object object) {
  // Weak-tagged values must never be passed to this strong-reference check.
  DCHECK(!HasWeakHeapObjectTag(object));
  if (!object->IsHeapObject()) return false;
  return InNewSpace(HeapObject::cast(object));
}
// static
// MaybeObject variant: a cleared weak reference or a Smi payload cannot be
// in new space; otherwise defer to the HeapObject overload.
bool Heap::InNewSpace(MaybeObject object) {
  HeapObject extracted;
  if (!object->GetHeapObject(&extracted)) return false;
  return InNewSpace(extracted);
}
// static
// Core check: reads the owning chunk's space flags directly rather than
// calling NewSpace::Contains, so it stays cheap enough to inline.
bool Heap::InNewSpace(HeapObject heap_object) {
// Inlined check from NewSpace::Contains.
bool result = MemoryChunk::FromHeapObject(heap_object)->InNewSpace();
#ifdef DEBUG
// If in NEW_SPACE, then check we're either not in the middle of GC or the
// object is in to-space.
if (result) {
// If the object is in NEW_SPACE, then it's not in RO_SPACE so this is safe.
Heap* heap = Heap::FromWritableHeapObject(heap_object);
// Outside of a GC cycle every live new-space object must sit in to-space;
// from-space only holds objects transiently while a scavenge is running.
DCHECK_IMPLIES(heap->gc_state_ == NOT_IN_GC, InToPage(heap_object));
}
#endif
return result;
}
bool Heap::InYoungGeneration(Object object) { bool Heap::InYoungGeneration(Object object) {
DCHECK(!HasWeakHeapObjectTag(object)); DCHECK(!HasWeakHeapObjectTag(object));
return object->IsHeapObject() && InYoungGeneration(HeapObject::cast(object)); return object->IsHeapObject() && InYoungGeneration(HeapObject::cast(object));
...@@ -528,7 +500,7 @@ void Heap::ExternalStringTable::AddString(String string) { ...@@ -528,7 +500,7 @@ void Heap::ExternalStringTable::AddString(String string) {
DCHECK(string->IsExternalString()); DCHECK(string->IsExternalString());
DCHECK(!Contains(string)); DCHECK(!Contains(string));
if (InNewSpace(string)) { if (InYoungGeneration(string)) {
young_strings_.push_back(string); young_strings_.push_back(string);
} else { } else {
old_strings_.push_back(string); old_strings_.push_back(string);
......
...@@ -256,7 +256,8 @@ size_t Heap::CommittedMemoryOfUnmapper() { ...@@ -256,7 +256,8 @@ size_t Heap::CommittedMemoryOfUnmapper() {
size_t Heap::CommittedMemory() { size_t Heap::CommittedMemory() {
if (!HasBeenSetUp()) return 0; if (!HasBeenSetUp()) return 0;
return new_space_->CommittedMemory() + CommittedOldGenerationMemory(); return new_space_->CommittedMemory() + new_lo_space_->Size() +
CommittedOldGenerationMemory();
} }
......
...@@ -912,11 +912,6 @@ class Heap { ...@@ -912,11 +912,6 @@ class Heap {
static inline bool InYoungGeneration(Object object); static inline bool InYoungGeneration(Object object);
static inline bool InYoungGeneration(MaybeObject object); static inline bool InYoungGeneration(MaybeObject object);
static inline bool InYoungGeneration(HeapObject heap_object); static inline bool InYoungGeneration(HeapObject heap_object);
// TODO(ulan): Remove once all call sites are changed to use
// InYoungGeneration.
static inline bool InNewSpace(Object object);
static inline bool InNewSpace(MaybeObject object);
static inline bool InNewSpace(HeapObject heap_object);
static inline bool InFromPage(Object object); static inline bool InFromPage(Object object);
static inline bool InFromPage(MaybeObject object); static inline bool InFromPage(MaybeObject object);
static inline bool InFromPage(HeapObject heap_object); static inline bool InFromPage(HeapObject heap_object);
......
...@@ -172,6 +172,7 @@ class FullMarkingVerifier : public MarkingVerifier { ...@@ -172,6 +172,7 @@ class FullMarkingVerifier : public MarkingVerifier {
void Run() override { void Run() override {
VerifyRoots(VISIT_ONLY_STRONG); VerifyRoots(VISIT_ONLY_STRONG);
VerifyMarking(heap_->new_space()); VerifyMarking(heap_->new_space());
VerifyMarking(heap_->new_lo_space());
VerifyMarking(heap_->old_space()); VerifyMarking(heap_->old_space());
VerifyMarking(heap_->code_space()); VerifyMarking(heap_->code_space());
VerifyMarking(heap_->map_space()); VerifyMarking(heap_->map_space());
...@@ -1141,7 +1142,7 @@ class RecordMigratedSlotVisitor : public ObjectVisitor { ...@@ -1141,7 +1142,7 @@ class RecordMigratedSlotVisitor : public ObjectVisitor {
inline virtual void RecordMigratedSlot(HeapObject host, MaybeObject value, inline virtual void RecordMigratedSlot(HeapObject host, MaybeObject value,
Address slot) { Address slot) {
if (value->IsStrongOrWeak()) { if (value->IsStrongOrWeak()) {
Page* p = Page::FromAddress(value.ptr()); MemoryChunk* p = MemoryChunk::FromAddress(value.ptr());
if (p->InYoungGeneration()) { if (p->InYoungGeneration()) {
DCHECK_IMPLIES( DCHECK_IMPLIES(
p->IsToPage(), p->IsToPage(),
...@@ -4042,16 +4043,16 @@ class YoungGenerationRecordMigratedSlotVisitor final ...@@ -4042,16 +4043,16 @@ class YoungGenerationRecordMigratedSlotVisitor final
inline void RecordMigratedSlot(HeapObject host, MaybeObject value, inline void RecordMigratedSlot(HeapObject host, MaybeObject value,
Address slot) final { Address slot) final {
if (value->IsStrongOrWeak()) { if (value->IsStrongOrWeak()) {
Page* p = Page::FromAddress(value.ptr()); MemoryChunk* p = MemoryChunk::FromAddress(value.ptr());
if (p->InYoungGeneration()) { if (p->InYoungGeneration()) {
DCHECK_IMPLIES( DCHECK_IMPLIES(
p->IsToPage(), p->IsToPage(),
p->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION) || p->IsLargePage()); p->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION) || p->IsLargePage());
RememberedSet<OLD_TO_NEW>::Insert<AccessMode::NON_ATOMIC>( RememberedSet<OLD_TO_NEW>::Insert<AccessMode::NON_ATOMIC>(
Page::FromHeapObject(host), slot); MemoryChunk::FromHeapObject(host), slot);
} else if (p->IsEvacuationCandidate() && IsLive(host)) { } else if (p->IsEvacuationCandidate() && IsLive(host)) {
RememberedSet<OLD_TO_OLD>::Insert<AccessMode::NON_ATOMIC>( RememberedSet<OLD_TO_OLD>::Insert<AccessMode::NON_ATOMIC>(
Page::FromHeapObject(host), slot); MemoryChunk::FromHeapObject(host), slot);
} }
} }
} }
......
...@@ -308,10 +308,11 @@ TEST(ArrayBuffer_SemiSpaceCopyThenPagePromotion) { ...@@ -308,10 +308,11 @@ TEST(ArrayBuffer_SemiSpaceCopyThenPagePromotion) {
UNINITIALIZED_TEST(ArrayBuffer_SemiSpaceCopyMultipleTasks) { UNINITIALIZED_TEST(ArrayBuffer_SemiSpaceCopyMultipleTasks) {
if (FLAG_optimize_for_size) return; if (FLAG_optimize_for_size) return;
ManualGCScope manual_gc_scope;
// Test allocates JSArrayBuffer on different pages before triggering a // Test allocates JSArrayBuffer on different pages before triggering a
// full GC that performs the semispace copy. If parallelized, this test // full GC that performs the semispace copy. If parallelized, this test
// ensures proper synchronization in TSAN configurations. // ensures proper synchronization in TSAN configurations.
FLAG_min_semi_space_size = 2 * Page::kPageSize / MB; FLAG_min_semi_space_size = Max(2 * Page::kPageSize / MB, 1);
v8::Isolate::CreateParams create_params; v8::Isolate::CreateParams create_params;
create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
v8::Isolate* isolate = v8::Isolate::New(create_params); v8::Isolate* isolate = v8::Isolate::New(create_params);
......
...@@ -1927,7 +1927,7 @@ TEST(HeapNumberAlignment) { ...@@ -1927,7 +1927,7 @@ TEST(HeapNumberAlignment) {
AlignNewSpace(required_alignment, offset); AlignNewSpace(required_alignment, offset);
Handle<Object> number_new = factory->NewNumber(1.000123); Handle<Object> number_new = factory->NewNumber(1.000123);
CHECK(number_new->IsHeapNumber()); CHECK(number_new->IsHeapNumber());
CHECK(Heap::InNewSpace(*number_new)); CHECK(Heap::InYoungGeneration(*number_new));
CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_new)->address(), CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_new)->address(),
required_alignment)); required_alignment));
...@@ -1956,7 +1956,7 @@ TEST(MutableHeapNumberAlignment) { ...@@ -1956,7 +1956,7 @@ TEST(MutableHeapNumberAlignment) {
AlignNewSpace(required_alignment, offset); AlignNewSpace(required_alignment, offset);
Handle<Object> number_new = factory->NewMutableHeapNumber(1.000123); Handle<Object> number_new = factory->NewMutableHeapNumber(1.000123);
CHECK(number_new->IsMutableHeapNumber()); CHECK(number_new->IsMutableHeapNumber());
CHECK(Heap::InNewSpace(*number_new)); CHECK(Heap::InYoungGeneration(*number_new));
CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_new)->address(), CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_new)->address(),
required_alignment)); required_alignment));
...@@ -2021,6 +2021,8 @@ TEST(GrowAndShrinkNewSpace) { ...@@ -2021,6 +2021,8 @@ TEST(GrowAndShrinkNewSpace) {
// Make sure we're in a consistent state to start out. // Make sure we're in a consistent state to start out.
CcTest::CollectAllGarbage(); CcTest::CollectAllGarbage();
CcTest::CollectAllGarbage();
new_space->Shrink();
// Explicitly growing should double the space capacity. // Explicitly growing should double the space capacity.
size_t old_capacity, new_capacity; size_t old_capacity, new_capacity;
...@@ -3150,6 +3152,9 @@ TEST(ReleaseOverReservedPages) { ...@@ -3150,6 +3152,9 @@ TEST(ReleaseOverReservedPages) {
Factory* factory = isolate->factory(); Factory* factory = isolate->factory();
Heap* heap = isolate->heap(); Heap* heap = isolate->heap();
v8::HandleScope scope(CcTest::isolate()); v8::HandleScope scope(CcTest::isolate());
// Ensure that the young generation is empty.
CcTest::CollectGarbage(NEW_SPACE);
CcTest::CollectGarbage(NEW_SPACE);
static const int number_of_test_pages = 20; static const int number_of_test_pages = 20;
// Prepare many pages with low live-bytes count. // Prepare many pages with low live-bytes count.
...@@ -3183,7 +3188,7 @@ TEST(ReleaseOverReservedPages) { ...@@ -3183,7 +3188,7 @@ TEST(ReleaseOverReservedPages) {
// boots, but if the 20 small arrays don't fit on the first page then that's // boots, but if the 20 small arrays don't fit on the first page then that's
// an indication that it is too small. // an indication that it is too small.
CcTest::CollectAllAvailableGarbage(); CcTest::CollectAllAvailableGarbage();
CHECK_EQ(initial_page_count, old_space->CountTotalPages()); CHECK_GE(initial_page_count, old_space->CountTotalPages());
} }
static int forced_gc_counter = 0; static int forced_gc_counter = 0;
...@@ -6133,7 +6138,7 @@ HEAP_TEST(Regress670675) { ...@@ -6133,7 +6138,7 @@ HEAP_TEST(Regress670675) {
if (marking->IsStopped()) { if (marking->IsStopped()) {
marking->Start(i::GarbageCollectionReason::kTesting); marking->Start(i::GarbageCollectionReason::kTesting);
} }
size_t array_length = Page::kPageSize / kTaggedSize + 100; size_t array_length = 128 * KB;
size_t n = heap->OldGenerationSpaceAvailable() / array_length; size_t n = heap->OldGenerationSpaceAvailable() / array_length;
for (size_t i = 0; i < n + 40; i++) { for (size_t i = 0; i < n + 40; i++) {
{ {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment