Commit 8ca0137a authored by Hannes Payer, committed by Commit Bot

[heap] Ensure consistent large MemoryChunk checks.

Bug: chromium:852420
Change-Id: I47ea0994b7f1933095995c051fce76c288a25d6a
Reviewed-on: https://chromium-review.googlesource.com/c/1356515
Commit-Queue: Hannes Payer <hpayer@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#57975}
parent 07537cdb
@@ -2511,8 +2511,13 @@ bool Heap::IsImmovable(HeapObject* object) {
 }
 
 bool Heap::IsLargeObject(HeapObject* object) {
-  return lo_space()->Contains(object) || code_lo_space()->Contains(object) ||
-         new_lo_space()->Contains(object);
+  return IsLargeMemoryChunk(MemoryChunk::FromHeapObject(object));
+}
+
+bool Heap::IsLargeMemoryChunk(MemoryChunk* chunk) {
+  return chunk->owner()->identity() == NEW_LO_SPACE ||
+         chunk->owner()->identity() == LO_SPACE ||
+         chunk->owner()->identity() == CODE_LO_SPACE;
 }
 
 bool Heap::IsInYoungGeneration(HeapObject* object) {

@@ -389,6 +389,7 @@ class Heap {
   bool IsImmovable(HeapObject* object);
 
   bool IsLargeObject(HeapObject* object);
+  bool IsLargeMemoryChunk(MemoryChunk* chunk);
   inline bool IsWithinLargeObject(Address address);
 
   bool IsInYoungGeneration(HeapObject* object);

@@ -758,10 +758,6 @@ size_t MemoryChunk::CommittedPhysicalMemory() {
   return high_water_mark_;
 }
 
-bool MemoryChunk::IsPagedSpace() const {
-  return owner()->identity() != LO_SPACE;
-}
-
 bool MemoryChunk::InOldSpace() const {
   return owner()->identity() == OLD_SPACE;
 }
@@ -1284,7 +1280,7 @@ void MemoryChunk::ReleaseAllocatedMemory() {
   if (young_generation_bitmap_ != nullptr) ReleaseYoungGenerationBitmap();
   if (marking_bitmap_ != nullptr) ReleaseMarkingBitmap();
 
-  if (IsPagedSpace()) {
+  if (!heap_->IsLargeMemoryChunk(this)) {
     Page* page = static_cast<Page*>(this);
     page->ReleaseFreeListCategories();
   }

@@ -643,8 +643,6 @@ class MemoryChunk {
   void set_owner(Space* space) { owner_ = space; }
 
-  bool IsPagedSpace() const;
-
   // Emits a memory barrier. For TSAN builds the other thread needs to perform
   // MemoryChunk::synchronized_heap() to simulate the barrier.
   void InitializationMemoryFence();
 
@@ -1179,7 +1177,8 @@ class V8_EXPORT_PRIVATE MemoryAllocator {
     }
 
     void AddMemoryChunkSafe(MemoryChunk* chunk) {
-      if (chunk->IsPagedSpace() && chunk->executable() != EXECUTABLE) {
+      if (!heap_->IsLargeMemoryChunk(chunk) &&
+          chunk->executable() != EXECUTABLE) {
        AddMemoryChunkSafe<kRegular>(chunk);
      } else {
        AddMemoryChunkSafe<kNonRegular>(chunk);
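Note on the change above: the removed chunk-level predicate MemoryChunk::IsPagedSpace() only excluded LO_SPACE, so chunks owned by NEW_LO_SPACE or CODE_LO_SPACE still looked "paged" to its callers; the new heap-level Heap::IsLargeMemoryChunk() names all three large-object spaces, and ReleaseAllocatedMemory() and Unmapper::AddMemoryChunkSafe() now use it. The following is a minimal standalone sketch (not V8 source; the enum and free functions below only mimic V8's space identities for illustration) showing where the old and new predicates disagree:

// Standalone sketch contrasting the old and new large-chunk checks.
// Assumption: the enum merely imitates V8's AllocationSpace identities.
#include <cassert>

enum AllocationSpace {
  NEW_SPACE,
  OLD_SPACE,
  CODE_SPACE,
  LO_SPACE,       // old-generation large objects
  NEW_LO_SPACE,   // young-generation large objects
  CODE_LO_SPACE   // large code objects
};

// Old chunk-level check: "paged" meant merely "not LO_SPACE", so chunks owned
// by NEW_LO_SPACE or CODE_LO_SPACE were still classified as paged.
bool OldIsPagedSpace(AllocationSpace identity) { return identity != LO_SPACE; }

// New heap-level check introduced by this CL: a chunk is large iff its owner
// is any of the three large-object spaces.
bool IsLargeMemoryChunk(AllocationSpace identity) {
  return identity == NEW_LO_SPACE || identity == LO_SPACE ||
         identity == CODE_LO_SPACE;
}

int main() {
  // The two checks agree for regular paged spaces and for LO_SPACE...
  assert(!OldIsPagedSpace(LO_SPACE) && IsLargeMemoryChunk(LO_SPACE));
  assert(OldIsPagedSpace(OLD_SPACE) && !IsLargeMemoryChunk(OLD_SPACE));
  // ...but disagree for the newer large-object spaces, which the callers in
  // ReleaseAllocatedMemory() and Unmapper::AddMemoryChunkSafe() now treat
  // consistently as large.
  assert(OldIsPagedSpace(NEW_LO_SPACE) && IsLargeMemoryChunk(NEW_LO_SPACE));
  assert(OldIsPagedSpace(CODE_LO_SPACE) && IsLargeMemoryChunk(CODE_LO_SPACE));
  return 0;
}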