Commit ffcff3a0 authored by mlippautz, committed by Commit bot

[heap] Cleanup MemoryChunk's declarations

- Remove unused methods, such as SetArea() or set_size(), that should never
  actually be called.
- Live bytes are now reported with --trace-live-bytes and not gc-verbose.

BUG=chromium:581076
LOG=N

Review URL: https://codereview.chromium.org/1686413002

Cr-Commit-Position: refs/heads/master@{#33908}
parent 2749ebba
@@ -968,6 +968,8 @@ DEFINE_BOOL(trace_lazy, false, "trace lazy compilation")
 DEFINE_BOOL(collect_heap_spill_statistics, false,
             "report heap spill statistics along with heap_stats "
             "(requires heap_stats)")
+DEFINE_BOOL(trace_live_bytes, false,
+            "trace incrementing and resetting of live bytes")
 DEFINE_BOOL(trace_isolates, false, "trace isolate state changes")
......
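Note on the new flag: when --trace-live-bytes is enabled (e.g. in d8), the PrintIsolate calls added further down in this CL emit one line per live-byte update and per reset. With purely hypothetical addresses and byte counts, and aside from the per-isolate prefix that PrintIsolate adds, the output would look roughly like:

  live-bytes: update page=0x23a5f2c00000 delta=88 0->88
  live-bytes: update page=0x23a5f2c00000 delta=152 88->240
  live-bytes: reset page=0x23a5f2c00000 240->0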
@@ -3858,7 +3858,7 @@ int MarkCompactCollector::SweepInParallel(PagedSpace* space,
 int MarkCompactCollector::SweepInParallel(Page* page, PagedSpace* space) {
   int max_freed = 0;
-  if (page->TryLock()) {
+  if (page->mutex()->TryLock()) {
     // If this page was already swept in the meantime, we can return here.
     if (page->concurrent_sweeping_state().Value() != Page::kSweepingPending) {
       page->mutex()->Unlock();
......
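Side note on the hunk above: Page::TryLock() was one of the thin forwarding methods dropped in this cleanup, so the sweeper now takes the chunk's mutex directly. A minimal sketch of the same try-lock-and-recheck pattern, using std::mutex as a stand-in for base::Mutex and illustrative (non-V8) names:

#include <mutex>

enum class SweepingState { kPending, kInProgress, kDone };

struct ToyPage {
  std::mutex mutex;                        // stand-in for MemoryChunk::mutex_
  SweepingState state = SweepingState::kPending;
};

// Sweep the page unless another thread holds it or has already finished it.
int SweepIfPending(ToyPage* page) {
  if (!page->mutex.try_lock()) return 0;   // another sweeper owns the page
  if (page->state != SweepingState::kPending) {
    page->mutex.unlock();                  // swept in the meantime, nothing to do
    return 0;
  }
  page->state = SweepingState::kInProgress;
  int max_freed = 0;                       // ...sweep and accumulate freed bytes...
  page->state = SweepingState::kDone;
  page->mutex.unlock();
  return max_freed;
}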
@@ -251,6 +251,36 @@ Page* Page::Initialize(Heap* heap, MemoryChunk* chunk, Executability executable,
   return page;
 }
 
+void MemoryChunk::IncrementLiveBytesFromGC(HeapObject* object, int by) {
+  MemoryChunk::FromAddress(object->address())->IncrementLiveBytes(by);
+}
+
+void MemoryChunk::ResetLiveBytes() {
+  if (FLAG_trace_live_bytes) {
+    PrintIsolate(heap()->isolate(), "live-bytes: reset page=%p %d->0\n", this,
+                 live_byte_count_);
+  }
+  live_byte_count_ = 0;
+}
+
+void MemoryChunk::IncrementLiveBytes(int by) {
+  if (FLAG_trace_live_bytes) {
+    PrintIsolate(heap()->isolate(),
+                 "live-bytes: update page=%p delta=%d %d->%d\n", this, by,
+                 live_byte_count_, live_byte_count_ + by);
+  }
+  live_byte_count_ += by;
+  DCHECK_GE(live_byte_count_, 0);
+  DCHECK_LE(static_cast<size_t>(live_byte_count_), size_);
+}
+
+void MemoryChunk::IncrementLiveBytesFromMutator(HeapObject* object, int by) {
+  MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
+  if (!chunk->InNewSpace() && !static_cast<Page*>(chunk)->SweepingDone()) {
+    static_cast<PagedSpace*>(chunk->owner())->Allocate(by);
+  }
+  chunk->IncrementLiveBytes(by);
+}
+
 bool PagedSpace::Contains(Address addr) {
   Page* p = Page::FromAddress(addr);
......
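The accounting added above funnels both the GC path and the mutator path through IncrementLiveBytes(), and the DCHECKs pin the invariant 0 <= live_byte_count_ <= size_. A self-contained toy model of that counter, with assert standing in for DCHECK and purely illustrative names and sizes:

#include <cassert>
#include <cstddef>
#include <cstdio>

// Toy per-chunk live-byte counter mirroring the invariants above:
// the count never goes negative and never exceeds the chunk size.
class ToyChunk {
 public:
  explicit ToyChunk(std::size_t size) : size_(size) {}

  void IncrementLiveBytes(int by) {
    std::printf("live-bytes: update delta=%d %d->%d\n", by, live_byte_count_,
                live_byte_count_ + by);
    live_byte_count_ += by;
    assert(live_byte_count_ >= 0);
    assert(static_cast<std::size_t>(live_byte_count_) <= size_);
  }

  void ResetLiveBytes() {
    std::printf("live-bytes: reset %d->0\n", live_byte_count_);
    live_byte_count_ = 0;
  }

 private:
  std::size_t size_;
  int live_byte_count_ = 0;
};

int main() {
  ToyChunk chunk(512 * 1024);      // hypothetical 512 KB chunk
  chunk.IncrementLiveBytes(88);    // an object survives marking
  chunk.IncrementLiveBytes(-24);   // a negative delta, e.g. a trimmed object
  chunk.ResetLiveBytes();          // counter cleared for the next cycle
  return 0;
}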
@@ -428,7 +428,7 @@ NewSpacePage* NewSpacePage::Initialize(Heap* heap, Address start,
   MemoryChunk* chunk =
       MemoryChunk::Initialize(heap, start, Page::kPageSize, area_start,
-                              area_end, NOT_EXECUTABLE, semi_space);
+                              area_end, NOT_EXECUTABLE, semi_space, nullptr);
   bool in_to_space = (semi_space->id() != kFromSpace);
   chunk->SetFlag(in_to_space ? MemoryChunk::IN_TO_SPACE
                              : MemoryChunk::IN_FROM_SPACE);
@@ -449,10 +449,10 @@ void NewSpacePage::InitializeAsAnchor(SemiSpace* semi_space) {
   SetFlags(0, ~0);
 }
 
 MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
                                      Address area_start, Address area_end,
-                                     Executability executable, Space* owner) {
+                                     Executability executable, Space* owner,
+                                     base::VirtualMemory* reservation) {
   MemoryChunk* chunk = FromAddress(base);
   DCHECK(base == chunk->address());
@@ -490,6 +490,10 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
     chunk->SetFlag(IS_EXECUTABLE);
   }
 
+  if (reservation != nullptr) {
+    chunk->reservation_.TakeControl(reservation);
+  }
+
   return chunk;
 }
@@ -691,10 +695,8 @@ MemoryChunk* MemoryAllocator::AllocateChunk(intptr_t reserve_area_size,
     PerformAllocationCallback(space, kAllocationActionAllocate, chunk_size);
   }
 
-  MemoryChunk* result = MemoryChunk::Initialize(
-      heap, base, chunk_size, area_start, area_end, executable, owner);
-  result->set_reserved_memory(&reservation);
-  return result;
+  return MemoryChunk::Initialize(heap, base, chunk_size, area_start, area_end,
+                                 executable, owner, &reservation);
 }
@@ -920,19 +922,13 @@ bool MemoryAllocator::CommitExecutableMemory(base::VirtualMemory* vm,
 // -----------------------------------------------------------------------------
 // MemoryChunk implementation
 
-void MemoryChunk::IncrementLiveBytesFromMutator(HeapObject* object, int by) {
-  MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
-  if (!chunk->InNewSpace() && !static_cast<Page*>(chunk)->SweepingDone()) {
-    static_cast<PagedSpace*>(chunk->owner())->Allocate(by);
-  }
-  chunk->IncrementLiveBytes(by);
-}
-
 void MemoryChunk::ReleaseAllocatedMemory() {
   delete slots_buffer_;
+  slots_buffer_ = nullptr;
   delete skip_list_;
+  skip_list_ = nullptr;
  delete mutex_;
+  mutex_ = nullptr;
   ReleaseOldToNewSlots();
 }
......
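Two small notes on the hunks above. The reservation is now handed into MemoryChunk::Initialize() and adopted there via base::VirtualMemory::TakeControl(), which is what made the separate set_reserved_memory() step in AllocateChunk unnecessary. A rough sketch of that kind of explicit ownership transfer, using a toy reservation class rather than V8's:

#include <cstddef>

// Toy stand-in for base::VirtualMemory. TakeControl() adopts the mapping from
// another reservation and empties the source, so exactly one object releases it.
class ToyReservation {
 public:
  ToyReservation() = default;
  ToyReservation(void* base, std::size_t size) : base_(base), size_(size) {}

  void TakeControl(ToyReservation* from) {
    base_ = from->base_;
    size_ = from->size_;
    from->base_ = nullptr;  // the source no longer owns the mapping
    from->size_ = 0;
  }

  bool IsReserved() const { return base_ != nullptr; }

 private:
  void* base_ = nullptr;
  std::size_t size_ = 0;
};

Separately, ReleaseAllocatedMemory() now resets slots_buffer_, skip_list_ and mutex_ to nullptr after deleting them, so the chunk is not left holding dangling pointers.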
@@ -1383,8 +1383,9 @@ void HeapObject::VerifySmiField(int offset) {
 Heap* HeapObject::GetHeap() const {
-  Heap* heap =
-      MemoryChunk::FromAddress(reinterpret_cast<const byte*>(this))->heap();
+  Heap* heap = MemoryChunk::FromAddress(
+                   reinterpret_cast<Address>(const_cast<HeapObject*>(this)))
+                   ->heap();
   SLOW_DCHECK(heap != NULL);
   return heap;
 }
......
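Regarding the cast change above: Address is a typedef for a non-const byte pointer (assumption: mirroring V8's globals.h of this era), so inside a const member function the constness of `this` has to be cast away before the pointer can be reinterpreted as an Address. A minimal illustration with the assumed typedefs spelled out:

#include <cstdint>

typedef uint8_t byte;   // assumption: mirrors V8's byte/Address typedefs
typedef byte* Address;

struct ToyObject {
  // In a const member function `this` is `const ToyObject*`; const_cast strips
  // the const so reinterpret_cast can produce the non-const Address.
  Address address() const {
    return reinterpret_cast<Address>(const_cast<ToyObject*>(this));
  }
};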