Commit 2b63d5d0 authored by Dominik Inführ, committed by V8 LUCI CQ

[heap] Add flag for disabling map space

Now that we are able to compact the map space, we can also get rid of the
map space and allocate maps in the old space instead. This CL introduces
FLAG_use_map_space for enabling/disabling the map space; the map space
remains enabled by default for now.

Without a separate space for maps, the GC can't prevent relocation of
maps anymore. Therefore this CL always allows compaction of maps when
running without a map space, and renames the flag to --compact-maps to
better fit this scenario.

mkgrokdump and debug_helper also need to be updated to look for maps in
the old space, since the map space is now optional.
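
The core of the change is a fallback from the dedicated map space to the
old space. A minimal sketch of that selection, distilled from the Heap
changes in this CL (simplified; the real helper uses V8_LIKELY and lives
in heap-inl.h below):

    // With --no-use-map-space, map_space_ is never created, so map
    // allocation and map lookups fall back to the old space.
    PagedSpace* Heap::space_for_maps() {
      return map_space_ ? static_cast<PagedSpace*>(map_space_)
                        : static_cast<PagedSpace*>(old_space_);
    }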

Bug: v8:12578
Change-Id: Ic4e4abd0b58bee26e64329b1c92dbccb07d8105a
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3424483
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Seth Brenith <seth.brenith@microsoft.com>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#79165}
parent 5839231f
@@ -523,7 +523,7 @@ DEFINE_WEAK_IMPLICATION(future, short_builtin_calls)
 #if !MUST_WRITE_PROTECT_CODE_MEMORY
 DEFINE_WEAK_VALUE_IMPLICATION(future, write_protect_code_memory, false)
 #endif
-DEFINE_WEAK_IMPLICATION(future, compact_map_space)
+DEFINE_WEAK_IMPLICATION(future, compact_maps)
 DEFINE_BOOL_READONLY(dict_property_const_tracking,
                      V8_DICT_PROPERTY_CONST_TRACKING_BOOL,
@@ -1322,8 +1322,11 @@ DEFINE_BOOL(compact, true,
             "Perform compaction on full GCs based on V8's default heuristics")
 DEFINE_BOOL(compact_code_space, true,
             "Perform code space compaction on full collections.")
-DEFINE_BOOL(compact_map_space, false,
-            "Perform map space compaction on full collections.")
+DEFINE_BOOL(compact_maps, false,
+            "Perform compaction on maps on full collections.")
+DEFINE_BOOL(use_map_space, true, "Use separate space for maps.")
+// Without a map space we have to compact maps.
+DEFINE_NEG_VALUE_IMPLICATION(use_map_space, compact_maps, true)
 DEFINE_BOOL(compact_on_every_full_gc, false,
             "Perform compaction on every full GC")
 DEFINE_BOOL(compact_with_stack, true,
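
Read together, the new flag definitions amount to the following behavior
(a simplified sketch of the negative value implication, not the actual
flag-processing machinery):

    // Hypothetical simplification of DEFINE_NEG_VALUE_IMPLICATION above:
    // passing --no-use-map-space force-enables --compact-maps, because maps
    // in the old space can land on evacuation candidates and must be movable.
    if (!FLAG_use_map_space) FLAG_compact_maps = true;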
......
@@ -35,7 +35,10 @@ class EvacuationAllocator {
   heap_->old_space()->MergeCompactionSpace(compaction_spaces_.Get(OLD_SPACE));
   heap_->code_space()->MergeCompactionSpace(
       compaction_spaces_.Get(CODE_SPACE));
-  heap_->map_space()->MergeCompactionSpace(compaction_spaces_.Get(MAP_SPACE));
+  if (heap_->map_space()) {
+    heap_->map_space()->MergeCompactionSpace(
+        compaction_spaces_.Get(MAP_SPACE));
+  }
   // Give back remaining LAB space if this EvacuationAllocator's new space LAB
   // sits right next to new space allocation top.
......
@@ -20,6 +20,7 @@
 #include "src/execution/isolate.h"
 #include "src/heap/code-object-registry.h"
 #include "src/heap/concurrent-allocator-inl.h"
+#include "src/heap/concurrent-allocator.h"
 #include "src/heap/heap-write-barrier.h"
 #include "src/heap/heap.h"
 #include "src/heap/large-spaces.h"
@@ -100,6 +101,16 @@ int64_t Heap::update_external_memory(int64_t delta) {
   return external_memory_.Update(delta);
 }
+
+PagedSpace* Heap::space_for_maps() {
+  return V8_LIKELY(map_space_) ? static_cast<PagedSpace*>(map_space_)
+                               : static_cast<PagedSpace*>(old_space_);
+}
+
+ConcurrentAllocator* Heap::concurrent_allocator_for_maps() {
+  return V8_LIKELY(shared_map_allocator_) ? shared_map_allocator_.get()
+                                          : shared_old_allocator_.get();
+}
+
 RootsTable& Heap::roots_table() { return isolate()->roots_table(); }

 #define ROOT_ACCESSOR(Type, name, CamelName) \
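
Call sites stop hard-coding the map space and go through these helpers
instead; the verifier and mkgrokdump hunks below use exactly this pattern:

    // A map must live in the read-only space or in whatever space currently
    // backs maps (map space if enabled, old space otherwise).
    CHECK(ReadOnlyHeap::Contains(map) ||
          isolate->heap()->space_for_maps()->Contains(map));

V8_LIKELY biases the helpers toward the default configuration, where the
map space is still enabled.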
@@ -238,19 +249,26 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationType type,
       DCHECK(AllowCodeAllocation::IsAllowed());
       allocation = code_space_->AllocateRawUnaligned(size_in_bytes);
       break;
-    case AllocationType::kMap:
+    case AllocationType::kMap: {
       DCHECK_EQ(alignment, AllocationAlignment::kTaggedAligned);
-      allocation = map_space_->AllocateRawUnaligned(size_in_bytes);
+      PagedSpace* allocation_space =
+          V8_LIKELY(map_space_) ? static_cast<PagedSpace*>(map_space_)
+                                : static_cast<PagedSpace*>(old_space_);
+      allocation = allocation_space->AllocateRawUnaligned(size_in_bytes);
       break;
+    }
     case AllocationType::kReadOnly:
       DCHECK(CanAllocateInReadOnlySpace());
       DCHECK_EQ(AllocationOrigin::kRuntime, origin);
       allocation = read_only_space_->AllocateRaw(size_in_bytes, alignment);
       break;
-    case AllocationType::kSharedMap:
-      allocation = shared_map_allocator_->AllocateRaw(size_in_bytes,
-                                                      alignment, origin);
+    case AllocationType::kSharedMap: {
+      ConcurrentAllocator* allocator = V8_LIKELY(shared_map_allocator_)
+                                           ? shared_map_allocator_.get()
+                                           : shared_old_allocator_.get();
+      allocation = allocator->AllocateRaw(size_in_bytes, alignment, origin);
       break;
+    }
     case AllocationType::kSharedOld:
       allocation = shared_old_allocator_->AllocateRaw(size_in_bytes,
                                                       alignment, origin);
......
@@ -534,12 +534,14 @@ void Heap::PrintShortHeapStatistics() {
                ", committed: %6zu KB\n",
                code_space_->SizeOfObjects() / KB, code_space_->Available() / KB,
                code_space_->CommittedMemory() / KB);
-  PrintIsolate(isolate_,
-               "Map space, used: %6zu KB"
-               ", available: %6zu KB"
-               ", committed: %6zu KB\n",
-               map_space_->SizeOfObjects() / KB, map_space_->Available() / KB,
-               map_space_->CommittedMemory() / KB);
+  if (map_space()) {
+    PrintIsolate(isolate_,
+                 "Map space, used: %6zu KB"
+                 ", available: %6zu KB"
+                 ", committed: %6zu KB\n",
+                 map_space_->SizeOfObjects() / KB, map_space_->Available() / KB,
+                 map_space_->CommittedMemory() / KB);
+  }
   PrintIsolate(isolate_,
                "Large object space, used: %6zu KB"
                ", available: %6zu KB"
@@ -1409,7 +1411,11 @@ void Heap::GarbageCollectionEpilogueInSafepoint(GarbageCollector collector) {
   UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(old_space)
   UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(code_space)
-  UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(map_space)
+
+  if (map_space()) {
+    UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(map_space)
+  }
+
   UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(lo_space)
 #undef UPDATE_COUNTERS_FOR_SPACE
 #undef UPDATE_FRAGMENTATION_FOR_SPACE
@@ -1478,8 +1484,10 @@ void Heap::GarbageCollectionEpilogue(GarbageCollector collector) {
         static_cast<int>(CommittedMemory() / KB));
     isolate_->counters()->heap_sample_total_used()->AddSample(
         static_cast<int>(SizeOfObjects() / KB));
-    isolate_->counters()->heap_sample_map_space_committed()->AddSample(
-        static_cast<int>(map_space()->CommittedMemory() / KB));
+    if (map_space()) {
+      isolate_->counters()->heap_sample_map_space_committed()->AddSample(
+          static_cast<int>(map_space()->CommittedMemory() / KB));
+    }
     isolate_->counters()->heap_sample_code_space_committed()->AddSample(
         static_cast<int>(code_space()->CommittedMemory() / KB));
@@ -3685,7 +3693,7 @@ void Heap::FreeSharedLinearAllocationAreas() {
 void Heap::FreeMainThreadSharedLinearAllocationAreas() {
   if (!isolate()->shared_isolate()) return;
   shared_old_allocator_->FreeLinearAllocationArea();
-  shared_map_allocator_->FreeLinearAllocationArea();
+  if (shared_map_allocator_) shared_map_allocator_->FreeLinearAllocationArea();
   main_thread_local_heap()->FreeSharedLinearAllocationArea();
 }
@@ -4444,8 +4452,8 @@ bool Heap::Contains(HeapObject value) const {
   return HasBeenSetUp() &&
          ((new_space_ && new_space_->ToSpaceContains(value)) ||
           old_space_->Contains(value) || code_space_->Contains(value) ||
-          map_space_->Contains(value) || lo_space_->Contains(value) ||
-          code_lo_space_->Contains(value) ||
+          (map_space_ && map_space_->Contains(value)) ||
+          lo_space_->Contains(value) || code_lo_space_->Contains(value) ||
           (new_lo_space_ && new_lo_space_->Contains(value)));
 }
@@ -4464,7 +4472,7 @@ bool Heap::ContainsCode(HeapObject value) const {
 bool Heap::SharedHeapContains(HeapObject value) const {
   if (shared_old_space_)
     return shared_old_space_->Contains(value) ||
-           shared_map_space_->Contains(value);
+           (shared_map_space_ && shared_map_space_->Contains(value));
   return false;
 }
@@ -4496,6 +4504,7 @@ bool Heap::InSpace(HeapObject value, AllocationSpace space) const {
     case CODE_SPACE:
       return code_space_->Contains(value);
     case MAP_SPACE:
+      DCHECK(map_space_);
       return map_space_->Contains(value);
     case LO_SPACE:
       return lo_space_->Contains(value);
@@ -4525,6 +4534,7 @@ bool Heap::InSpaceSlow(Address addr, AllocationSpace space) const {
     case CODE_SPACE:
       return code_space_->ContainsSlow(addr);
     case MAP_SPACE:
+      DCHECK(map_space_);
       return map_space_->ContainsSlow(addr);
     case LO_SPACE:
       return lo_space_->ContainsSlow(addr);
@@ -4586,7 +4596,9 @@ void Heap::Verify() {
   if (new_space_) new_space_->Verify(isolate());

   old_space_->Verify(isolate(), &visitor);
-  map_space_->Verify(isolate(), &visitor);
+  if (map_space_) {
+    map_space_->Verify(isolate(), &visitor);
+  }

   VerifyPointersVisitor no_dirty_regions_visitor(this);
   code_space_->Verify(isolate(), &no_dirty_regions_visitor);
@@ -5308,8 +5320,8 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
   *stats->old_space_capacity = old_space_->Capacity();
   *stats->code_space_size = code_space_->SizeOfObjects();
   *stats->code_space_capacity = code_space_->Capacity();
-  *stats->map_space_size = map_space_->SizeOfObjects();
-  *stats->map_space_capacity = map_space_->Capacity();
+  *stats->map_space_size = map_space_ ? map_space_->SizeOfObjects() : 0;
+  *stats->map_space_capacity = map_space_ ? map_space_->Capacity() : 0;
   *stats->lo_space_size = lo_space_->Size();
   *stats->code_lo_space_size = code_lo_space_->Size();
   isolate_->global_handles()->RecordStats(stats);
@@ -5880,7 +5892,9 @@ void Heap::SetUpSpaces(LinearAllocationArea* new_allocation_info,
   }
   space_[OLD_SPACE] = old_space_ = new OldSpace(this, old_allocation_info);
   space_[CODE_SPACE] = code_space_ = new CodeSpace(this);
-  space_[MAP_SPACE] = map_space_ = new MapSpace(this);
+  if (FLAG_use_map_space) {
+    space_[MAP_SPACE] = map_space_ = new MapSpace(this);
+  }
   space_[LO_SPACE] = lo_space_ = new OldLargeObjectSpace(this);
   space_[CODE_LO_SPACE] = code_lo_space_ = new CodeLargeObjectSpace(this);
@@ -5938,13 +5952,17 @@ void Heap::SetUpSpaces(LinearAllocationArea* new_allocation_info,
   write_protect_code_memory_ = FLAG_write_protect_code_memory;

   if (isolate()->shared_isolate()) {
-    shared_old_space_ = isolate()->shared_isolate()->heap()->old_space();
+    Heap* shared_heap = isolate()->shared_isolate()->heap();
+
+    shared_old_space_ = shared_heap->old_space();
     shared_old_allocator_.reset(
         new ConcurrentAllocator(main_thread_local_heap(), shared_old_space_));
-    shared_map_space_ = isolate()->shared_isolate()->heap()->map_space();
-    shared_map_allocator_.reset(
-        new ConcurrentAllocator(main_thread_local_heap(), shared_map_space_));
+
+    if (shared_heap->map_space()) {
+      shared_map_space_ = shared_heap->map_space();
+      shared_map_allocator_.reset(new ConcurrentAllocator(
+          main_thread_local_heap(), shared_map_space_));
+    }
   }

   main_thread_local_heap()->SetUpMainThread();
......
@@ -844,6 +844,7 @@ class Heap {
   OldSpace* shared_old_space() { return shared_old_space_; }
   CodeSpace* code_space() { return code_space_; }
   MapSpace* map_space() { return map_space_; }
+  inline PagedSpace* space_for_maps();
   OldLargeObjectSpace* lo_space() { return lo_space_; }
   CodeLargeObjectSpace* code_lo_space() { return code_lo_space_; }
   NewLargeObjectSpace* new_lo_space() { return new_lo_space_; }
@@ -863,6 +864,8 @@ class Heap {
     return memory_allocator_.get();
   }

+  inline ConcurrentAllocator* concurrent_allocator_for_maps();
+
   inline Isolate* isolate();

   MarkCompactCollector* mark_compact_collector() {
......
@@ -285,7 +285,7 @@ void IncrementalMarking::StartBlackAllocation() {
   DCHECK(IsMarking());
   black_allocation_ = true;
   heap()->old_space()->MarkLinearAllocationAreaBlack();
-  heap()->map_space()->MarkLinearAllocationAreaBlack();
+  if (heap()->map_space()) heap()->map_space()->MarkLinearAllocationAreaBlack();
   heap()->code_space()->MarkLinearAllocationAreaBlack();
   heap()->safepoint()->IterateLocalHeaps([](LocalHeap* local_heap) {
     local_heap->MarkLinearAllocationAreaBlack();
@@ -299,7 +299,7 @@ void IncrementalMarking::StartBlackAllocation() {
 void IncrementalMarking::PauseBlackAllocation() {
   DCHECK(IsMarking());
   heap()->old_space()->UnmarkLinearAllocationArea();
-  heap()->map_space()->UnmarkLinearAllocationArea();
+  if (heap()->map_space()) heap()->map_space()->UnmarkLinearAllocationArea();
   heap()->code_space()->UnmarkLinearAllocationArea();
   heap()->safepoint()->IterateLocalHeaps(
       [](LocalHeap* local_heap) { local_heap->UnmarkLinearAllocationArea(); });
......
@@ -377,7 +377,8 @@ void LargeObjectSpace::Verify(Isolate* isolate) {
     // in map space or read-only space.
     Map map = object.map(cage_base);
     CHECK(map.IsMap(cage_base));
-    CHECK(ReadOnlyHeap::Contains(map) || heap()->map_space()->Contains(map));
+    CHECK(ReadOnlyHeap::Contains(map) ||
+          isolate->heap()->space_for_maps()->Contains(map));

     // We have only the following types in the large object space:
     const bool is_valid_lo_space_object =  //
......
@@ -217,7 +217,7 @@ class FullMarkingVerifier : public MarkingVerifier {
     VerifyMarking(heap_->new_lo_space());
     VerifyMarking(heap_->old_space());
     VerifyMarking(heap_->code_space());
-    VerifyMarking(heap_->map_space());
+    if (heap_->map_space()) VerifyMarking(heap_->map_space());
     VerifyMarking(heap_->lo_space());
     VerifyMarking(heap_->code_lo_space());
   }
@@ -399,7 +399,7 @@ class FullEvacuationVerifier : public EvacuationVerifier {
     VerifyEvacuation(heap_->new_space());
     VerifyEvacuation(heap_->old_space());
     VerifyEvacuation(heap_->code_space());
-    VerifyEvacuation(heap_->map_space());
+    if (heap_->map_space()) VerifyEvacuation(heap_->map_space());
   }

 protected:
@@ -560,7 +560,7 @@ bool MarkCompactCollector::StartCompaction(StartCompactionMode mode) {
   CollectEvacuationCandidates(heap()->old_space());

-  if (FLAG_compact_map_space) {
+  if (heap()->map_space() && FLAG_compact_maps) {
     CollectEvacuationCandidates(heap()->map_space());
   }
@@ -571,7 +571,7 @@ bool MarkCompactCollector::StartCompaction(StartCompactionMode mode) {
     TraceFragmentation(heap()->code_space());
   }

-  if (FLAG_trace_fragmentation) {
+  if (FLAG_trace_fragmentation && heap()->map_space()) {
     TraceFragmentation(heap()->map_space());
   }
@@ -663,7 +663,9 @@ void MarkCompactCollector::VerifyMarkbitsAreClean(LargeObjectSpace* space) {
 void MarkCompactCollector::VerifyMarkbitsAreClean() {
   VerifyMarkbitsAreClean(heap_->old_space());
   VerifyMarkbitsAreClean(heap_->code_space());
-  VerifyMarkbitsAreClean(heap_->map_space());
+  if (heap_->map_space()) {
+    VerifyMarkbitsAreClean(heap_->map_space());
+  }
   VerifyMarkbitsAreClean(heap_->new_space());
   // Read-only space should always be black since we never collect any objects
   // in it or linked from it.
@@ -684,8 +686,10 @@ void MarkCompactCollector::EnsureSweepingCompleted() {
   sweeper()->EnsureCompleted();
   heap()->old_space()->RefillFreeList();
   heap()->code_space()->RefillFreeList();
-  heap()->map_space()->RefillFreeList();
-  heap()->map_space()->SortFreeList();
+  if (heap()->map_space()) {
+    heap()->map_space()->RefillFreeList();
+    heap()->map_space()->SortFreeList();
+  }

   heap()->tracer()->NotifySweepingCompleted();
@@ -997,7 +1001,7 @@ void MarkCompactCollector::VerifyMarking() {
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
     heap()->old_space()->VerifyLiveBytes();
-    heap()->map_space()->VerifyLiveBytes();
+    if (heap()->map_space()) heap()->map_space()->VerifyLiveBytes();
     heap()->code_space()->VerifyLiveBytes();
   }
 #endif
@@ -4601,8 +4605,10 @@ void MarkCompactCollector::UpdatePointersAfterEvacuation() {
                                       RememberedSetUpdatingMode::ALL);
     CollectRememberedSetUpdatingItems(&updating_items, heap()->code_lo_space(),
                                       RememberedSetUpdatingMode::ALL);
-    CollectRememberedSetUpdatingItems(&updating_items, heap()->map_space(),
-                                      RememberedSetUpdatingMode::ALL);
+    if (heap()->map_space()) {
+      CollectRememberedSetUpdatingItems(&updating_items, heap()->map_space(),
+                                        RememberedSetUpdatingMode::ALL);
+    }

     // Iterating to space may require a valid body descriptor for e.g.
     // WasmStruct which races with updating a slot in Map. Since to space is
@@ -4837,7 +4843,7 @@ void MarkCompactCollector::StartSweepSpaces() {
           heap()->tracer(), GCTracer::Scope::MC_SWEEP_CODE, ThreadKind::kMain);
       StartSweepSpace(heap()->code_space());
     }
-    {
+    if (heap()->map_space()) {
       GCTracer::Scope sweep_scope(
           heap()->tracer(), GCTracer::Scope::MC_SWEEP_MAP, ThreadKind::kMain);
       StartSweepSpace(heap()->map_space());
@@ -4937,7 +4943,7 @@ class YoungGenerationEvacuationVerifier : public EvacuationVerifier {
     VerifyEvacuation(heap_->new_space());
     VerifyEvacuation(heap_->old_space());
     VerifyEvacuation(heap_->code_space());
-    VerifyEvacuation(heap_->map_space());
+    if (heap_->map_space()) VerifyEvacuation(heap_->map_space());
   }

 protected:
@@ -5205,8 +5211,11 @@ void MinorMarkCompactCollector::UpdatePointersAfterEvacuation() {
                                     RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
   CollectRememberedSetUpdatingItems(&updating_items, heap()->code_space(),
                                     RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
-  CollectRememberedSetUpdatingItems(&updating_items, heap()->map_space(),
-                                    RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
+  if (heap()->map_space()) {
+    CollectRememberedSetUpdatingItems(
+        &updating_items, heap()->map_space(),
+        RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
+  }
   CollectRememberedSetUpdatingItems(&updating_items, heap()->lo_space(),
                                     RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
   CollectRememberedSetUpdatingItems(&updating_items, heap()->code_lo_space(),
......
@@ -194,7 +194,7 @@ void MarkingBarrier::Deactivate() {
   is_compacting_ = false;
   if (is_main_thread_barrier_) {
     DeactivateSpace(heap_->old_space());
-    DeactivateSpace(heap_->map_space());
+    if (heap_->map_space()) DeactivateSpace(heap_->map_space());
     DeactivateSpace(heap_->code_space());
     DeactivateSpace(heap_->new_space());
     for (LargePage* p : *heap_->new_lo_space()) {
@@ -233,7 +233,7 @@ void MarkingBarrier::Activate(bool is_compacting) {
   is_activated_ = true;
   if (is_main_thread_barrier_) {
     ActivateSpace(heap_->old_space());
-    ActivateSpace(heap_->map_space());
+    if (heap_->map_space()) ActivateSpace(heap_->map_space());
     ActivateSpace(heap_->code_space());
     ActivateSpace(heap_->new_space());
......
@@ -720,7 +720,8 @@ void NewSpace::Verify(Isolate* isolate) {
       // be in map space or read-only space.
       Map map = object.map(cage_base);
       CHECK(map.IsMap(cage_base));
-      CHECK(ReadOnlyHeap::Contains(map) || heap()->map_space()->Contains(map));
+      CHECK(ReadOnlyHeap::Contains(map) ||
+            isolate->heap()->space_for_maps()->Contains(map));

       // The object should not be code or a map.
       CHECK(!object.IsMap(cage_base));
......
@@ -742,7 +742,7 @@ void PagedSpace::Verify(Isolate* isolate, ObjectVisitor* visitor) {
       Map map = object.map(cage_base);
       CHECK(map.IsMap(cage_base));
       CHECK(ReadOnlyHeap::Contains(map) ||
-            isolate->heap()->map_space()->Contains(map));
+            isolate->heap()->space_for_maps()->Contains(map));

       // Perform space-specific object verification.
       VerifyObject(object);
......
@@ -591,8 +591,7 @@ Scavenger::Scavenger(ScavengerCollector* collector, Heap* heap, bool is_logging,
       is_logging_(is_logging),
       is_incremental_marking_(heap->incremental_marking()->IsMarking()),
       is_compacting_(heap->incremental_marking()->IsCompacting()),
-      is_compacting_including_map_space_(is_compacting_ &&
-                                         FLAG_compact_map_space),
+      is_compacting_including_map_space_(is_compacting_ && FLAG_compact_maps),
       shared_string_table_(shared_old_allocator_.get() != nullptr) {}

 void Scavenger::IterateAndScavengePromotedObject(HeapObject target, Map map,
......
@@ -93,7 +93,8 @@ OldGenerationMemoryChunkIterator::OldGenerationMemoryChunkIterator(Heap* heap)
       state_(kOldSpaceState),
       old_iterator_(heap->old_space()->begin()),
       code_iterator_(heap->code_space()->begin()),
-      map_iterator_(heap->map_space()->begin()),
+      map_iterator_(heap->map_space() ? heap->map_space()->begin()
+                                      : PageRange::iterator(nullptr)),
       lo_iterator_(heap->lo_space()->begin()),
       code_lo_iterator_(heap->code_lo_space()->begin()) {}
......
@@ -78,7 +78,7 @@ Handle<Object> HeapTester::TestAllocateAfterFailures() {
   heap->CreateFillerObjectAt(obj.address(), size, ClearRecordedSlots::kNo);

   // Map space.
-  heap::SimulateFullSpace(heap->map_space());
+  heap::SimulateFullSpace(heap->space_for_maps());
   obj = heap->AllocateRaw(Map::kSize, AllocationType::kMap).ToObjectChecked();
   heap->CreateFillerObjectAt(obj.address(), Map::kSize,
                              ClearRecordedSlots::kNo);
......
@@ -394,6 +394,9 @@ TEST(SizeOfInitialHeap) {
   Heap* heap = isolate->heap();
   for (int i = FIRST_GROWABLE_PAGED_SPACE; i <= LAST_GROWABLE_PAGED_SPACE;
        i++) {
+    // Map space might be disabled.
+    if (i == MAP_SPACE && !heap->paged_space(i)) continue;
+
     // Debug code can be very large, so skip CODE_SPACE if we are generating it.
     if (i == CODE_SPACE && i::FLAG_debug_code) continue;
......
@@ -151,12 +151,13 @@ static int DumpHeapConstants(FILE* out, const char* argv0) {
       DumpKnownMap(out, heap, i::BaseSpace::GetSpaceName(i::RO_SPACE),
                    object);
     }
-    i::PagedSpaceObjectIterator iterator(heap, heap->map_space());
+
+    i::PagedSpace* space_for_maps = heap->space_for_maps();
+    i::PagedSpaceObjectIterator iterator(heap, space_for_maps);
     for (i::HeapObject object = iterator.Next(); !object.is_null();
          object = iterator.Next()) {
       if (!object.IsMap()) continue;
-      DumpKnownMap(out, heap, i::BaseSpace::GetSpaceName(i::MAP_SPACE),
-                   object);
+      DumpKnownMap(out, heap, space_for_maps->name(), object);
     }
     i::PrintF(out, "}\n");
   }
......
@@ -31,5 +31,5 @@
   [0.1, "--no-wasm-generic-wrapper"],
   [0.1, "--turbo-force-mid-tier-regalloc"],
   [0.0001, "--simulate-errors"],
-  [0.25, "--compact-map-space"]
+  [0.25, "--compact-maps"]
 ]

 [
   {"app_args": "--assert-types", "app_name": "d8", "probability": 0.25},
   {"app_args": "--interrupt-budget-for-feedback-vector-allocation=0", "app_name": "d8", "probability": 0.05},
-  {"app_args": "--compact-map-space", "app_name": "d8", "probability": 0.25},
+  {"app_args": "--compact-maps", "app_name": "d8", "probability": 0.25},
   {"app_args": "--force-slow-path", "app_name": "d8", "probability": 0.05},
   {"app_args": "--future", "app_name": "d8", "probability": 0.25},
   {"app_args": "--interrupt-budget=1000", "app_name": "d8", "probability": 0.25},
......
@@ -31,7 +31,7 @@ def iterate_objects(target_space, camel_space_name):
     if space == target_space:
       result.append((offset, name))
   for (space, offset), name in v8heapconst.KNOWN_OBJECTS.items():
-    if space == target_space:
+    if space == target_space and (space, offset) not in v8heapconst.KNOWN_MAPS:
       result.append((offset, name))
   out = out + '\nstd::string FindKnownObjectIn' + camel_space_name \
       + '(uintptr_t offset) {\n switch (offset) {\n'
@@ -40,8 +40,9 @@ def iterate_objects(target_space, camel_space_name):
   out = out + ' default: return "";\n }\n}\n'

 iterate_objects('map_space', 'MapSpace')
-iterate_objects('read_only_space', 'ReadOnlySpace')
 iterate_objects('old_space', 'OldSpace')
+iterate_objects('read_only_space', 'ReadOnlySpace')

 def iterate_maps(target_space, camel_space_name):
   global out
@@ -54,6 +55,7 @@ def iterate_maps(target_space, camel_space_name):
   out = out + ' default: return -1;\n }\n}\n'

 iterate_maps('map_space', 'MapSpace')
+iterate_maps('old_space', 'OldSpace')
 iterate_maps('read_only_space', 'ReadOnlySpace')

 out = out + '\nvoid FillInUnknownHeapAddresses(' + \
......
@@ -61,6 +61,10 @@ KnownInstanceType FindKnownMapInstanceTypes(
     return KnownInstanceType(
         FindKnownMapInstanceTypeInMapSpace(offset_in_page));
   }
+  if (containing_page == heap_addresses.old_space_first_page) {
+    return KnownInstanceType(
+        FindKnownMapInstanceTypeInOldSpace(offset_in_page));
+  }
   if (containing_page == heap_addresses.read_only_space_first_page) {
     return KnownInstanceType(
         FindKnownMapInstanceTypeInReadOnlySpace(offset_in_page));
@@ -74,6 +78,12 @@ KnownInstanceType FindKnownMapInstanceTypes(
       result.types.push_back(static_cast<i::InstanceType>(sub_result));
     }
   }
+  if (heap_addresses.old_space_first_page == 0) {
+    int sub_result = FindKnownMapInstanceTypeInOldSpace(offset_in_page);
+    if (sub_result >= 0) {
+      result.types.push_back(static_cast<i::InstanceType>(sub_result));
+    }
+  }
   if (heap_addresses.read_only_space_first_page == 0) {
     int sub_result = FindKnownMapInstanceTypeInReadOnlySpace(offset_in_page);
     if (sub_result >= 0) {
......
@@ -34,6 +34,7 @@ void FillInUnknownHeapAddresses(d::HeapAddresses* heap_addresses,
 // Returns the instance type for the known Map, given its offset within the
 // first page of the space, or empty string on failure.
 int FindKnownMapInstanceTypeInMapSpace(uintptr_t offset);
+int FindKnownMapInstanceTypeInOldSpace(uintptr_t offset);
 int FindKnownMapInstanceTypeInReadOnlySpace(uintptr_t offset);

 // ===== End of generated functions. ===========================================
......
@@ -15,7 +15,7 @@ EXTRA_FLAGS = [
     (0.1, '--assert-types'),
     (0.1, '--interrupt-budget-for-feedback-allocation=0'),
     (0.1, '--cache=code'),
-    (0.25, '--compact-map-space'),
+    (0.25, '--compact-maps'),
     (0.1, '--force-slow-path'),
     (0.2, '--future'),
     (0.1, '--interrupt-budget=100'),
......