Commit 6a5e24b2 authored by mlippautz, committed by Commit bot

Move MemoryAllocator and CodeRange into Heap

- MemoryAllocator is now part of Heap
- CodeRange is now part of MemoryAllocator
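
In effect, both components now hang off the Heap rather than the Isolate. A minimal sketch of the resulting accessor chain, simplified from the diff below (not the literal declarations):

    // Heap creates and owns the MemoryAllocator
    // (allocated in Heap::SetUp, deleted in Heap::TearDown).
    MemoryAllocator* allocator = isolate->heap()->memory_allocator();

    // MemoryAllocator in turn creates and owns the CodeRange
    // (set up in MemoryAllocator::SetUp, deleted in its TearDown).
    CodeRange* code_range = allocator->code_range();

Call sites that previously went through isolate->memory_allocator() and isolate->code_range() are rewritten accordingly.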

BUG=chromium:581076
LOG=N

Review URL: https://codereview.chromium.org/1862653002

Cr-Commit-Position: refs/heads/master@{#35294}
parent 08454486
@@ -7189,7 +7189,7 @@ void Isolate::AddMemoryAllocationCallback(MemoryAllocationCallback callback,
                                           ObjectSpace space,
                                           AllocationAction action) {
   i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
-  isolate->memory_allocator()->AddMemoryAllocationCallback(
+  isolate->heap()->memory_allocator()->AddMemoryAllocationCallback(
       callback, space, action);
 }
@@ -7197,8 +7197,7 @@ void Isolate::AddMemoryAllocationCallback(MemoryAllocationCallback callback,
 void Isolate::RemoveMemoryAllocationCallback(
     MemoryAllocationCallback callback) {
   i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
-  isolate->memory_allocator()->RemoveMemoryAllocationCallback(
-      callback);
+  isolate->heap()->memory_allocator()->RemoveMemoryAllocationCallback(callback);
 }
@@ -7693,9 +7692,10 @@ void Isolate::SetStackLimit(uintptr_t stack_limit) {
 void Isolate::GetCodeRange(void** start, size_t* length_in_bytes) {
   i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
-  if (isolate->code_range()->valid()) {
-    *start = isolate->code_range()->start();
-    *length_in_bytes = isolate->code_range()->size();
+  if (isolate->heap()->memory_allocator()->code_range()->valid()) {
+    *start = isolate->heap()->memory_allocator()->code_range()->start();
+    *length_in_bytes =
+        isolate->heap()->memory_allocator()->code_range()->size();
   } else {
     *start = NULL;
     *length_in_bytes = 0;
...
@@ -116,7 +116,7 @@ void StatisticsExtension::GetCounters(
   };
   const StatisticNumber numbers[] = {
-      {isolate->memory_allocator()->Size(), "total_committed_bytes"},
+      {heap->memory_allocator()->Size(), "total_committed_bytes"},
       {heap->new_space()->Size(), "new_space_live_bytes"},
       {heap->new_space()->Available(), "new_space_available_bytes"},
       {heap->new_space()->CommittedMemory(), "new_space_commited_bytes"},
...
@@ -1401,8 +1401,10 @@ Handle<Code> Factory::NewCode(const CodeDesc& desc,
   int obj_size = Code::SizeFor(body_size);
   Handle<Code> code = NewCodeRaw(obj_size, immovable);
-  DCHECK(isolate()->code_range() == NULL || !isolate()->code_range()->valid() ||
-         isolate()->code_range()->contains(code->address()) ||
+  DCHECK(isolate()->heap()->memory_allocator()->code_range() == NULL ||
+         !isolate()->heap()->memory_allocator()->code_range()->valid() ||
+         isolate()->heap()->memory_allocator()->code_range()->contains(
+             code->address()) ||
          obj_size <= isolate()->heap()->code_space()->AreaSize());
 
   // The code object has not been fully initialized yet. We rely on the
...
@@ -164,7 +164,7 @@ void GCTracer::Start(GarbageCollector collector, const char* gc_reason,
   current_.reduce_memory = heap_->ShouldReduceMemory();
   current_.start_time = start_time;
   current_.start_object_size = heap_->SizeOfObjects();
-  current_.start_memory_size = heap_->isolate()->memory_allocator()->Size();
+  current_.start_memory_size = heap_->memory_allocator()->Size();
   current_.start_holes_size = CountTotalHolesSize(heap_);
   current_.new_space_object_size =
       heap_->new_space()->top() - heap_->new_space()->bottom();
@@ -214,7 +214,7 @@ void GCTracer::Stop(GarbageCollector collector) {
   current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
   current_.end_object_size = heap_->SizeOfObjects();
-  current_.end_memory_size = heap_->isolate()->memory_allocator()->Size();
+  current_.end_memory_size = heap_->memory_allocator()->Size();
   current_.end_holes_size = CountTotalHolesSize(heap_);
   current_.survived_new_space_object_size = heap_->SurvivedNewSpaceObjectSize();
...
@@ -71,7 +71,7 @@ class IdleScavengeObserver : public AllocationObserver {
 Heap::Heap()
     : amount_of_external_allocated_memory_(0),
       amount_of_external_allocated_memory_at_last_global_gc_(0),
-      isolate_(NULL),
+      isolate_(nullptr),
       code_range_size_(0),
       // semispace_size_ should be a power of 2 and old_generation_size_ should
       // be a multiple of Page::kPageSize.
@@ -136,6 +136,7 @@ Heap::Heap()
       last_gc_time_(0.0),
       scavenge_collector_(nullptr),
       mark_compact_collector_(nullptr),
+      memory_allocator_(nullptr),
       store_buffer_(this),
       incremental_marking_(nullptr),
       gc_idle_time_handler_(nullptr),
@@ -225,7 +226,7 @@ size_t Heap::CommittedPhysicalMemory() {
 intptr_t Heap::CommittedMemoryExecutable() {
   if (!HasBeenSetUp()) return 0;
 
-  return isolate()->memory_allocator()->SizeExecutable();
+  return memory_allocator()->SizeExecutable();
 }
@@ -296,7 +297,7 @@ GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
   // and does not count available bytes already in the old space or code
   // space.  Undercounting is safe---we may get an unrequested full GC when
   // a scavenge would have succeeded.
-  if (isolate_->memory_allocator()->MaxAvailable() <= new_space_.Size()) {
+  if (memory_allocator()->MaxAvailable() <= new_space_.Size()) {
     isolate_->counters()
         ->gc_compactor_caused_by_oldspace_exhaustion()
         ->Increment();
@@ -339,8 +340,8 @@ void Heap::PrintShortHeapStatistics() {
   PrintIsolate(isolate_, "Memory allocator,   used: %6" V8_PTR_PREFIX
                          "d KB"
                          ", available: %6" V8_PTR_PREFIX "d KB\n",
-               isolate_->memory_allocator()->Size() / KB,
-               isolate_->memory_allocator()->Available() / KB);
+               memory_allocator()->Size() / KB,
+               memory_allocator()->Available() / KB);
   PrintIsolate(isolate_, "New space,          used: %6" V8_PTR_PREFIX
                          "d KB"
                          ", available: %6" V8_PTR_PREFIX
@@ -3341,8 +3342,9 @@ AllocationResult Heap::AllocateCode(int object_size, bool immovable) {
   result->set_map_no_write_barrier(code_map());
   Code* code = Code::cast(result);
   DCHECK(IsAligned(bit_cast<intptr_t>(code->address()), kCodeAlignment));
-  DCHECK(isolate_->code_range() == NULL || !isolate_->code_range()->valid() ||
-         isolate_->code_range()->contains(code->address()) ||
+  DCHECK(memory_allocator()->code_range() == NULL ||
+         !memory_allocator()->code_range()->valid() ||
+         memory_allocator()->code_range()->contains(code->address()) ||
          object_size <= code_space()->AreaSize());
   code->set_gc_metadata(Smi::FromInt(0));
   code->set_ic_age(global_ic_age_);
@@ -3367,8 +3369,9 @@ AllocationResult Heap::CopyCode(Code* code) {
   // Relocate the copy.
   DCHECK(IsAligned(bit_cast<intptr_t>(new_code->address()), kCodeAlignment));
-  DCHECK(isolate_->code_range() == NULL || !isolate_->code_range()->valid() ||
-         isolate_->code_range()->contains(code->address()) ||
+  DCHECK(memory_allocator()->code_range() == NULL ||
+         !memory_allocator()->code_range()->valid() ||
+         memory_allocator()->code_range()->contains(code->address()) ||
          obj_size <= code_space()->AreaSize());
   new_code->Relocate(new_addr - old_addr);
   // We have to iterate over the object and process its pointers when black
@@ -3436,8 +3439,9 @@ AllocationResult Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
   // Relocate the copy.
   DCHECK(IsAligned(bit_cast<intptr_t>(new_code->address()), kCodeAlignment));
-  DCHECK(isolate_->code_range() == NULL || !isolate_->code_range()->valid() ||
-         isolate_->code_range()->contains(code->address()) ||
+  DCHECK(memory_allocator()->code_range() == NULL ||
+         !memory_allocator()->code_range()->valid() ||
+         memory_allocator()->code_range()->contains(code->address()) ||
          new_obj_size <= code_space()->AreaSize());
 
   new_code->Relocate(new_addr - old_addr);
@@ -4482,7 +4486,7 @@ void Heap::ReportHeapStatistics(const char* title) {
   PrintF("\n");
 
   PrintF("Heap statistics : ");
-  isolate_->memory_allocator()->ReportStatistics();
+  memory_allocator()->ReportStatistics();
   PrintF("To space : ");
   new_space_.ReportStatistics();
   PrintF("Old space : ");
@@ -4499,7 +4503,7 @@ void Heap::ReportHeapStatistics(const char* title) {
 #endif  // DEBUG
 
 bool Heap::Contains(HeapObject* value) {
-  if (isolate_->memory_allocator()->IsOutsideAllocatedSpace(value->address())) {
+  if (memory_allocator()->IsOutsideAllocatedSpace(value->address())) {
     return false;
   }
   return HasBeenSetUp() &&
@@ -4509,7 +4513,7 @@ bool Heap::Contains(HeapObject* value) {
 }
 
 bool Heap::ContainsSlow(Address addr) {
-  if (isolate_->memory_allocator()->IsOutsideAllocatedSpace(addr)) {
+  if (memory_allocator()->IsOutsideAllocatedSpace(addr)) {
     return false;
   }
   return HasBeenSetUp() &&
@@ -4519,7 +4523,7 @@ bool Heap::ContainsSlow(Address addr) {
 }
 
 bool Heap::InSpace(HeapObject* value, AllocationSpace space) {
-  if (isolate_->memory_allocator()->IsOutsideAllocatedSpace(value->address())) {
+  if (memory_allocator()->IsOutsideAllocatedSpace(value->address())) {
     return false;
   }
   if (!HasBeenSetUp()) return false;
@@ -4541,7 +4545,7 @@ bool Heap::InSpace(HeapObject* value, AllocationSpace space) {
 }
 
 bool Heap::InSpaceSlow(Address addr, AllocationSpace space) {
-  if (isolate_->memory_allocator()->IsOutsideAllocatedSpace(addr)) {
+  if (memory_allocator()->IsOutsideAllocatedSpace(addr)) {
     return false;
   }
   if (!HasBeenSetUp()) return false;
@@ -4982,12 +4986,11 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
   *stats->map_space_capacity = map_space_->Capacity();
   *stats->lo_space_size = lo_space_->Size();
   isolate_->global_handles()->RecordStats(stats);
-  *stats->memory_allocator_size = isolate()->memory_allocator()->Size();
+  *stats->memory_allocator_size = memory_allocator()->Size();
   *stats->memory_allocator_capacity =
-      isolate()->memory_allocator()->Size() +
-      isolate()->memory_allocator()->Available();
+      memory_allocator()->Size() + memory_allocator()->Available();
   *stats->os_error = base::OS::GetLastError();
-  isolate()->memory_allocator()->Available();
+  memory_allocator()->Available();
   if (take_snapshot) {
     HeapIterator iterator(this);
     for (HeapObject* obj = iterator.next(); obj != NULL;
@@ -5222,7 +5225,9 @@ bool Heap::SetUp() {
   base::CallOnce(&initialize_gc_once, &InitializeGCOnce);
 
   // Set up memory allocator.
-  if (!isolate_->memory_allocator()->SetUp(MaxReserved(), MaxExecutableSize()))
+  memory_allocator_ = new MemoryAllocator(isolate_);
+  if (!memory_allocator_->SetUp(MaxReserved(), MaxExecutableSize(),
+                                code_range_size_))
     return false;
 
   // Initialize incremental marking.
@@ -5239,8 +5244,6 @@ bool Heap::SetUp() {
   if (old_space_ == NULL) return false;
   if (!old_space_->SetUp()) return false;
 
-  if (!isolate_->code_range()->SetUp(code_range_size_)) return false;
-
   // Initialize the code space, set its maximum capacity to the old
   // generation size. It needs executable memory.
   code_space_ = new OldSpace(this, CODE_SPACE, EXECUTABLE);
@@ -5480,7 +5483,7 @@ void Heap::TearDown() {
   store_buffer()->TearDown();
 
-  isolate_->memory_allocator()->TearDown();
+  memory_allocator()->TearDown();
 
   StrongRootsList* next = NULL;
   for (StrongRootsList* list = strong_roots_list_; list; list = next) {
@@ -5488,6 +5491,9 @@ void Heap::TearDown() {
     delete list;
   }
   strong_roots_list_ = NULL;
+
+  delete memory_allocator_;
+  memory_allocator_ = nullptr;
 }
@@ -6287,7 +6293,7 @@ void Heap::WaitUntilUnmappingOfFreeChunksCompleted() {
 void Heap::QueueMemoryChunkForFree(MemoryChunk* chunk) {
   // PreFree logically frees the memory chunk. However, the actual freeing
   // will happen on a separate thread sometime later.
-  isolate_->memory_allocator()->PreFreeMemory(chunk);
+  memory_allocator()->PreFreeMemory(chunk);
 
   // The chunks added to this queue will be freed by a concurrent thread.
   chunk->set_next_chunk(chunks_queued_for_free_);
@@ -6320,7 +6326,7 @@ void Heap::FreeQueuedChunks(MemoryChunk* list_head) {
   MemoryChunk* chunk;
   for (chunk = list_head; chunk != NULL; chunk = next) {
     next = chunk->next_chunk();
-    isolate_->memory_allocator()->PerformFreeMemory(chunk);
+    memory_allocator()->PerformFreeMemory(chunk);
   }
 }
...
@@ -927,6 +927,8 @@ class Heap {
 
   GCTracer* tracer() { return tracer_; }
 
+  MemoryAllocator* memory_allocator() { return memory_allocator_; }
+
   EmbedderHeapTracer* embedder_heap_tracer() { return embedder_heap_tracer_; }
 
   PromotionQueue* promotion_queue() { return &promotion_queue_; }
@@ -2149,6 +2151,8 @@ class Heap {
 
   MarkCompactCollector* mark_compact_collector_;
 
+  MemoryAllocator* memory_allocator_;
+
   StoreBuffer store_buffer_;
 
   IncrementalMarking* incremental_marking_;
@@ -2252,6 +2256,7 @@ class Heap {
   friend class Page;
   friend class Scavenger;
   friend class StoreBuffer;
+  friend class TestMemoryAllocatorScope;
 
   // The allocator interface.
   friend class Factory;
...
@@ -680,7 +680,7 @@ LargePage* LargePage::Initialize(Heap* heap, MemoryChunk* chunk,
 
 intptr_t LargeObjectSpace::Available() {
-  return ObjectSizeFor(heap()->isolate()->memory_allocator()->Available());
+  return ObjectSizeFor(heap()->memory_allocator()->Available());
 }
...
@@ -222,7 +222,7 @@ Address CodeRange::AllocateRawMemory(const size_t requested_size,
   *allocated = current.size;
   DCHECK(*allocated <= current.size);
   DCHECK(IsAddressAligned(current.start, MemoryChunk::kAlignment));
-  if (!isolate_->memory_allocator()->CommitExecutableMemory(
+  if (!isolate_->heap()->memory_allocator()->CommitExecutableMemory(
           code_range_, current.start, commit_size, *allocated)) {
     *allocated = 0;
     ReleaseBlock(&current);
@@ -233,7 +233,8 @@ Address CodeRange::AllocateRawMemory(const size_t requested_size,
 
 bool CodeRange::CommitRawMemory(Address start, size_t length) {
-  return isolate_->memory_allocator()->CommitMemory(start, length, EXECUTABLE);
+  return isolate_->heap()->memory_allocator()->CommitMemory(start, length,
+                                                            EXECUTABLE);
 }
@@ -294,6 +295,7 @@ void CodeRange::ReleaseBlock(const FreeBlock* block) {
 MemoryAllocator::MemoryAllocator(Isolate* isolate)
     : isolate_(isolate),
+      code_range_(nullptr),
       capacity_(0),
       capacity_executable_(0),
       size_(0),
@@ -301,8 +303,8 @@ MemoryAllocator::MemoryAllocator(Isolate* isolate)
       lowest_ever_allocated_(reinterpret_cast<void*>(-1)),
       highest_ever_allocated_(reinterpret_cast<void*>(0)) {}
 
-
-bool MemoryAllocator::SetUp(intptr_t capacity, intptr_t capacity_executable) {
+bool MemoryAllocator::SetUp(intptr_t capacity, intptr_t capacity_executable,
+                            intptr_t code_range_size) {
   capacity_ = RoundUp(capacity, Page::kPageSize);
   capacity_executable_ = RoundUp(capacity_executable, Page::kPageSize);
   DCHECK_GE(capacity_, capacity_executable_);
@@ -310,6 +312,9 @@ bool MemoryAllocator::SetUp(intptr_t capacity, intptr_t capacity_executable) {
   size_ = 0;
   size_executable_ = 0;
 
+  code_range_ = new CodeRange(isolate_);
+  if (!code_range_->SetUp(static_cast<size_t>(code_range_size))) return false;
+
   return true;
 }
@@ -325,6 +330,9 @@ void MemoryAllocator::TearDown() {
   // DCHECK(size_executable_ == 0);
   capacity_ = 0;
   capacity_executable_ = 0;
+
+  delete code_range_;
+  code_range_ = nullptr;
 }
 
 bool MemoryAllocator::CommitMemory(Address base, size_t size,
@@ -342,12 +350,10 @@ void MemoryAllocator::FreeMemory(base::VirtualMemory* reservation,
                                  Executability executable) {
   // TODO(gc) make code_range part of memory allocator?
   // Code which is part of the code-range does not have its own VirtualMemory.
-  DCHECK(isolate_->code_range() == NULL ||
-         !isolate_->code_range()->contains(
-             static_cast<Address>(reservation->address())));
-  DCHECK(executable == NOT_EXECUTABLE || isolate_->code_range() == NULL ||
-         !isolate_->code_range()->valid() ||
-         reservation->size() <= Page::kPageSize);
+  DCHECK(code_range() == NULL ||
+         !code_range()->contains(static_cast<Address>(reservation->address())));
+  DCHECK(executable == NOT_EXECUTABLE || code_range() == NULL ||
+         !code_range()->valid() || reservation->size() <= Page::kPageSize);
 
   reservation->Release();
 }
@@ -356,20 +362,19 @@ void MemoryAllocator::FreeMemory(base::VirtualMemory* reservation,
 void MemoryAllocator::FreeMemory(Address base, size_t size,
                                  Executability executable) {
   // TODO(gc) make code_range part of memory allocator?
-  if (isolate_->code_range() != NULL &&
-      isolate_->code_range()->contains(static_cast<Address>(base))) {
+  if (code_range() != NULL &&
+      code_range()->contains(static_cast<Address>(base))) {
     DCHECK(executable == EXECUTABLE);
-    isolate_->code_range()->FreeRawMemory(base, size);
+    code_range()->FreeRawMemory(base, size);
   } else {
-    DCHECK(executable == NOT_EXECUTABLE || isolate_->code_range() == NULL ||
-           !isolate_->code_range()->valid());
+    DCHECK(executable == NOT_EXECUTABLE || code_range() == NULL ||
+           !code_range()->valid());
     bool result = base::VirtualMemory::ReleaseRegion(base, size);
     USE(result);
     DCHECK(result);
   }
 }
 
 Address MemoryAllocator::ReserveAlignedMemory(size_t size, size_t alignment,
                                               base::VirtualMemory* controller) {
   base::VirtualMemory reservation(size, alignment);
@@ -382,7 +387,6 @@ Address MemoryAllocator::ReserveAlignedMemory(size_t size, size_t alignment,
   return base;
 }
 
-
 Address MemoryAllocator::AllocateAlignedMemory(
     size_t reserve_size, size_t commit_size, size_t alignment,
     Executability executable, base::VirtualMemory* controller) {
@@ -496,19 +500,19 @@ bool MemoryChunk::CommitArea(size_t requested) {
     if (reservation_.IsReserved()) {
       Executability executable =
           IsFlagSet(IS_EXECUTABLE) ? EXECUTABLE : NOT_EXECUTABLE;
-      if (!heap()->isolate()->memory_allocator()->CommitMemory(start, length,
-                                                               executable)) {
+      if (!heap()->memory_allocator()->CommitMemory(start, length,
+                                                    executable)) {
         return false;
       }
     } else {
-      CodeRange* code_range = heap_->isolate()->code_range();
+      CodeRange* code_range = heap_->memory_allocator()->code_range();
       DCHECK(code_range != NULL && code_range->valid() &&
              IsFlagSet(IS_EXECUTABLE));
       if (!code_range->CommitRawMemory(start, length)) return false;
     }
 
     if (Heap::ShouldZapGarbage()) {
-      heap_->isolate()->memory_allocator()->ZapBlock(start, length);
+      heap_->memory_allocator()->ZapBlock(start, length);
     }
   } else if (commit_size < committed_size) {
     DCHECK(commit_size > 0);
@@ -518,7 +522,7 @@ bool MemoryChunk::CommitArea(size_t requested) {
     if (reservation_.IsReserved()) {
       if (!reservation_.Uncommit(start, length)) return false;
     } else {
-      CodeRange* code_range = heap_->isolate()->code_range();
+      CodeRange* code_range = heap_->memory_allocator()->code_range();
       DCHECK(code_range != NULL && code_range->valid() &&
              IsFlagSet(IS_EXECUTABLE));
       if (!code_range->UncommitRawMemory(start, length)) return false;
@@ -614,13 +618,13 @@ MemoryChunk* MemoryAllocator::AllocateChunk(intptr_t reserve_area_size,
 #ifdef V8_TARGET_ARCH_MIPS64
     // Use code range only for large object space on mips64 to keep address
     // range within 256-MB memory region.
-    if (isolate_->code_range() != NULL && isolate_->code_range()->valid() &&
-        reserve_area_size > CodePageAreaSize()) {
+    if (code_range() != NULL && code_range()->valid() &&
+        reserve_area_size > CodePageAreaSize()) {
 #else
-    if (isolate_->code_range() != NULL && isolate_->code_range()->valid()) {
+    if (code_range() != NULL && code_range()->valid()) {
 #endif
-      base = isolate_->code_range()->AllocateRawMemory(chunk_size, commit_size,
-                                                       &chunk_size);
+      base =
+          code_range()->AllocateRawMemory(chunk_size, commit_size, &chunk_size);
       DCHECK(
           IsAligned(reinterpret_cast<intptr_t>(base), MemoryChunk::kAlignment));
       if (base == NULL) return NULL;
@@ -1036,7 +1040,7 @@ bool PagedSpace::HasBeenSetUp() { return true; }
 void PagedSpace::TearDown() {
   PageIterator iterator(this);
   while (iterator.has_next()) {
-    heap()->isolate()->memory_allocator()->Free(iterator.next());
+    heap()->memory_allocator()->Free(iterator.next());
   }
   anchor_.set_next_page(&anchor_);
   anchor_.set_prev_page(&anchor_);
@@ -1172,8 +1176,8 @@ bool PagedSpace::Expand() {
   if (!CanExpand(size)) return false;
 
-  Page* p = heap()->isolate()->memory_allocator()->AllocatePage<Page>(
-      size, this, executable());
+  Page* p =
+      heap()->memory_allocator()->AllocatePage<Page>(size, this, executable());
   if (p == NULL) return false;
 
   AccountCommitted(static_cast<intptr_t>(p->size()));
@@ -1682,7 +1686,6 @@ bool SemiSpace::Commit() {
   for (int i = 0; i < num_pages; i++) {
     NewSpacePage* new_page =
         heap()
-            ->isolate()
             ->memory_allocator()
             ->AllocatePage<NewSpacePage, MemoryAllocator::kPooled>(
                 NewSpacePage::kAllocatableMemory, this, executable());
@@ -1703,8 +1706,7 @@ bool SemiSpace::Uncommit() {
   DCHECK(is_committed());
   NewSpacePageIterator it(this);
   while (it.has_next()) {
-    heap()->isolate()->memory_allocator()->Free<MemoryAllocator::kPooled>(
-        it.next());
+    heap()->memory_allocator()->Free<MemoryAllocator::kPooled>(it.next());
   }
   anchor()->set_next_page(anchor());
   anchor()->set_prev_page(anchor());
@@ -1740,7 +1742,6 @@ bool SemiSpace::GrowTo(int new_capacity) {
   while (delta_pages > 0) {
     NewSpacePage* new_page =
         heap()
-            ->isolate()
             ->memory_allocator()
             ->AllocatePage<NewSpacePage, MemoryAllocator::kPooled>(
                 NewSpacePage::kAllocatableMemory, this, executable());
@@ -1773,8 +1774,7 @@ bool SemiSpace::ShrinkTo(int new_capacity) {
     new_last_page = last_page->prev_page();
     new_last_page->set_next_page(anchor());
     anchor()->set_prev_page(new_last_page);
-    heap()->isolate()->memory_allocator()->Free<MemoryAllocator::kPooled>(
-        last_page);
+    heap()->memory_allocator()->Free<MemoryAllocator::kPooled>(last_page);
     delta_pages--;
   }
   AccountUncommitted(static_cast<intptr_t>(delta));
@@ -2860,9 +2860,9 @@ void LargeObjectSpace::TearDown() {
     LOG(heap()->isolate(), DeleteEvent("LargeObjectChunk", page->address()));
 
     ObjectSpace space = static_cast<ObjectSpace>(1 << identity());
-    heap()->isolate()->memory_allocator()->PerformAllocationCallback(
-        space, kAllocationActionFree, page->size());
-    heap()->isolate()->memory_allocator()->Free(page);
+    heap()->memory_allocator()->PerformAllocationCallback(
+        space, kAllocationActionFree, page->size());
+    heap()->memory_allocator()->Free(page);
   }
   SetUp();
 }
@@ -2876,9 +2876,8 @@ AllocationResult LargeObjectSpace::AllocateRaw(int object_size,
     return AllocationResult::Retry(identity());
   }
 
-  LargePage* page =
-      heap()->isolate()->memory_allocator()->AllocatePage<LargePage>(
-          object_size, this, executable);
+  LargePage* page = heap()->memory_allocator()->AllocatePage<LargePage>(
+      object_size, this, executable);
   if (page == NULL) return AllocationResult::Retry(identity());
   DCHECK(page->area_size() >= object_size);
...
@@ -1257,7 +1257,8 @@ class MemoryAllocator {
 
   // Initializes its internal bookkeeping structures.
   // Max capacity of the total space and executable memory limit.
-  bool SetUp(intptr_t max_capacity, intptr_t capacity_executable);
+  bool SetUp(intptr_t max_capacity, intptr_t capacity_executable,
+             intptr_t code_range_size);
 
   void TearDown();
@@ -1383,6 +1384,8 @@ class MemoryAllocator {
                               Address start, size_t commit_size,
                               size_t reserved_size);
 
+  CodeRange* code_range() { return code_range_; }
+
  private:
   // See AllocatePage for public interface. Note that currently we only support
   // pools for NOT_EXECUTABLE pages of size MemoryChunk::kPageSize.
@@ -1394,6 +1397,8 @@ class MemoryAllocator {
 
   Isolate* isolate_;
 
+  CodeRange* code_range_;
+
   // Maximum space size in bytes.
   intptr_t capacity_;
   // Maximum subset of capacity_ that can be executable
@@ -1447,6 +1452,8 @@ class MemoryAllocator {
 
   List<MemoryChunk*> chunk_pool_;
 
+  friend class TestCodeRangeScope;
+
   DISALLOW_IMPLICIT_CONSTRUCTORS(MemoryAllocator);
 };
...
@@ -1798,7 +1798,6 @@ Isolate::Isolate(bool enable_serializer)
       runtime_profiler_(NULL),
       compilation_cache_(NULL),
       counters_(NULL),
-      code_range_(NULL),
       logger_(NULL),
       stats_table_(NULL),
       stub_cache_(NULL),
@@ -1809,7 +1808,6 @@ Isolate::Isolate(bool enable_serializer)
       capture_stack_trace_for_uncaught_exceptions_(false),
       stack_trace_for_uncaught_exceptions_frame_limit_(0),
       stack_trace_for_uncaught_exceptions_options_(StackTrace::kOverview),
-      memory_allocator_(NULL),
       keyed_lookup_cache_(NULL),
       context_slot_cache_(NULL),
       descriptor_lookup_cache_(NULL),
@@ -2067,10 +2065,6 @@ Isolate::~Isolate() {
   delete thread_manager_;
   thread_manager_ = NULL;
 
-  delete memory_allocator_;
-  memory_allocator_ = NULL;
-  delete code_range_;
-  code_range_ = NULL;
   delete global_handles_;
   global_handles_ = NULL;
   delete eternal_handles_;
@@ -2164,9 +2158,6 @@ bool Isolate::Init(Deserializer* des) {
   // The initialization process does not handle memory exhaustion.
   AlwaysAllocateScope always_allocate(this);
 
-  memory_allocator_ = new MemoryAllocator(this);
-  code_range_ = new CodeRange(this);
-
   // Safe after setting Heap::isolate_, and initializing StackGuard
   heap_.SetStackLimits();
@@ -2225,7 +2216,7 @@ bool Isolate::Init(Deserializer* des) {
     return false;
   }
 
-  deoptimizer_data_ = new DeoptimizerData(memory_allocator_);
+  deoptimizer_data_ = new DeoptimizerData(heap()->memory_allocator());
 
   const bool create_heap_objects = (des == NULL);
   if (create_heap_objects && !heap_.CreateHeapObjects()) {
...
@@ -817,7 +817,6 @@ class Isolate {
     DCHECK(counters_ != NULL);
     return counters_;
   }
-  CodeRange* code_range() { return code_range_; }
   RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
   CompilationCache* compilation_cache() { return compilation_cache_; }
   Logger* logger() {
@@ -841,10 +840,6 @@ class Isolate {
     return materialized_object_store_;
   }
 
-  MemoryAllocator* memory_allocator() {
-    return memory_allocator_;
-  }
-
   KeyedLookupCache* keyed_lookup_cache() {
     return keyed_lookup_cache_;
   }
@@ -1240,7 +1235,6 @@ class Isolate {
   RuntimeProfiler* runtime_profiler_;
   CompilationCache* compilation_cache_;
   Counters* counters_;
-  CodeRange* code_range_;
   base::RecursiveMutex break_access_;
   Logger* logger_;
   StackGuard stack_guard_;
@@ -1254,7 +1248,6 @@ class Isolate {
   bool capture_stack_trace_for_uncaught_exceptions_;
   int stack_trace_for_uncaught_exceptions_frame_limit_;
   StackTrace::StackTraceOptions stack_trace_for_uncaught_exceptions_options_;
-  MemoryAllocator* memory_allocator_;
   KeyedLookupCache* keyed_lookup_cache_;
   ContextSlotCache* context_slot_cache_;
   DescriptorLookupCache* descriptor_lookup_cache_;
@@ -1381,8 +1374,6 @@ class Isolate {
   friend class Simulator;
   friend class StackGuard;
   friend class ThreadId;
-  friend class TestMemoryAllocatorScope;
-  friend class TestCodeRangeScope;
   friend class v8::Isolate;
   friend class v8::Locker;
   friend class v8::Unlocker;
...
@@ -6762,7 +6762,7 @@ bool RegExpEngine::TooMuchRegExpCode(Handle<String> pattern) {
   Heap* heap = pattern->GetHeap();
   bool too_much = pattern->length() > RegExpImpl::kRegExpTooLargeToOptimize;
   if (heap->total_regexp_code_generated() > RegExpImpl::kRegExpCompiledLimit &&
-      heap->isolate()->memory_allocator()->SizeExecutable() >
+      heap->memory_allocator()->SizeExecutable() >
           RegExpImpl::kRegExpExecutableMemoryLimit) {
     too_much = true;
   }
...
@@ -78,7 +78,8 @@ void Assembler::emit_code_target(Handle<Code> target,
 void Assembler::emit_runtime_entry(Address entry, RelocInfo::Mode rmode) {
   DCHECK(RelocInfo::IsRuntimeEntry(rmode));
   RecordRelocInfo(rmode);
-  emitl(static_cast<uint32_t>(entry - isolate()->code_range()->start()));
+  emitl(static_cast<uint32_t>(
+      entry - isolate()->heap()->memory_allocator()->code_range()->start()));
 }
@@ -299,7 +300,8 @@ Handle<Object> Assembler::code_target_object_handle_at(Address pc) {
 Address Assembler::runtime_entry_at(Address pc) {
-  return Memory::int32_at(pc) + isolate()->code_range()->start();
+  return Memory::int32_at(pc) +
+         isolate()->heap()->memory_allocator()->code_range()->start();
 }
 
 // -----------------------------------------------------------------------------
...
@@ -107,13 +107,12 @@ TEST(Page) {
 class TestMemoryAllocatorScope {
  public:
   TestMemoryAllocatorScope(Isolate* isolate, MemoryAllocator* allocator)
-      : isolate_(isolate),
-        old_allocator_(isolate->memory_allocator_) {
-    isolate->memory_allocator_ = allocator;
+      : isolate_(isolate), old_allocator_(isolate->heap()->memory_allocator()) {
+    isolate->heap()->memory_allocator_ = allocator;
   }
 
   ~TestMemoryAllocatorScope() {
-    isolate_->memory_allocator_ = old_allocator_;
+    isolate_->heap()->memory_allocator_ = old_allocator_;
   }
 
  private:
@@ -129,12 +128,12 @@ class TestCodeRangeScope {
  public:
   TestCodeRangeScope(Isolate* isolate, CodeRange* code_range)
       : isolate_(isolate),
-        old_code_range_(isolate->code_range_) {
-    isolate->code_range_ = code_range;
+        old_code_range_(isolate->heap()->memory_allocator()->code_range()) {
+    isolate->heap()->memory_allocator()->code_range_ = code_range;
   }
 
   ~TestCodeRangeScope() {
-    isolate_->code_range_ = old_code_range_;
+    isolate_->heap()->memory_allocator()->code_range_ = old_code_range_;
   }
 
  private:
@@ -153,50 +152,49 @@ static void VerifyMemoryChunk(Isolate* isolate,
                               size_t second_commit_area_size,
                               Executability executable) {
   MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
-  CHECK(memory_allocator->SetUp(heap->MaxReserved(),
-                                heap->MaxExecutableSize()));
-  TestMemoryAllocatorScope test_allocator_scope(isolate, memory_allocator);
-  TestCodeRangeScope test_code_range_scope(isolate, code_range);
-
-  size_t header_size = (executable == EXECUTABLE)
-                           ? MemoryAllocator::CodePageGuardStartOffset()
-                           : MemoryChunk::kObjectStartOffset;
-  size_t guard_size = (executable == EXECUTABLE)
-                          ? MemoryAllocator::CodePageGuardSize()
-                          : 0;
-
-  MemoryChunk* memory_chunk = memory_allocator->AllocateChunk(reserve_area_size,
-                                                              commit_area_size,
-                                                              executable,
-                                                              NULL);
-  size_t alignment = code_range != NULL && code_range->valid()
-                         ? MemoryChunk::kAlignment
-                         : base::OS::CommitPageSize();
-  size_t reserved_size =
-      ((executable == EXECUTABLE))
-          ? RoundUp(header_size + guard_size + reserve_area_size + guard_size,
-                    alignment)
-          : RoundUp(header_size + reserve_area_size,
-                    base::OS::CommitPageSize());
-  CHECK(memory_chunk->size() == reserved_size);
-  CHECK(memory_chunk->area_start() < memory_chunk->address() +
-                                         memory_chunk->size());
-  CHECK(memory_chunk->area_end() <= memory_chunk->address() +
-                                        memory_chunk->size());
-  CHECK(static_cast<size_t>(memory_chunk->area_size()) == commit_area_size);
-
-  Address area_start = memory_chunk->area_start();
-
-  memory_chunk->CommitArea(second_commit_area_size);
-  CHECK(area_start == memory_chunk->area_start());
-  CHECK(memory_chunk->area_start() < memory_chunk->address() +
-                                         memory_chunk->size());
-  CHECK(memory_chunk->area_end() <= memory_chunk->address() +
-                                        memory_chunk->size());
-  CHECK(static_cast<size_t>(memory_chunk->area_size()) ==
-        second_commit_area_size);
-
-  memory_allocator->Free(memory_chunk);
+  CHECK(memory_allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize(),
+                                0));
+  {
+    TestMemoryAllocatorScope test_allocator_scope(isolate, memory_allocator);
+    TestCodeRangeScope test_code_range_scope(isolate, code_range);
+
+    size_t header_size = (executable == EXECUTABLE)
+                             ? MemoryAllocator::CodePageGuardStartOffset()
+                             : MemoryChunk::kObjectStartOffset;
+    size_t guard_size =
+        (executable == EXECUTABLE) ? MemoryAllocator::CodePageGuardSize() : 0;
+
+    MemoryChunk* memory_chunk = memory_allocator->AllocateChunk(
+        reserve_area_size, commit_area_size, executable, NULL);
+    size_t alignment = code_range != NULL && code_range->valid()
+                           ? MemoryChunk::kAlignment
+                           : base::OS::CommitPageSize();
+    size_t reserved_size =
+        ((executable == EXECUTABLE))
+            ? RoundUp(header_size + guard_size + reserve_area_size + guard_size,
+                      alignment)
+            : RoundUp(header_size + reserve_area_size,
+                      base::OS::CommitPageSize());
+    CHECK(memory_chunk->size() == reserved_size);
+    CHECK(memory_chunk->area_start() <
+          memory_chunk->address() + memory_chunk->size());
+    CHECK(memory_chunk->area_end() <=
+          memory_chunk->address() + memory_chunk->size());
+    CHECK(static_cast<size_t>(memory_chunk->area_size()) == commit_area_size);
+
+    Address area_start = memory_chunk->area_start();
+
+    memory_chunk->CommitArea(second_commit_area_size);
+    CHECK(area_start == memory_chunk->area_start());
+    CHECK(memory_chunk->area_start() <
+          memory_chunk->address() + memory_chunk->size());
+    CHECK(memory_chunk->area_end() <=
+          memory_chunk->address() + memory_chunk->size());
+    CHECK(static_cast<size_t>(memory_chunk->area_size()) ==
+          second_commit_area_size);
+
+    memory_allocator->Free(memory_chunk);
+  }
   memory_allocator->TearDown();
   delete memory_allocator;
 }
@@ -207,8 +205,8 @@ TEST(Regress3540) {
   Heap* heap = isolate->heap();
   const int pageSize = Page::kPageSize;
   MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
-  CHECK(
-      memory_allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize()));
+  CHECK(memory_allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize(),
+                                0));
   TestMemoryAllocatorScope test_allocator_scope(isolate, memory_allocator);
   CodeRange* code_range = new CodeRange(isolate);
   const size_t code_range_size = 4 * pageSize;
@@ -310,8 +308,8 @@ TEST(MemoryAllocator) {
   MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
   CHECK(memory_allocator != nullptr);
-  CHECK(memory_allocator->SetUp(heap->MaxReserved(),
-                                heap->MaxExecutableSize()));
+  CHECK(memory_allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize(),
+                                0));
   TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
 
   {
@@ -358,8 +356,8 @@ TEST(NewSpace) {
   Isolate* isolate = CcTest::i_isolate();
   Heap* heap = isolate->heap();
   MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
-  CHECK(memory_allocator->SetUp(heap->MaxReserved(),
-                                heap->MaxExecutableSize()));
+  CHECK(memory_allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize(),
+                                0));
   TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
 
   NewSpace new_space(heap);
@@ -385,8 +383,8 @@ TEST(OldSpace) {
   Isolate* isolate = CcTest::i_isolate();
   Heap* heap = isolate->heap();
   MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
-  CHECK(memory_allocator->SetUp(heap->MaxReserved(),
-                                heap->MaxExecutableSize()));
+  CHECK(memory_allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize(),
+                                0));
   TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
 
   OldSpace* s = new OldSpace(heap, OLD_SPACE, NOT_EXECUTABLE);
@@ -409,8 +407,8 @@ TEST(CompactionSpace) {
   Heap* heap = isolate->heap();
   MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
   CHECK(memory_allocator != nullptr);
-  CHECK(
-      memory_allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize()));
+  CHECK(memory_allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize(),
+                                0));
   TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
 
   CompactionSpace* compaction_space =
...