Commit 6a5e24b2 authored by mlippautz, committed by Commit bot

Move MemoryAllocator and CodeRange into Heap

- MemoryAllocator is now part of Heap
- CodeRange is now part of MemoryAllocator
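In other words, accessors that used to hang off the Isolate now route through
the Heap. A minimal sketch of the new ownership chain, using only the accessor
names visible in the diff below (all other members and methods elided):

class CodeRange;

class MemoryAllocator {
 public:
  // CodeRange is now owned by the allocator.
  CodeRange* code_range() { return code_range_; }

 private:
  CodeRange* code_range_;
};

class Heap {
 public:
  // MemoryAllocator is now owned by the heap; created in Heap::SetUp()
  // and destroyed in Heap::TearDown().
  MemoryAllocator* memory_allocator() { return memory_allocator_; }

 private:
  MemoryAllocator* memory_allocator_;
};

Call sites therefore change from isolate->memory_allocator() and
isolate->code_range() to isolate->heap()->memory_allocator() and
isolate->heap()->memory_allocator()->code_range().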

BUG=chromium:581076
LOG=N

Review URL: https://codereview.chromium.org/1862653002

Cr-Commit-Position: refs/heads/master@{#35294}
parent 08454486
@@ -7189,7 +7189,7 @@ void Isolate::AddMemoryAllocationCallback(MemoryAllocationCallback callback,
                                           ObjectSpace space,
                                           AllocationAction action) {
   i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
-  isolate->memory_allocator()->AddMemoryAllocationCallback(
+  isolate->heap()->memory_allocator()->AddMemoryAllocationCallback(
       callback, space, action);
 }
@@ -7197,8 +7197,7 @@ void Isolate::AddMemoryAllocationCallback(MemoryAllocationCallback callback,
 void Isolate::RemoveMemoryAllocationCallback(
     MemoryAllocationCallback callback) {
   i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
-  isolate->memory_allocator()->RemoveMemoryAllocationCallback(
-      callback);
+  isolate->heap()->memory_allocator()->RemoveMemoryAllocationCallback(callback);
 }
@@ -7693,9 +7692,10 @@ void Isolate::SetStackLimit(uintptr_t stack_limit) {
 void Isolate::GetCodeRange(void** start, size_t* length_in_bytes) {
   i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
-  if (isolate->code_range()->valid()) {
-    *start = isolate->code_range()->start();
-    *length_in_bytes = isolate->code_range()->size();
+  if (isolate->heap()->memory_allocator()->code_range()->valid()) {
+    *start = isolate->heap()->memory_allocator()->code_range()->start();
+    *length_in_bytes =
+        isolate->heap()->memory_allocator()->code_range()->size();
   } else {
     *start = NULL;
     *length_in_bytes = 0;
......
@@ -116,7 +116,7 @@ void StatisticsExtension::GetCounters(
   };
 
   const StatisticNumber numbers[] = {
-      {isolate->memory_allocator()->Size(), "total_committed_bytes"},
+      {heap->memory_allocator()->Size(), "total_committed_bytes"},
       {heap->new_space()->Size(), "new_space_live_bytes"},
       {heap->new_space()->Available(), "new_space_available_bytes"},
       {heap->new_space()->CommittedMemory(), "new_space_commited_bytes"},
......
@@ -1401,8 +1401,10 @@ Handle<Code> Factory::NewCode(const CodeDesc& desc,
   int obj_size = Code::SizeFor(body_size);
   Handle<Code> code = NewCodeRaw(obj_size, immovable);
-  DCHECK(isolate()->code_range() == NULL || !isolate()->code_range()->valid() ||
-         isolate()->code_range()->contains(code->address()) ||
+  DCHECK(isolate()->heap()->memory_allocator()->code_range() == NULL ||
+         !isolate()->heap()->memory_allocator()->code_range()->valid() ||
+         isolate()->heap()->memory_allocator()->code_range()->contains(
+             code->address()) ||
          obj_size <= isolate()->heap()->code_space()->AreaSize());
 
   // The code object has not been fully initialized yet. We rely on the
......
@@ -164,7 +164,7 @@ void GCTracer::Start(GarbageCollector collector, const char* gc_reason,
   current_.reduce_memory = heap_->ShouldReduceMemory();
   current_.start_time = start_time;
   current_.start_object_size = heap_->SizeOfObjects();
-  current_.start_memory_size = heap_->isolate()->memory_allocator()->Size();
+  current_.start_memory_size = heap_->memory_allocator()->Size();
   current_.start_holes_size = CountTotalHolesSize(heap_);
   current_.new_space_object_size =
       heap_->new_space()->top() - heap_->new_space()->bottom();
@@ -214,7 +214,7 @@ void GCTracer::Stop(GarbageCollector collector) {
   current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
   current_.end_object_size = heap_->SizeOfObjects();
-  current_.end_memory_size = heap_->isolate()->memory_allocator()->Size();
+  current_.end_memory_size = heap_->memory_allocator()->Size();
   current_.end_holes_size = CountTotalHolesSize(heap_);
   current_.survived_new_space_object_size = heap_->SurvivedNewSpaceObjectSize();
......
@@ -71,7 +71,7 @@ class IdleScavengeObserver : public AllocationObserver {
 Heap::Heap()
     : amount_of_external_allocated_memory_(0),
       amount_of_external_allocated_memory_at_last_global_gc_(0),
-      isolate_(NULL),
+      isolate_(nullptr),
       code_range_size_(0),
       // semispace_size_ should be a power of 2 and old_generation_size_ should
       // be a multiple of Page::kPageSize.
@@ -136,6 +136,7 @@ Heap::Heap()
       last_gc_time_(0.0),
       scavenge_collector_(nullptr),
       mark_compact_collector_(nullptr),
+      memory_allocator_(nullptr),
       store_buffer_(this),
       incremental_marking_(nullptr),
       gc_idle_time_handler_(nullptr),
@@ -225,7 +226,7 @@ size_t Heap::CommittedPhysicalMemory() {
 intptr_t Heap::CommittedMemoryExecutable() {
   if (!HasBeenSetUp()) return 0;
 
-  return isolate()->memory_allocator()->SizeExecutable();
+  return memory_allocator()->SizeExecutable();
 }
@@ -296,7 +297,7 @@ GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
   // and does not count available bytes already in the old space or code
   // space. Undercounting is safe---we may get an unrequested full GC when
   // a scavenge would have succeeded.
-  if (isolate_->memory_allocator()->MaxAvailable() <= new_space_.Size()) {
+  if (memory_allocator()->MaxAvailable() <= new_space_.Size()) {
     isolate_->counters()
         ->gc_compactor_caused_by_oldspace_exhaustion()
         ->Increment();
@@ -339,8 +340,8 @@ void Heap::PrintShortHeapStatistics() {
   PrintIsolate(isolate_, "Memory allocator, used: %6" V8_PTR_PREFIX
                          "d KB"
                          ", available: %6" V8_PTR_PREFIX "d KB\n",
-               isolate_->memory_allocator()->Size() / KB,
-               isolate_->memory_allocator()->Available() / KB);
+               memory_allocator()->Size() / KB,
+               memory_allocator()->Available() / KB);
   PrintIsolate(isolate_, "New space, used: %6" V8_PTR_PREFIX
                          "d KB"
                          ", available: %6" V8_PTR_PREFIX
@@ -3341,8 +3342,9 @@ AllocationResult Heap::AllocateCode(int object_size, bool immovable) {
   result->set_map_no_write_barrier(code_map());
   Code* code = Code::cast(result);
   DCHECK(IsAligned(bit_cast<intptr_t>(code->address()), kCodeAlignment));
-  DCHECK(isolate_->code_range() == NULL || !isolate_->code_range()->valid() ||
-         isolate_->code_range()->contains(code->address()) ||
+  DCHECK(memory_allocator()->code_range() == NULL ||
+         !memory_allocator()->code_range()->valid() ||
+         memory_allocator()->code_range()->contains(code->address()) ||
          object_size <= code_space()->AreaSize());
   code->set_gc_metadata(Smi::FromInt(0));
   code->set_ic_age(global_ic_age_);
@@ -3367,8 +3369,9 @@ AllocationResult Heap::CopyCode(Code* code) {
   // Relocate the copy.
   DCHECK(IsAligned(bit_cast<intptr_t>(new_code->address()), kCodeAlignment));
-  DCHECK(isolate_->code_range() == NULL || !isolate_->code_range()->valid() ||
-         isolate_->code_range()->contains(code->address()) ||
+  DCHECK(memory_allocator()->code_range() == NULL ||
+         !memory_allocator()->code_range()->valid() ||
+         memory_allocator()->code_range()->contains(code->address()) ||
          obj_size <= code_space()->AreaSize());
   new_code->Relocate(new_addr - old_addr);
   // We have to iterate over the object and process its pointers when black
@@ -3436,8 +3439,9 @@ AllocationResult Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
   // Relocate the copy.
   DCHECK(IsAligned(bit_cast<intptr_t>(new_code->address()), kCodeAlignment));
-  DCHECK(isolate_->code_range() == NULL || !isolate_->code_range()->valid() ||
-         isolate_->code_range()->contains(code->address()) ||
+  DCHECK(memory_allocator()->code_range() == NULL ||
+         !memory_allocator()->code_range()->valid() ||
+         memory_allocator()->code_range()->contains(code->address()) ||
          new_obj_size <= code_space()->AreaSize());
 
   new_code->Relocate(new_addr - old_addr);
@@ -4482,7 +4486,7 @@ void Heap::ReportHeapStatistics(const char* title) {
   PrintF("\n");
   PrintF("Heap statistics : ");
-  isolate_->memory_allocator()->ReportStatistics();
+  memory_allocator()->ReportStatistics();
   PrintF("To space : ");
   new_space_.ReportStatistics();
   PrintF("Old space : ");
@@ -4499,7 +4503,7 @@ void Heap::ReportHeapStatistics(const char* title) {
 #endif  // DEBUG
 
 bool Heap::Contains(HeapObject* value) {
-  if (isolate_->memory_allocator()->IsOutsideAllocatedSpace(value->address())) {
+  if (memory_allocator()->IsOutsideAllocatedSpace(value->address())) {
     return false;
   }
   return HasBeenSetUp() &&
@@ -4509,7 +4513,7 @@ bool Heap::Contains(HeapObject* value) {
 }
 
 bool Heap::ContainsSlow(Address addr) {
-  if (isolate_->memory_allocator()->IsOutsideAllocatedSpace(addr)) {
+  if (memory_allocator()->IsOutsideAllocatedSpace(addr)) {
     return false;
   }
   return HasBeenSetUp() &&
@@ -4519,7 +4523,7 @@ bool Heap::ContainsSlow(Address addr) {
 }
 
 bool Heap::InSpace(HeapObject* value, AllocationSpace space) {
-  if (isolate_->memory_allocator()->IsOutsideAllocatedSpace(value->address())) {
+  if (memory_allocator()->IsOutsideAllocatedSpace(value->address())) {
     return false;
   }
   if (!HasBeenSetUp()) return false;
@@ -4541,7 +4545,7 @@ bool Heap::InSpace(HeapObject* value, AllocationSpace space) {
 }
 
 bool Heap::InSpaceSlow(Address addr, AllocationSpace space) {
-  if (isolate_->memory_allocator()->IsOutsideAllocatedSpace(addr)) {
+  if (memory_allocator()->IsOutsideAllocatedSpace(addr)) {
     return false;
   }
   if (!HasBeenSetUp()) return false;
@@ -4982,12 +4986,11 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
   *stats->map_space_capacity = map_space_->Capacity();
   *stats->lo_space_size = lo_space_->Size();
   isolate_->global_handles()->RecordStats(stats);
-  *stats->memory_allocator_size = isolate()->memory_allocator()->Size();
+  *stats->memory_allocator_size = memory_allocator()->Size();
   *stats->memory_allocator_capacity =
-      isolate()->memory_allocator()->Size() +
-      isolate()->memory_allocator()->Available();
+      memory_allocator()->Size() + memory_allocator()->Available();
   *stats->os_error = base::OS::GetLastError();
-      isolate()->memory_allocator()->Available();
+      memory_allocator()->Available();
   if (take_snapshot) {
     HeapIterator iterator(this);
     for (HeapObject* obj = iterator.next(); obj != NULL;
@@ -5222,7 +5225,9 @@ bool Heap::SetUp() {
   base::CallOnce(&initialize_gc_once, &InitializeGCOnce);
 
   // Set up memory allocator.
-  if (!isolate_->memory_allocator()->SetUp(MaxReserved(), MaxExecutableSize()))
+  memory_allocator_ = new MemoryAllocator(isolate_);
+  if (!memory_allocator_->SetUp(MaxReserved(), MaxExecutableSize(),
+                                code_range_size_))
     return false;
 
   // Initialize incremental marking.
@@ -5239,8 +5244,6 @@ bool Heap::SetUp() {
   if (old_space_ == NULL) return false;
   if (!old_space_->SetUp()) return false;
 
-  if (!isolate_->code_range()->SetUp(code_range_size_)) return false;
-
   // Initialize the code space, set its maximum capacity to the old
   // generation size. It needs executable memory.
   code_space_ = new OldSpace(this, CODE_SPACE, EXECUTABLE);
@@ -5480,7 +5483,7 @@ void Heap::TearDown() {
   store_buffer()->TearDown();
 
-  isolate_->memory_allocator()->TearDown();
+  memory_allocator()->TearDown();
 
   StrongRootsList* next = NULL;
   for (StrongRootsList* list = strong_roots_list_; list; list = next) {
@@ -5488,6 +5491,9 @@ void Heap::TearDown() {
     delete list;
   }
   strong_roots_list_ = NULL;
+
+  delete memory_allocator_;
+  memory_allocator_ = nullptr;
 }
@@ -6287,7 +6293,7 @@ void Heap::WaitUntilUnmappingOfFreeChunksCompleted() {
 void Heap::QueueMemoryChunkForFree(MemoryChunk* chunk) {
   // PreFree logically frees the memory chunk. However, the actual freeing
   // will happen on a separate thread sometime later.
-  isolate_->memory_allocator()->PreFreeMemory(chunk);
+  memory_allocator()->PreFreeMemory(chunk);
 
   // The chunks added to this queue will be freed by a concurrent thread.
   chunk->set_next_chunk(chunks_queued_for_free_);
@@ -6320,7 +6326,7 @@ void Heap::FreeQueuedChunks(MemoryChunk* list_head) {
   MemoryChunk* chunk;
   for (chunk = list_head; chunk != NULL; chunk = next) {
     next = chunk->next_chunk();
-    isolate_->memory_allocator()->PerformFreeMemory(chunk);
+    memory_allocator()->PerformFreeMemory(chunk);
   }
 }
......
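Condensed from the heap.cc hunks above: the allocator's lifetime is now
bounded by the heap's own setup and teardown rather than the isolate's
(everything unrelated elided from this sketch):

bool Heap::SetUp() {
  // The heap creates its own allocator; the new third argument forwards
  // the code range size that used to go to a separate
  // isolate_->code_range()->SetUp() call.
  memory_allocator_ = new MemoryAllocator(isolate_);
  if (!memory_allocator_->SetUp(MaxReserved(), MaxExecutableSize(),
                                code_range_size_)) {
    return false;
  }
  // ... space and collector setup elided ...
  return true;
}

void Heap::TearDown() {
  memory_allocator()->TearDown();
  // ... remaining teardown elided ...
  delete memory_allocator_;
  memory_allocator_ = nullptr;
}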
@@ -927,6 +927,8 @@ class Heap {
   GCTracer* tracer() { return tracer_; }
 
+  MemoryAllocator* memory_allocator() { return memory_allocator_; }
+
   EmbedderHeapTracer* embedder_heap_tracer() { return embedder_heap_tracer_; }
 
   PromotionQueue* promotion_queue() { return &promotion_queue_; }
@@ -2149,6 +2151,8 @@ class Heap {
   MarkCompactCollector* mark_compact_collector_;
 
+  MemoryAllocator* memory_allocator_;
+
   StoreBuffer store_buffer_;
 
   IncrementalMarking* incremental_marking_;
@@ -2252,6 +2256,7 @@ class Heap {
   friend class Page;
   friend class Scavenger;
   friend class StoreBuffer;
+  friend class TestMemoryAllocatorScope;
 
   // The allocator interface.
   friend class Factory;
......
@@ -680,7 +680,7 @@ LargePage* LargePage::Initialize(Heap* heap, MemoryChunk* chunk,
 intptr_t LargeObjectSpace::Available() {
-  return ObjectSizeFor(heap()->isolate()->memory_allocator()->Available());
+  return ObjectSizeFor(heap()->memory_allocator()->Available());
 }
......
This diff is collapsed.
@@ -1257,7 +1257,8 @@ class MemoryAllocator {
   // Initializes its internal bookkeeping structures.
   // Max capacity of the total space and executable memory limit.
-  bool SetUp(intptr_t max_capacity, intptr_t capacity_executable);
+  bool SetUp(intptr_t max_capacity, intptr_t capacity_executable,
+             intptr_t code_range_size);
 
   void TearDown();
@@ -1383,6 +1384,8 @@ class MemoryAllocator {
                                Address start, size_t commit_size,
                                size_t reserved_size);
 
+  CodeRange* code_range() { return code_range_; }
+
  private:
   // See AllocatePage for public interface. Note that currently we only support
   // pools for NOT_EXECUTABLE pages of size MemoryChunk::kPageSize.
@@ -1394,6 +1397,8 @@ class MemoryAllocator {
   Isolate* isolate_;
 
+  CodeRange* code_range_;
+
   // Maximum space size in bytes.
   intptr_t capacity_;
   // Maximum subset of capacity_ that can be executable
@@ -1447,6 +1452,8 @@ class MemoryAllocator {
   List<MemoryChunk*> chunk_pool_;
 
+  friend class TestCodeRangeScope;
+
   DISALLOW_IMPLICIT_CONSTRUCTORS(MemoryAllocator);
 };
......
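The implementation behind the new SetUp() parameter lives in the collapsed
spaces.cc diff above, so the following is only a plausible sketch, not the
verbatim change: presumably the allocator forwards code_range_size to the
CodeRange it now owns, replacing the call Heap::SetUp() used to make on the
isolate's CodeRange.

bool MemoryAllocator::SetUp(intptr_t max_capacity, intptr_t capacity_executable,
                            intptr_t code_range_size) {
  capacity_ = RoundUp(max_capacity, Page::kPageSize);
  capacity_executable_ = RoundUp(capacity_executable, Page::kPageSize);
  DCHECK_GE(capacity_, capacity_executable_);

  // Assumption: the embedded CodeRange is initialized here, replacing the
  // isolate_->code_range()->SetUp(code_range_size_) call removed from
  // Heap::SetUp(); the tests below pass 0 for code_range_size.
  code_range_ = new CodeRange(isolate_);
  if (!code_range_->SetUp(code_range_size)) return false;
  return true;
}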
@@ -1798,7 +1798,6 @@ Isolate::Isolate(bool enable_serializer)
       runtime_profiler_(NULL),
       compilation_cache_(NULL),
       counters_(NULL),
-      code_range_(NULL),
       logger_(NULL),
       stats_table_(NULL),
       stub_cache_(NULL),
@@ -1809,7 +1808,6 @@ Isolate::Isolate(bool enable_serializer)
       capture_stack_trace_for_uncaught_exceptions_(false),
       stack_trace_for_uncaught_exceptions_frame_limit_(0),
       stack_trace_for_uncaught_exceptions_options_(StackTrace::kOverview),
-      memory_allocator_(NULL),
       keyed_lookup_cache_(NULL),
       context_slot_cache_(NULL),
       descriptor_lookup_cache_(NULL),
@@ -2067,10 +2065,6 @@ Isolate::~Isolate() {
   delete thread_manager_;
   thread_manager_ = NULL;
 
-  delete memory_allocator_;
-  memory_allocator_ = NULL;
-  delete code_range_;
-  code_range_ = NULL;
   delete global_handles_;
   global_handles_ = NULL;
   delete eternal_handles_;
@@ -2164,9 +2158,6 @@ bool Isolate::Init(Deserializer* des) {
   // The initialization process does not handle memory exhaustion.
   AlwaysAllocateScope always_allocate(this);
 
-  memory_allocator_ = new MemoryAllocator(this);
-  code_range_ = new CodeRange(this);
-
   // Safe after setting Heap::isolate_, and initializing StackGuard
   heap_.SetStackLimits();
@@ -2225,7 +2216,7 @@ bool Isolate::Init(Deserializer* des) {
     return false;
   }
 
-  deoptimizer_data_ = new DeoptimizerData(memory_allocator_);
+  deoptimizer_data_ = new DeoptimizerData(heap()->memory_allocator());
 
   const bool create_heap_objects = (des == NULL);
   if (create_heap_objects && !heap_.CreateHeapObjects()) {
......
@@ -817,7 +817,6 @@ class Isolate {
     DCHECK(counters_ != NULL);
     return counters_;
   }
-  CodeRange* code_range() { return code_range_; }
   RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
   CompilationCache* compilation_cache() { return compilation_cache_; }
   Logger* logger() {
@@ -841,10 +840,6 @@ class Isolate {
     return materialized_object_store_;
   }
 
-  MemoryAllocator* memory_allocator() {
-    return memory_allocator_;
-  }
-
   KeyedLookupCache* keyed_lookup_cache() {
     return keyed_lookup_cache_;
   }
@@ -1240,7 +1235,6 @@ class Isolate {
   RuntimeProfiler* runtime_profiler_;
   CompilationCache* compilation_cache_;
   Counters* counters_;
-  CodeRange* code_range_;
   base::RecursiveMutex break_access_;
   Logger* logger_;
   StackGuard stack_guard_;
@@ -1254,7 +1248,6 @@ class Isolate {
   bool capture_stack_trace_for_uncaught_exceptions_;
   int stack_trace_for_uncaught_exceptions_frame_limit_;
   StackTrace::StackTraceOptions stack_trace_for_uncaught_exceptions_options_;
-  MemoryAllocator* memory_allocator_;
   KeyedLookupCache* keyed_lookup_cache_;
   ContextSlotCache* context_slot_cache_;
   DescriptorLookupCache* descriptor_lookup_cache_;
@@ -1381,8 +1374,6 @@ class Isolate {
   friend class Simulator;
   friend class StackGuard;
   friend class ThreadId;
-  friend class TestMemoryAllocatorScope;
-  friend class TestCodeRangeScope;
   friend class v8::Isolate;
   friend class v8::Locker;
   friend class v8::Unlocker;
......
@@ -6762,7 +6762,7 @@ bool RegExpEngine::TooMuchRegExpCode(Handle<String> pattern) {
   Heap* heap = pattern->GetHeap();
   bool too_much = pattern->length() > RegExpImpl::kRegExpTooLargeToOptimize;
   if (heap->total_regexp_code_generated() > RegExpImpl::kRegExpCompiledLimit &&
-      heap->isolate()->memory_allocator()->SizeExecutable() >
+      heap->memory_allocator()->SizeExecutable() >
           RegExpImpl::kRegExpExecutableMemoryLimit) {
     too_much = true;
   }
......
@@ -78,7 +78,8 @@ void Assembler::emit_code_target(Handle<Code> target,
 void Assembler::emit_runtime_entry(Address entry, RelocInfo::Mode rmode) {
   DCHECK(RelocInfo::IsRuntimeEntry(rmode));
   RecordRelocInfo(rmode);
-  emitl(static_cast<uint32_t>(entry - isolate()->code_range()->start()));
+  emitl(static_cast<uint32_t>(
+      entry - isolate()->heap()->memory_allocator()->code_range()->start()));
 }
@@ -299,7 +300,8 @@ Handle<Object> Assembler::code_target_object_handle_at(Address pc) {
 Address Assembler::runtime_entry_at(Address pc) {
-  return Memory::int32_at(pc) + isolate()->code_range()->start();
+  return Memory::int32_at(pc) +
+         isolate()->heap()->memory_allocator()->code_range()->start();
 }
 
 // -----------------------------------------------------------------------------
......
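For context on the x64 hunks above: runtime entries are emitted as 32-bit
offsets from the code range base, which is why the assembler has to reach the
relocated code_range()->start() accessor. A self-contained sketch of the
encode/decode round trip; the addresses below are made up for illustration and
are not V8 source:

#include <cassert>
#include <cstdint>

int main() {
  // Hypothetical addresses: base of the reserved code range and one
  // runtime entry inside it.
  const uintptr_t kCodeRangeStart = 0x200000000;
  const uintptr_t kRuntimeEntry = 0x200001234;

  // emit_runtime_entry() stores the entry as a 32-bit offset from the base.
  uint32_t emitted = static_cast<uint32_t>(kRuntimeEntry - kCodeRangeStart);

  // runtime_entry_at() adds the base back; this only round-trips while the
  // whole code range spans less than 4 GB.
  uintptr_t decoded = kCodeRangeStart + emitted;
  assert(decoded == kRuntimeEntry);
  return 0;
}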
@@ -107,13 +107,12 @@ TEST(Page) {
 class TestMemoryAllocatorScope {
  public:
   TestMemoryAllocatorScope(Isolate* isolate, MemoryAllocator* allocator)
-      : isolate_(isolate),
-        old_allocator_(isolate->memory_allocator_) {
-    isolate->memory_allocator_ = allocator;
+      : isolate_(isolate), old_allocator_(isolate->heap()->memory_allocator()) {
+    isolate->heap()->memory_allocator_ = allocator;
   }
 
   ~TestMemoryAllocatorScope() {
-    isolate_->memory_allocator_ = old_allocator_;
+    isolate_->heap()->memory_allocator_ = old_allocator_;
   }
 
  private:
@@ -129,12 +128,12 @@ class TestCodeRangeScope {
  public:
   TestCodeRangeScope(Isolate* isolate, CodeRange* code_range)
       : isolate_(isolate),
-        old_code_range_(isolate->code_range_) {
-    isolate->code_range_ = code_range;
+        old_code_range_(isolate->heap()->memory_allocator()->code_range()) {
+    isolate->heap()->memory_allocator()->code_range_ = code_range;
   }
 
   ~TestCodeRangeScope() {
-    isolate_->code_range_ = old_code_range_;
+    isolate_->heap()->memory_allocator()->code_range_ = old_code_range_;
   }
 
  private:
@@ -153,50 +152,49 @@ static void VerifyMemoryChunk(Isolate* isolate,
                               size_t second_commit_area_size,
                               Executability executable) {
   MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
-  CHECK(memory_allocator->SetUp(heap->MaxReserved(),
-                                heap->MaxExecutableSize()));
-  TestMemoryAllocatorScope test_allocator_scope(isolate, memory_allocator);
-  TestCodeRangeScope test_code_range_scope(isolate, code_range);
-
-  size_t header_size = (executable == EXECUTABLE)
-                       ? MemoryAllocator::CodePageGuardStartOffset()
-                       : MemoryChunk::kObjectStartOffset;
-  size_t guard_size = (executable == EXECUTABLE)
-                       ? MemoryAllocator::CodePageGuardSize()
-                       : 0;
-
-  MemoryChunk* memory_chunk = memory_allocator->AllocateChunk(reserve_area_size,
-                                                              commit_area_size,
-                                                              executable,
-                                                              NULL);
-  size_t alignment = code_range != NULL && code_range->valid()
-                         ? MemoryChunk::kAlignment
-                         : base::OS::CommitPageSize();
-  size_t reserved_size =
-      ((executable == EXECUTABLE))
-          ? RoundUp(header_size + guard_size + reserve_area_size + guard_size,
-                    alignment)
-          : RoundUp(header_size + reserve_area_size,
-                    base::OS::CommitPageSize());
-  CHECK(memory_chunk->size() == reserved_size);
-  CHECK(memory_chunk->area_start() < memory_chunk->address() +
-                                     memory_chunk->size());
-  CHECK(memory_chunk->area_end() <= memory_chunk->address() +
-                                    memory_chunk->size());
-  CHECK(static_cast<size_t>(memory_chunk->area_size()) == commit_area_size);
-
-  Address area_start = memory_chunk->area_start();
-
-  memory_chunk->CommitArea(second_commit_area_size);
-  CHECK(area_start == memory_chunk->area_start());
-  CHECK(memory_chunk->area_start() < memory_chunk->address() +
-                                     memory_chunk->size());
-  CHECK(memory_chunk->area_end() <= memory_chunk->address() +
-                                    memory_chunk->size());
-  CHECK(static_cast<size_t>(memory_chunk->area_size()) ==
-        second_commit_area_size);
-
-  memory_allocator->Free(memory_chunk);
+  CHECK(memory_allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize(),
+                                0));
+  {
+    TestMemoryAllocatorScope test_allocator_scope(isolate, memory_allocator);
+    TestCodeRangeScope test_code_range_scope(isolate, code_range);
+    size_t header_size = (executable == EXECUTABLE)
+                             ? MemoryAllocator::CodePageGuardStartOffset()
+                             : MemoryChunk::kObjectStartOffset;
+    size_t guard_size =
+        (executable == EXECUTABLE) ? MemoryAllocator::CodePageGuardSize() : 0;
+    MemoryChunk* memory_chunk = memory_allocator->AllocateChunk(
+        reserve_area_size, commit_area_size, executable, NULL);
+    size_t alignment = code_range != NULL && code_range->valid()
+                           ? MemoryChunk::kAlignment
+                           : base::OS::CommitPageSize();
+    size_t reserved_size =
+        ((executable == EXECUTABLE))
+            ? RoundUp(header_size + guard_size + reserve_area_size + guard_size,
+                      alignment)
+            : RoundUp(header_size + reserve_area_size,
+                      base::OS::CommitPageSize());
+    CHECK(memory_chunk->size() == reserved_size);
+    CHECK(memory_chunk->area_start() <
+          memory_chunk->address() + memory_chunk->size());
+    CHECK(memory_chunk->area_end() <=
+          memory_chunk->address() + memory_chunk->size());
+    CHECK(static_cast<size_t>(memory_chunk->area_size()) == commit_area_size);
+    Address area_start = memory_chunk->area_start();
+    memory_chunk->CommitArea(second_commit_area_size);
+    CHECK(area_start == memory_chunk->area_start());
+    CHECK(memory_chunk->area_start() <
+          memory_chunk->address() + memory_chunk->size());
+    CHECK(memory_chunk->area_end() <=
+          memory_chunk->address() + memory_chunk->size());
+    CHECK(static_cast<size_t>(memory_chunk->area_size()) ==
+          second_commit_area_size);
+    memory_allocator->Free(memory_chunk);
+  }
   memory_allocator->TearDown();
   delete memory_allocator;
 }
@@ -207,8 +205,8 @@ TEST(Regress3540) {
   Heap* heap = isolate->heap();
   const int pageSize = Page::kPageSize;
   MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
-  CHECK(
-      memory_allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize()));
+  CHECK(memory_allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize(),
+                                0));
   TestMemoryAllocatorScope test_allocator_scope(isolate, memory_allocator);
   CodeRange* code_range = new CodeRange(isolate);
   const size_t code_range_size = 4 * pageSize;
@@ -310,8 +308,8 @@ TEST(MemoryAllocator) {
   MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
   CHECK(memory_allocator != nullptr);
-  CHECK(memory_allocator->SetUp(heap->MaxReserved(),
-                                heap->MaxExecutableSize()));
+  CHECK(memory_allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize(),
+                                0));
   TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
 
   {
@@ -358,8 +356,8 @@ TEST(NewSpace) {
   Isolate* isolate = CcTest::i_isolate();
   Heap* heap = isolate->heap();
   MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
-  CHECK(memory_allocator->SetUp(heap->MaxReserved(),
-                                heap->MaxExecutableSize()));
+  CHECK(memory_allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize(),
+                                0));
   TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
 
   NewSpace new_space(heap);
@@ -385,8 +383,8 @@ TEST(OldSpace) {
   Isolate* isolate = CcTest::i_isolate();
   Heap* heap = isolate->heap();
   MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
-  CHECK(memory_allocator->SetUp(heap->MaxReserved(),
-                                heap->MaxExecutableSize()));
+  CHECK(memory_allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize(),
+                                0));
   TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
 
   OldSpace* s = new OldSpace(heap, OLD_SPACE, NOT_EXECUTABLE);
@@ -409,8 +407,8 @@ TEST(CompactionSpace) {
   Heap* heap = isolate->heap();
   MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
   CHECK(memory_allocator != nullptr);
-  CHECK(
-      memory_allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize()));
+  CHECK(memory_allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize(),
+                                0));
   TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
 
   CompactionSpace* compaction_space =
......