Commit 2fdadd77 authored by bmeurer@chromium.org

Drop OS::IsOutsideAllocatedSpace() and move the tracking to the MemoryAllocator.

Instead of tracking allocated-space limits globally, which was never
implemented properly anyway (reads were not synchronized), track them
per MemoryAllocator, that is, per heap/isolate.

In particular, this avoids calling IsBadWritePtr() on Windows; that
function is obsolete and Microsoft strongly discourages its use.
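
For illustration, a minimal, self-contained sketch of the per-allocator
scheme introduced below. The field and method names follow the new
MemoryAllocator members in the spaces.h hunk; the standalone class and
the main() driver are illustrative only, not V8 code:

#include <cstdio>

// Each allocator keeps a conservative [lowest, highest) range covering
// everything it has ever committed. No mutex is needed here: unlike the
// old process-wide globals, the range belongs to a single heap/isolate.
class AllocatorLimits {
 public:
  AllocatorLimits()
      : lowest_ever_allocated_(reinterpret_cast<void*>(-1)),
        highest_ever_allocated_(reinterpret_cast<void*>(0)) {}

  // Record a successfully committed region [low, high).
  void UpdateAllocatedSpaceLimits(void* low, void* high) {
    if (low < lowest_ever_allocated_) lowest_ever_allocated_ = low;
    if (high > highest_ever_allocated_) highest_ever_allocated_ = high;
  }

  // Conservative check: may report "inside" for gaps that were never
  // handed out, but never reports "outside" for committed memory.
  bool IsOutsideAllocatedSpace(const void* address) const {
    return address < lowest_ever_allocated_ ||
           address >= highest_ever_allocated_;
  }

 private:
  void* lowest_ever_allocated_;
  void* highest_ever_allocated_;
};

int main() {
  AllocatorLimits limits;
  static char block[64];
  limits.UpdateAllocatedSpaceLimits(block, block + sizeof(block));
  std::printf("%d\n", limits.IsOutsideAllocatedSpace(block + 16));  // 0
  std::printf("%d\n", limits.IsOutsideAllocatedSpace(NULL));        // 1
}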

R=mstarzinger@chromium.org

Review URL: https://codereview.chromium.org/23903008

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@16542 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 03e8c9d0
@@ -6204,7 +6204,7 @@ bool Heap::Contains(HeapObject* value) {
bool Heap::Contains(Address addr) {
-  if (OS::IsOutsideAllocatedSpace(addr)) return false;
+  if (isolate_->memory_allocator()->IsOutsideAllocatedSpace(addr)) return false;
return HasBeenSetUp() &&
(new_space_.ToSpaceContains(addr) ||
old_pointer_space_->Contains(addr) ||
@@ -6223,7 +6223,7 @@ bool Heap::InSpace(HeapObject* value, AllocationSpace space) {
bool Heap::InSpace(Address addr, AllocationSpace space) {
-  if (OS::IsOutsideAllocatedSpace(addr)) return false;
+  if (isolate_->memory_allocator()->IsOutsideAllocatedSpace(addr)) return false;
if (!HasBeenSetUp()) return false;
switch (space) {
......
@@ -52,9 +52,6 @@ namespace v8 {
namespace internal {
-static Mutex* limit_mutex = NULL;
const char* OS::LocalTimezone(double time) {
if (std::isnan(time)) return "";
time_t tv = static_cast<time_t>(floor(time/msPerSecond));
@@ -76,31 +73,6 @@ double OS::LocalTimeOffset() {
}
-// We keep the lowest and highest addresses mapped as a quick way of
-// determining that pointers are outside the heap (used mostly in assertions
-// and verification). The estimate is conservative, i.e., not all addresses in
-// 'allocated' space are actually allocated to our heap. The range is
-// [lowest, highest), inclusive on the low end and exclusive on the high end.
-static void* lowest_ever_allocated = reinterpret_cast<void*>(-1);
-static void* highest_ever_allocated = reinterpret_cast<void*>(0);
-static void UpdateAllocatedSpaceLimits(void* address, int size) {
-  ASSERT(limit_mutex != NULL);
-  LockGuard<Mutex> lock_guard(limit_mutex);
-  lowest_ever_allocated = Min(lowest_ever_allocated, address);
-  highest_ever_allocated =
-      Max(highest_ever_allocated,
-          reinterpret_cast<void*>(reinterpret_cast<char*>(address) + size));
-}
-bool OS::IsOutsideAllocatedSpace(void* address) {
-  return address < lowest_ever_allocated || address >= highest_ever_allocated;
-}
void* OS::Allocate(const size_t requested,
size_t* allocated,
bool is_executable) {
@@ -112,7 +84,6 @@ void* OS::Allocate(const size_t requested,
return NULL;
}
*allocated = msize;
-  UpdateAllocatedSpaceLimits(mbase, msize);
return mbase;
}
@@ -365,8 +336,6 @@ bool VirtualMemory::CommitRegion(void* base, size_t size, bool is_executable) {
if (NULL == VirtualAlloc(base, size, MEM_COMMIT, prot)) {
return false;
}
-  UpdateAllocatedSpaceLimits(base, static_cast<int>(size));
return true;
}
@@ -406,12 +375,6 @@ void OS::SetUp() {
// call this setup code within the same millisecond.
uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
srandom(static_cast<unsigned int>(seed));
-  limit_mutex = new Mutex();
}
-void OS::TearDown() {
-  delete limit_mutex;
-}
......
@@ -63,9 +63,6 @@ namespace v8 {
namespace internal {
-static Mutex* limit_mutex = NULL;
const char* OS::LocalTimezone(double time) {
if (std::isnan(time)) return "";
time_t tv = static_cast<time_t>(floor(time/msPerSecond));
@@ -84,31 +81,6 @@ double OS::LocalTimeOffset() {
}
-// We keep the lowest and highest addresses mapped as a quick way of
-// determining that pointers are outside the heap (used mostly in assertions
-// and verification). The estimate is conservative, i.e., not all addresses in
-// 'allocated' space are actually allocated to our heap. The range is
-// [lowest, highest), inclusive on the low end and exclusive on the high end.
-static void* lowest_ever_allocated = reinterpret_cast<void*>(-1);
-static void* highest_ever_allocated = reinterpret_cast<void*>(0);
-static void UpdateAllocatedSpaceLimits(void* address, int size) {
-  ASSERT(limit_mutex != NULL);
-  LockGuard<Mutex> lock_guard(limit_mutex);
-  lowest_ever_allocated = Min(lowest_ever_allocated, address);
-  highest_ever_allocated =
-      Max(highest_ever_allocated,
-          reinterpret_cast<void*>(reinterpret_cast<char*>(address) + size));
-}
-bool OS::IsOutsideAllocatedSpace(void* address) {
-  return address < lowest_ever_allocated || address >= highest_ever_allocated;
-}
void* OS::Allocate(const size_t requested,
size_t* allocated,
bool executable) {
@@ -121,7 +93,6 @@ void* OS::Allocate(const size_t requested,
return NULL;
}
*allocated = msize;
-  UpdateAllocatedSpaceLimits(mbase, msize);
return mbase;
}
@@ -345,8 +316,6 @@ bool VirtualMemory::CommitRegion(void* base, size_t size, bool is_executable) {
kMmapFdOffset)) {
return false;
}
-  UpdateAllocatedSpaceLimits(base, size);
return true;
}
@@ -380,12 +349,6 @@ void OS::SetUp() {
// call this setup code within the same millisecond.
uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
srandom(static_cast<unsigned int>(seed));
-  limit_mutex = new Mutex();
}
-void OS::TearDown() {
-  delete limit_mutex;
-}
......
@@ -76,9 +76,6 @@ namespace v8 {
namespace internal {
-static Mutex* limit_mutex = NULL;
#ifdef __arm__
bool OS::ArmUsingHardFloat() {
@@ -140,31 +137,6 @@ double OS::LocalTimeOffset() {
}
-// We keep the lowest and highest addresses mapped as a quick way of
-// determining that pointers are outside the heap (used mostly in assertions
-// and verification). The estimate is conservative, i.e., not all addresses in
-// 'allocated' space are actually allocated to our heap. The range is
-// [lowest, highest), inclusive on the low end and exclusive on the high end.
-static void* lowest_ever_allocated = reinterpret_cast<void*>(-1);
-static void* highest_ever_allocated = reinterpret_cast<void*>(0);
-static void UpdateAllocatedSpaceLimits(void* address, int size) {
-  ASSERT(limit_mutex != NULL);
-  LockGuard<Mutex> lock_guard(limit_mutex);
-  lowest_ever_allocated = Min(lowest_ever_allocated, address);
-  highest_ever_allocated =
-      Max(highest_ever_allocated,
-          reinterpret_cast<void*>(reinterpret_cast<char*>(address) + size));
-}
-bool OS::IsOutsideAllocatedSpace(void* address) {
-  return address < lowest_ever_allocated || address >= highest_ever_allocated;
-}
void* OS::Allocate(const size_t requested,
size_t* allocated,
bool is_executable) {
@@ -178,7 +150,6 @@ void* OS::Allocate(const size_t requested,
return NULL;
}
*allocated = msize;
-  UpdateAllocatedSpaceLimits(mbase, msize);
return mbase;
}
@@ -472,7 +443,6 @@ bool VirtualMemory::CommitRegion(void* base, size_t size, bool is_executable) {
return false;
}
-  UpdateAllocatedSpaceLimits(base, size);
return true;
}
@@ -501,12 +471,6 @@ void OS::SetUp() {
// Seed the random number generator. We preserve microsecond resolution.
uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis()) ^ (getpid() << 16);
srandom(static_cast<unsigned int>(seed));
-  limit_mutex = new Mutex();
}
-void OS::TearDown() {
-  delete limit_mutex;
-}
......
@@ -79,34 +79,6 @@ namespace v8 {
namespace internal {
-static Mutex* limit_mutex = NULL;
-// We keep the lowest and highest addresses mapped as a quick way of
-// determining that pointers are outside the heap (used mostly in assertions
-// and verification). The estimate is conservative, i.e., not all addresses in
-// 'allocated' space are actually allocated to our heap. The range is
-// [lowest, highest), inclusive on the low end and exclusive on the high end.
-static void* lowest_ever_allocated = reinterpret_cast<void*>(-1);
-static void* highest_ever_allocated = reinterpret_cast<void*>(0);
-static void UpdateAllocatedSpaceLimits(void* address, int size) {
-  ASSERT(limit_mutex != NULL);
-  LockGuard<Mutex> lock(limit_mutex);
-  lowest_ever_allocated = Min(lowest_ever_allocated, address);
-  highest_ever_allocated =
-      Max(highest_ever_allocated,
-          reinterpret_cast<void*>(reinterpret_cast<char*>(address) + size));
-}
-bool OS::IsOutsideAllocatedSpace(void* address) {
-  return address < lowest_ever_allocated || address >= highest_ever_allocated;
-}
// Constants used for mmap.
// kMmapFd is used to pass vm_alloc flags to tag the region with the user
// defined tag 255. This helps identify V8-allocated regions in memory analysis
@@ -131,7 +103,6 @@ void* OS::Allocate(const size_t requested,
return NULL;
}
*allocated = msize;
-  UpdateAllocatedSpaceLimits(mbase, msize);
return mbase;
}
@@ -366,8 +337,6 @@ bool VirtualMemory::CommitRegion(void* address,
kMmapFdOffset)) {
return false;
}
-  UpdateAllocatedSpaceLimits(address, size);
return true;
}
@@ -396,12 +365,6 @@ void OS::SetUp() {
// Seed the random number generator. We preserve microsecond resolution.
uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis()) ^ (getpid() << 16);
srandom(static_cast<unsigned int>(seed));
-  limit_mutex = new Mutex();
}
-void OS::TearDown() {
-  delete limit_mutex;
-}
......
@@ -61,9 +61,6 @@ namespace v8 {
namespace internal {
-static Mutex* limit_mutex = NULL;
const char* OS::LocalTimezone(double time) {
if (std::isnan(time)) return "";
time_t tv = static_cast<time_t>(floor(time/msPerSecond));
@@ -82,31 +79,6 @@ double OS::LocalTimeOffset() {
}
-// We keep the lowest and highest addresses mapped as a quick way of
-// determining that pointers are outside the heap (used mostly in assertions
-// and verification). The estimate is conservative, i.e., not all addresses in
-// 'allocated' space are actually allocated to our heap. The range is
-// [lowest, highest), inclusive on the low end and exclusive on the high end.
-static void* lowest_ever_allocated = reinterpret_cast<void*>(-1);
-static void* highest_ever_allocated = reinterpret_cast<void*>(0);
-static void UpdateAllocatedSpaceLimits(void* address, int size) {
-  ASSERT(limit_mutex != NULL);
-  LockGuard<Mutex> lock(limit_mutex);
-  lowest_ever_allocated = Min(lowest_ever_allocated, address);
-  highest_ever_allocated =
-      Max(highest_ever_allocated,
-          reinterpret_cast<void*>(reinterpret_cast<char*>(address) + size));
-}
-bool OS::IsOutsideAllocatedSpace(void* address) {
-  return address < lowest_ever_allocated || address >= highest_ever_allocated;
-}
void* OS::Allocate(const size_t requested,
size_t* allocated,
bool is_executable) {
@@ -120,7 +92,6 @@ void* OS::Allocate(const size_t requested,
return NULL;
}
*allocated = msize;
-  UpdateAllocatedSpaceLimits(mbase, msize);
return mbase;
}
@@ -402,8 +373,6 @@ bool VirtualMemory::CommitRegion(void* base, size_t size, bool is_executable) {
kMmapFdOffset)) {
return false;
}
-  UpdateAllocatedSpaceLimits(base, size);
return true;
}
@@ -433,12 +402,6 @@ void OS::SetUp() {
// Seed the random number generator. We preserve microsecond resolution.
uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis()) ^ (getpid() << 16);
srandom(static_cast<unsigned int>(seed));
-  limit_mutex = new Mutex();
}
-void OS::TearDown() {
-  delete limit_mutex;
-}
......
@@ -81,9 +81,6 @@ namespace v8 {
namespace internal {
-static Mutex* limit_mutex = NULL;
const char* OS::LocalTimezone(double time) {
if (std::isnan(time)) return "";
time_t tv = static_cast<time_t>(floor(time/msPerSecond));
@@ -99,31 +96,6 @@ double OS::LocalTimeOffset() {
}
-// We keep the lowest and highest addresses mapped as a quick way of
-// determining that pointers are outside the heap (used mostly in assertions
-// and verification). The estimate is conservative, i.e., not all addresses in
-// 'allocated' space are actually allocated to our heap. The range is
-// [lowest, highest), inclusive on the low end and exclusive on the high end.
-static void* lowest_ever_allocated = reinterpret_cast<void*>(-1);
-static void* highest_ever_allocated = reinterpret_cast<void*>(0);
-static void UpdateAllocatedSpaceLimits(void* address, int size) {
-  ASSERT(limit_mutex != NULL);
-  LockGuard<Mutex> lock_guard(limit_mutex);
-  lowest_ever_allocated = Min(lowest_ever_allocated, address);
-  highest_ever_allocated =
-      Max(highest_ever_allocated,
-          reinterpret_cast<void*>(reinterpret_cast<char*>(address) + size));
-}
-bool OS::IsOutsideAllocatedSpace(void* address) {
-  return address < lowest_ever_allocated || address >= highest_ever_allocated;
-}
void* OS::Allocate(const size_t requested,
size_t* allocated,
bool is_executable) {
@@ -136,7 +108,6 @@ void* OS::Allocate(const size_t requested,
return NULL;
}
*allocated = msize;
-  UpdateAllocatedSpaceLimits(mbase, msize);
return mbase;
}
@@ -366,8 +337,6 @@ bool VirtualMemory::CommitRegion(void* base, size_t size, bool is_executable) {
kMmapFdOffset)) {
return false;
}
-  UpdateAllocatedSpaceLimits(base, size);
return true;
}
@@ -401,12 +370,6 @@ void OS::SetUp() {
// call this setup code within the same millisecond.
uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
srandom(static_cast<unsigned int>(seed));
-  limit_mutex = new Mutex();
}
-void OS::TearDown() {
-  delete limit_mutex;
-}
......
@@ -144,8 +144,6 @@ double ceiling(double x) {
}
-static Mutex* limit_mutex = NULL;
#if V8_TARGET_ARCH_IA32
static void MemMoveWrapper(void* dest, const void* src, size_t size) {
memmove(dest, src, size);
@@ -750,35 +748,6 @@ void OS::StrNCpy(Vector<char> dest, const char* src, size_t n) {
#undef _TRUNCATE
#undef STRUNCATE
-// We keep the lowest and highest addresses mapped as a quick way of
-// determining that pointers are outside the heap (used mostly in assertions
-// and verification). The estimate is conservative, i.e., not all addresses in
-// 'allocated' space are actually allocated to our heap. The range is
-// [lowest, highest), inclusive on the low end and exclusive on the high end.
-static void* lowest_ever_allocated = reinterpret_cast<void*>(-1);
-static void* highest_ever_allocated = reinterpret_cast<void*>(0);
-static void UpdateAllocatedSpaceLimits(void* address, int size) {
-  ASSERT(limit_mutex != NULL);
-  LockGuard<Mutex> lock_guard(limit_mutex);
-  lowest_ever_allocated = Min(lowest_ever_allocated, address);
-  highest_ever_allocated =
-      Max(highest_ever_allocated,
-          reinterpret_cast<void*>(reinterpret_cast<char*>(address) + size));
-}
-bool OS::IsOutsideAllocatedSpace(void* pointer) {
-  if (pointer < lowest_ever_allocated || pointer >= highest_ever_allocated)
-    return true;
-  // Ask the Windows API.
-  if (IsBadWritePtr(pointer, 1))
-    return true;
-  return false;
-}
// Get the system's page size used by VirtualAlloc() or the next power
// of two. The reason for always returning a power of two is that the
@@ -872,7 +841,6 @@ void* OS::Allocate(const size_t requested,
ASSERT(IsAligned(reinterpret_cast<size_t>(mbase), OS::AllocateAlignment()));
*allocated = msize;
-  UpdateAllocatedSpaceLimits(mbase, static_cast<int>(msize));
return mbase;
}
@@ -1490,8 +1458,6 @@ bool VirtualMemory::CommitRegion(void* base, size_t size, bool is_executable) {
if (NULL == VirtualAlloc(base, size, MEM_COMMIT, prot)) {
return false;
}
-  UpdateAllocatedSpaceLimits(base, static_cast<int>(size));
return true;
}
@@ -1623,13 +1589,6 @@ void OS::SetUp() {
// call this setup code within the same millisecond.
uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
srand(static_cast<unsigned int>(seed));
-  limit_mutex = new Mutex();
}
-void OS::TearDown() {
-  delete limit_mutex;
-}
} } // namespace v8::internal
@@ -178,9 +178,6 @@ class OS {
// called after CPU initialization.
static void PostSetUp();
-  // Clean up platform-OS-related things. Called once at VM shutdown.
-  static void TearDown();
// Returns the accumulated user time for thread. This routine
// can be used for profiling. The implementation should
// strive for high-precision timer resolution, preferable
@@ -254,13 +251,6 @@ class OS {
// Get the Alignment guaranteed by Allocate().
static size_t AllocateAlignment();
-  // Returns an indication of whether a pointer is in a space that
-  // has been allocated by Allocate(). This method may conservatively
-  // always return false, but giving more accurate information may
-  // improve the robustness of the stack dump code in the presence of
-  // heap corruption.
-  static bool IsOutsideAllocatedSpace(void* pointer);
// Sleep for a number of milliseconds.
static void Sleep(const int milliseconds);
......
@@ -228,10 +228,10 @@ Address CodeRange::AllocateRawMemory(const size_t requested_size,
}
ASSERT(*allocated <= current.size);
ASSERT(IsAddressAligned(current.start, MemoryChunk::kAlignment));
-  if (!MemoryAllocator::CommitExecutableMemory(code_range_,
-                                               current.start,
-                                               commit_size,
-                                               *allocated)) {
+  if (!isolate_->memory_allocator()->CommitExecutableMemory(code_range_,
+                                                            current.start,
+                                                            commit_size,
+                                                            *allocated)) {
*allocated = 0;
return NULL;
}
@@ -245,7 +245,7 @@ Address CodeRange::AllocateRawMemory(const size_t requested_size,
bool CodeRange::CommitRawMemory(Address start, size_t length) {
-  return code_range_->Commit(start, length, true);
+  return isolate_->memory_allocator()->CommitMemory(start, length, EXECUTABLE);
}
@@ -278,7 +278,9 @@ MemoryAllocator::MemoryAllocator(Isolate* isolate)
capacity_(0),
capacity_executable_(0),
size_(0),
-      size_executable_(0) {
+      size_executable_(0),
+      lowest_ever_allocated_(reinterpret_cast<void*>(-1)),
+      highest_ever_allocated_(reinterpret_cast<void*>(0)) {
}
@@ -304,6 +306,17 @@ void MemoryAllocator::TearDown() {
}
+bool MemoryAllocator::CommitMemory(Address base,
+                                   size_t size,
+                                   Executability executable) {
+  if (!VirtualMemory::CommitRegion(base, size, executable == EXECUTABLE)) {
+    return false;
+  }
+  UpdateAllocatedSpaceLimits(base, base + size);
+  return true;
+}
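
With this helper in place, every commit path shown below
(AllocateAlignedMemory, MemoryChunk::CommitArea, CommitBlock, and
CommitExecutableMemory) records the committed range, which is what
keeps the per-allocator IsOutsideAllocatedSpace() check conservative.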
void MemoryAllocator::FreeMemory(VirtualMemory* reservation,
Executability executable) {
// TODO(gc) make code_range part of memory allocator?
@@ -383,7 +396,9 @@ Address MemoryAllocator::AllocateAlignedMemory(size_t reserve_size,
base = NULL;
}
} else {
-    if (!reservation.Commit(base, commit_size, false)) {
+    if (reservation.Commit(base, commit_size, false)) {
+      UpdateAllocatedSpaceLimits(base, base + commit_size);
+    } else {
base = NULL;
}
}
@@ -509,7 +524,10 @@ bool MemoryChunk::CommitArea(size_t requested) {
Address start = address() + committed_size + guard_size;
size_t length = commit_size - committed_size;
if (reservation_.IsReserved()) {
-    if (!reservation_.Commit(start, length, IsFlagSet(IS_EXECUTABLE))) {
+    Executability executable = IsFlagSet(IS_EXECUTABLE)
+        ? EXECUTABLE : NOT_EXECUTABLE;
+    if (!heap()->isolate()->memory_allocator()->CommitMemory(
+            start, length, executable)) {
return false;
}
} else {
@@ -763,7 +781,7 @@ void MemoryAllocator::Free(MemoryChunk* chunk) {
bool MemoryAllocator::CommitBlock(Address start,
size_t size,
Executability executable) {
-  if (!VirtualMemory::CommitRegion(start, size, executable)) return false;
+  if (!CommitMemory(start, size, executable)) return false;
if (Heap::ShouldZapGarbage()) {
ZapBlock(start, size);
@@ -899,6 +917,9 @@ bool MemoryAllocator::CommitExecutableMemory(VirtualMemory* vm,
return false;
}
+  UpdateAllocatedSpaceLimits(start,
+                             start + CodePageAreaStartOffset() +
+                             commit_size - CodePageGuardStartOffset());
return true;
}
......
@@ -1083,6 +1083,13 @@ class MemoryAllocator {
return (Available() / Page::kPageSize) * Page::kMaxNonCodeHeapObjectSize;
}
+  // Returns an indication of whether a pointer is in a space that has
+  // been allocated by this MemoryAllocator.
+  V8_INLINE(bool IsOutsideAllocatedSpace(const void* address)) const {
+    return address < lowest_ever_allocated_ ||
+        address >= highest_ever_allocated_;
+  }
#ifdef DEBUG
// Reports statistic info of the space.
void ReportStatistics();
@@ -1105,6 +1112,8 @@ class MemoryAllocator {
Executability executable,
VirtualMemory* controller);
+  bool CommitMemory(Address addr, size_t size, Executability executable);
void FreeMemory(VirtualMemory* reservation, Executability executable);
void FreeMemory(Address addr, size_t size, Executability executable);
@@ -1150,10 +1159,10 @@ class MemoryAllocator {
return CodePageAreaEndOffset() - CodePageAreaStartOffset();
}
-  MUST_USE_RESULT static bool CommitExecutableMemory(VirtualMemory* vm,
-                                                     Address start,
-                                                     size_t commit_size,
-                                                     size_t reserved_size);
+  MUST_USE_RESULT bool CommitExecutableMemory(VirtualMemory* vm,
+                                              Address start,
+                                              size_t commit_size,
+                                              size_t reserved_size);
private:
Isolate* isolate_;
@@ -1168,6 +1177,14 @@ class MemoryAllocator {
// Allocated executable space size in bytes.
size_t size_executable_;
+  // We keep the lowest and highest addresses allocated as a quick way
+  // of determining that pointers are outside the heap. The estimate is
+  // conservative, i.e. not all addresses in 'allocated' space are allocated
+  // to our heap. The range is [lowest, highest), inclusive on the low end
+  // and exclusive on the high end.
+  void* lowest_ever_allocated_;
+  void* highest_ever_allocated_;
struct MemoryAllocationCallbackRegistration {
MemoryAllocationCallbackRegistration(MemoryAllocationCallback callback,
ObjectSpace space,
@@ -1190,6 +1207,11 @@ class MemoryAllocator {
Page* InitializePagesInChunk(int chunk_id, int pages_in_chunk,
PagedSpace* owner);
+  void UpdateAllocatedSpaceLimits(void* low, void* high) {
+    lowest_ever_allocated_ = Min(lowest_ever_allocated_, low);
+    highest_ever_allocated_ = Max(highest_ever_allocated_, high);
+  }
DISALLOW_IMPLICIT_CONSTRUCTORS(MemoryAllocator);
};
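
Note the contrast with the deleted platform code above: the old globals
were written under limit_mutex but read with no synchronization at all,
whereas these fields are plain members written only on the commit paths
of the owning heap/isolate, which is presumably why no lock is carried
over.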
......
@@ -112,7 +112,6 @@ void V8::TearDown() {
call_completed_callbacks_ = NULL;
Sampler::TearDown();
-  OS::TearDown();
}
......
@@ -186,10 +186,9 @@ class Block {
TEST(CodeRange) {
const int code_range_size = 32*MB;
-  OS::SetUp();
-  Isolate::Current()->InitializeLoggingAndCounters();
-  CodeRange* code_range = new CodeRange(Isolate::Current());
-  code_range->SetUp(code_range_size);
+  CcTest::InitializeVM();
+  CodeRange code_range(reinterpret_cast<Isolate*>(CcTest::isolate()));
+  code_range.SetUp(code_range_size);
int current_allocated = 0;
int total_allocated = 0;
List<Block> blocks(1000);
@@ -205,9 +204,9 @@ TEST(CodeRange) {
(Page::kMaxNonCodeHeapObjectSize << (Pseudorandom() % 3)) +
Pseudorandom() % 5000 + 1;
size_t allocated = 0;
-      Address base = code_range->AllocateRawMemory(requested,
-                                                   requested,
-                                                   &allocated);
+      Address base = code_range.AllocateRawMemory(requested,
+                                                  requested,
+                                                  &allocated);
CHECK(base != NULL);
blocks.Add(Block(base, static_cast<int>(allocated)));
current_allocated += static_cast<int>(allocated);
@@ -215,7 +214,7 @@ TEST(CodeRange) {
} else {
// Free a block.
int index = Pseudorandom() % blocks.length();
-      code_range->FreeRawMemory(blocks[index].base, blocks[index].size);
+      code_range.FreeRawMemory(blocks[index].base, blocks[index].size);
current_allocated -= blocks[index].size;
if (index < blocks.length() - 1) {
blocks[index] = blocks.RemoveLast();
@@ -225,6 +224,5 @@ TEST(CodeRange) {
}
}
-  code_range->TearDown();
-  delete code_range;
+  code_range.TearDown();
}