Commit 388c1094 authored by erik.corry@gmail.com

* Split up code_space into old_data_space and code_space.

* Make old_data_space non-executable on OSs and hardware that support it.
* Rename old_space to old_pointer_space (can contain pointers, esp. to new space).
* Ensure that individual pages allocated for old_space are only executable when
they are for code objects.
* Ensure Space::Setup can cope with non-aligned memory.
* Make some methods on Spaces virtual.  Make a way to iterate over all spaces.
* Replace executability flag with Executability enum in order to make intent at
the call site clearer (see the sketch after this list).
* Fix serialization/deserialization to allocate write barrier memory for large
arrays.
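
A minimal sketch of the intent change at a call site, using the AllocateRawMemory
signature from the diff below (the calls shown are illustrative, not a specific
line in the change):

  // Before: a bare bool, whose meaning is unclear at the call site:
  //   void* mem = MemoryAllocator::AllocateRawMemory(requested, &allocated, true);
  // After: the Executability enum states the intent explicitly.
  void* code_mem = MemoryAllocator::AllocateRawMemory(requested, &allocated, EXECUTABLE);
  void* data_mem = MemoryAllocator::AllocateRawMemory(requested, &allocated, NOT_EXECUTABLE);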



git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@165 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 1472ee59
......@@ -656,7 +656,7 @@ v8::Handle<v8::FunctionTemplate> GCExtension::GetNativeFunction(
v8::Handle<v8::Value> GCExtension::GC(const v8::Arguments& args) {
// All allocation spaces other than NEW_SPACE have the same effect.
Heap::CollectGarbage(0, OLD_SPACE);
Heap::CollectGarbage(0, OLD_DATA_SPACE);
return v8::Undefined();
}
......
......@@ -49,7 +49,7 @@ Handle<DescriptorArray> Factory::NewDescriptorArray(int number_of_descriptors) {
}
// Symbols are created in the old generation (code space).
// Symbols are created in the old generation (data space).
Handle<String> Factory::LookupSymbol(Vector<const char> string) {
CALL_HEAP_FUNCTION(Heap::LookupSymbol(string), String);
}
......
......@@ -214,17 +214,19 @@ typedef bool (*WeakSlotCallback)(Object** pointer);
// NOTE: SpaceIterator depends on AllocationSpace enumeration values being
// consecutive.
enum AllocationSpace {
NEW_SPACE,
OLD_SPACE,
CODE_SPACE,
MAP_SPACE,
LO_SPACE,
NEW_SPACE, // Semispaces collected with copying collector.
OLD_POINTER_SPACE, // Must be first of the paged spaces - see PagedSpaces.
OLD_DATA_SPACE, // May not have pointers to new space.
CODE_SPACE, // Also one of the old spaces. Marked executable.
MAP_SPACE, // Only map objects.
LO_SPACE, // Large objects.
FIRST_SPACE = NEW_SPACE,
LAST_SPACE = LO_SPACE
LAST_SPACE = LO_SPACE // <= 5 (see kSpaceBits and kLOSpacePointer)
};
const int kSpaceTagSize = 3;
const int kSpaceTagMask = (1 << kSpaceTagSize) - 1;
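
A small sketch (a hypothetical debug check, not part of this change) of what the
consecutive ids and the 3-bit tag buy: the iterators added further down can walk
the ids with a plain int counter, and every id fits in kSpaceTagSize bits:

  // Every space id from FIRST_SPACE up to LAST_SPACE (== 5) fits in the tag bits.
  for (int id = FIRST_SPACE; id <= LAST_SPACE; id++) {
    ASSERT((id & ~kSpaceTagMask) == 0);
  }
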
// A flag that indicates whether objects should be pretenured when
// allocated (allocated directly into the old generation) or not
// (allocated in the young generation if the object size and type
......@@ -233,6 +235,8 @@ enum PretenureFlag { NOT_TENURED, TENURED };
enum GarbageCollector { SCAVENGER, MARK_COMPACTOR };
enum Executability { NOT_EXECUTABLE, EXECUTABLE };
// A CodeDesc describes a buffer holding instructions and relocation
// information. The instructions start at the beginning of the buffer
......
......@@ -44,7 +44,8 @@ int Heap::MaxHeapObjectSize() {
}
Object* Heap::AllocateRaw(int size_in_bytes, AllocationSpace space) {
Object* Heap::AllocateRaw(int size_in_bytes,
AllocationSpace space) {
ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
#ifdef DEBUG
if (FLAG_gc_interval >= 0 &&
......@@ -60,8 +61,10 @@ Object* Heap::AllocateRaw(int size_in_bytes, AllocationSpace space) {
}
Object* result;
if (OLD_SPACE == space) {
result = old_space_->AllocateRaw(size_in_bytes);
if (OLD_POINTER_SPACE == space) {
result = old_pointer_space_->AllocateRaw(size_in_bytes);
} else if (OLD_DATA_SPACE == space) {
result = old_data_space_->AllocateRaw(size_in_bytes);
} else if (CODE_SPACE == space) {
result = code_space_->AllocateRaw(size_in_bytes);
} else if (LO_SPACE == space) {
......@@ -75,32 +78,6 @@ Object* Heap::AllocateRaw(int size_in_bytes, AllocationSpace space) {
}
Object* Heap::AllocateForDeserialization(int size_in_bytes,
AllocationSpace space) {
ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
PagedSpace* where;
switch (space) {
case NEW_SPACE:
return new_space_->AllocateRaw(size_in_bytes);
case LO_SPACE:
return lo_space_->AllocateRaw(size_in_bytes);
case OLD_SPACE:
where = old_space_;
break;
case CODE_SPACE:
where = code_space_;
break;
case MAP_SPACE:
where = map_space_;
break;
}
// Only paged spaces fall through.
return where->AllocateForDeserialization(size_in_bytes);
}
Object* Heap::NumberFromInt32(int32_t value) {
if (Smi::IsValid(value)) return Smi::FromInt(value);
// Bypass NumberFromDouble to avoid various redundant checks.
......@@ -160,9 +137,9 @@ void Heap::RecordWrite(Address address, int offset) {
}
AllocationSpace Heap::TargetSpace(HeapObject* object) {
// Heap numbers and sequential strings are promoted to code space, all
// other object types are promoted to old space. We do not use
OldSpace* Heap::TargetSpace(HeapObject* object) {
// Heap numbers and sequential strings are promoted to old data space, all
// other object types are promoted to old pointer space. We do not use
// object->IsHeapNumber() and object->IsSeqString() because we already
// know that object has the heap object tag.
InstanceType type = object->map()->instance_type();
......@@ -171,7 +148,7 @@ AllocationSpace Heap::TargetSpace(HeapObject* object) {
type != HEAP_NUMBER_TYPE &&
(type >= FIRST_NONSTRING_TYPE ||
String::cast(object)->representation_tag() != kSeqStringTag);
return has_pointers ? OLD_SPACE : CODE_SPACE;
return has_pointers ? old_pointer_space_ : old_data_space_;
}
......
......@@ -247,7 +247,8 @@ class Heap : public AllStatic {
static Address NewSpaceTop() { return new_space_->top(); }
static NewSpace* new_space() { return new_space_; }
static OldSpace* old_space() { return old_space_; }
static OldSpace* old_pointer_space() { return old_pointer_space_; }
static OldSpace* old_data_space() { return old_data_space_; }
static OldSpace* code_space() { return code_space_; }
static MapSpace* map_space() { return map_space_; }
static LargeObjectSpace* lo_space() { return lo_space_; }
......@@ -500,18 +501,13 @@ class Heap : public AllStatic {
static Object* AllocateExternalStringFromTwoByte(
ExternalTwoByteString::Resource* resource);
// Allocates an uninitialized object.
// Allocates an uninitialized object. The memory is non-executable if the
// hardware and OS allow.
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
// failed.
// Please note this function does not perform a garbage collection.
static inline Object* AllocateRaw(int size_in_bytes, AllocationSpace space);
// Allocate an uninitialized object during deserialization. Performs linear
// allocation (i.e., guaranteed no free list allocation) and assumes the
// spaces are all preexpanded so allocation should not fail.
static inline Object* AllocateForDeserialization(int size_in_bytes,
AllocationSpace space);
static inline Object* AllocateRaw(int size_in_bytes,
AllocationSpace space);
// Makes a new native code object
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
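
A hedged, caller-side sketch of the AllocateRaw contract documented above (the
caller and the retry policy are illustrative only; AllocateRaw is used from
inside the heap implementation):

  Object* result = Heap::AllocateRaw(size_in_bytes, OLD_DATA_SPACE);
  if (result->IsFailure()) {
    // Failure::RetryAfterGC(size_in_bytes, OLD_DATA_SPACE) came back:
    // collect in that space, then retry once.
    Heap::CollectGarbage(size_in_bytes, OLD_DATA_SPACE);
    result = Heap::AllocateRaw(size_in_bytes, OLD_DATA_SPACE);
  }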
......@@ -551,6 +547,9 @@ class Heap : public AllStatic {
// Returns whether required_space bytes are available after the collection.
static bool CollectGarbage(int required_space, AllocationSpace space);
// Performs a full garbage collection.
static void CollectAllGarbage();
// Utility to invoke the scavenger. This is needed in test code to
// ensure correct callback for weak global handles.
static void PerformScavenge();
......@@ -609,7 +608,7 @@ class Heap : public AllStatic {
static bool InSpace(HeapObject* value, AllocationSpace space);
// Finds out which space an object should get promoted to based on its type.
static inline AllocationSpace TargetSpace(HeapObject* object);
static inline OldSpace* TargetSpace(HeapObject* object);
// Sets the stub_cache_ (only used when expanding the dictionary).
static void set_code_stubs(Dictionary* value) { code_stubs_ = value; }
......@@ -726,7 +725,8 @@ class Heap : public AllStatic {
static const int kMaxMapSpaceSize = 8*MB;
static NewSpace* new_space_;
static OldSpace* old_space_;
static OldSpace* old_pointer_space_;
static OldSpace* old_data_space_;
static OldSpace* code_space_;
static MapSpace* map_space_;
static LargeObjectSpace* lo_space_;
......@@ -801,11 +801,10 @@ class Heap : public AllStatic {
bool new_object,
PretenureFlag pretenure = NOT_TENURED);
// Allocate an uninitialized object in map space. The behavior is
// identical to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that
// (a) it doesn't have to test the allocation space argument and (b) can
// reduce code size (since both AllocateRaw and AllocateRawMap are
// inlined).
// Allocate an uninitialized object in map space. The behavior is identical
// to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
// have to test the allocation space argument and (b) can reduce code size
// (since both AllocateRaw and AllocateRawMap are inlined).
static inline Object* AllocateRawMap(int size_in_bytes);
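
A hedged sketch of the equivalence described above (illustrative; AllocateRawMap
is an internal helper, so both calls would come from inside the heap code):

  // Both requests end up in map space; AllocateRawMap simply skips the space
  // dispatch, which keeps the inlined fast path smaller.
  Object* via_generic = Heap::AllocateRaw(size_in_bytes, MAP_SPACE);
  Object* via_special = Heap::AllocateRawMap(size_in_bytes);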
......@@ -912,10 +911,44 @@ class VerifyPointersAndRSetVisitor: public ObjectVisitor {
#endif
// Space iterator for iterating over all spaces of the heap.
// Returns each space in turn, and null when it is done.
class AllSpaces BASE_EMBEDDED {
public:
Space* next();
AllSpaces() { counter_ = FIRST_SPACE; }
private:
int counter_;
};
// Space iterator for iterating over all old spaces of the heap: Old pointer
// space, old data space and code space.
// Returns each space in turn, and null when it is done.
class OldSpaces BASE_EMBEDDED {
public:
OldSpace* next();
OldSpaces() { counter_ = OLD_POINTER_SPACE; }
private:
int counter_;
};
// Space iterator for iterating over all the paged spaces of the heap:
// Map space, old pointer space, old data space and code space.
// Returns each space in turn, and null when it is done.
class PagedSpaces BASE_EMBEDDED {
public:
PagedSpace* next();
PagedSpaces() { counter_ = OLD_POINTER_SPACE; }
private:
int counter_;
};
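
A minimal usage sketch for the iterator classes above (Size() on the returned
spaces is assumed to be available; next() returns null when the walk is done):

  int old_generation_size = 0;
  OldSpaces spaces;
  while (OldSpace* space = spaces.next()) {
    old_generation_size += space->Size();
  }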
// Space iterator for iterating over all spaces of the heap.
// For each space an object iterator is provided. The deallocation of the
// returned object iterators is handled by the space iterator.
class SpaceIterator : public Malloced {
public:
SpaceIterator();
......
......@@ -241,7 +241,8 @@ class MarkCompactCollector : public AllStatic {
// Callback functions for deallocating non-live blocks in the old
// generation.
static void DeallocateOldBlock(Address start, int size_in_bytes);
static void DeallocateOldPointerBlock(Address start, int size_in_bytes);
static void DeallocateOldDataBlock(Address start, int size_in_bytes);
static void DeallocateCodeBlock(Address start, int size_in_bytes);
static void DeallocateMapBlock(Address start, int size_in_bytes);
......@@ -295,9 +296,13 @@ class MarkCompactCollector : public AllStatic {
static int RelocateMapObject(HeapObject* obj);
// Relocates an old object.
static int RelocateOldObject(HeapObject* obj);
static int RelocateOldPointerObject(HeapObject* obj);
static int RelocateOldDataObject(HeapObject* obj);
// Relocates an immutable object in the code space.
// Helper function.
static inline int RelocateOldNonCodeObject(HeapObject* obj, OldSpace* space);
// Relocates an object in the code space.
static int RelocateCodeObject(HeapObject* obj);
// Copy a new object.
......@@ -322,11 +327,14 @@ class MarkCompactCollector : public AllStatic {
// Number of live objects in Heap::to_space_.
static int live_young_objects_;
// Number of live objects in Heap::old_space_.
static int live_old_objects_;
// Number of live objects in Heap::old_pointer_space_.
static int live_old_pointer_objects_;
// Number of live objects in Heap::old_data_space_.
static int live_old_data_objects_;
// Number of live objects in Heap::code_space_.
static int live_immutable_objects_;
static int live_code_objects_;
// Number of live objects in Heap::map_space_.
static int live_map_objects_;
......
......@@ -182,8 +182,8 @@ int main(int argc, char** argv) {
i::Bootstrapper::NativesSourceLookup(i);
}
}
// Get rid of unreferenced scripts.
i::Heap::CollectGarbage(0, i::OLD_SPACE);
// Get rid of unreferenced scripts with a global GC.
i::Heap::CollectAllGarbage();
i::Serializer ser;
ser.Serialize();
char* str;
......
......@@ -931,7 +931,7 @@ Object* JSObject::Copy(PretenureFlag pretenure) {
// Make the clone.
Object* clone = (pretenure == NOT_TENURED) ?
Heap::Allocate(map(), NEW_SPACE) :
Heap::Allocate(map(), OLD_SPACE);
Heap::Allocate(map(), OLD_POINTER_SPACE);
if (clone->IsFailure()) return clone;
JSObject::cast(clone)->CopyBody(this);
......
......@@ -3327,6 +3327,8 @@ class Oddball: public HeapObject {
// Proxy describes objects pointing from JavaScript to C structures.
// Since they cannot contain references to JS HeapObjects they can be
// placed in old_data_space.
class Proxy: public HeapObject {
public:
// [proxy]: field containing the address.
......
......@@ -353,8 +353,8 @@ static const int kMmapFd = -1;
static const int kMmapFdOffset = 0;
VirtualMemory::VirtualMemory(size_t size, void* address_hint) {
address_ = mmap(address_hint, size, PROT_NONE,
VirtualMemory::VirtualMemory(size_t size) {
address_ = mmap(NULL, size, PROT_NONE,
MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE,
kMmapFd, kMmapFdOffset);
size_ = size;
......
......@@ -312,8 +312,8 @@ static const int kMmapFd = -1;
static const int kMmapFdOffset = 0;
VirtualMemory::VirtualMemory(size_t size, void* address_hint) {
address_ = mmap(address_hint, size, PROT_NONE,
VirtualMemory::VirtualMemory(size_t size) {
address_ = mmap(NULL, size, PROT_NONE,
MAP_PRIVATE | MAP_ANON | MAP_NORESERVE,
kMmapFd, kMmapFdOffset);
size_ = size;
......
......@@ -1171,9 +1171,8 @@ bool VirtualMemory::IsReserved() {
}
VirtualMemory::VirtualMemory(size_t size, void* address_hint) {
address_ =
VirtualAlloc(address_hint, size, MEM_RESERVE, PAGE_NOACCESS);
VirtualMemory::VirtualMemory(size_t size) {
address_ = VirtualAlloc(NULL, size, MEM_RESERVE, PAGE_NOACCESS);
size_ = size;
}
......
......@@ -222,7 +222,7 @@ class OS {
class VirtualMemory {
public:
// Reserves virtual memory with size.
VirtualMemory(size_t size, void* address_hint = 0);
explicit VirtualMemory(size_t size);
~VirtualMemory();
// Returns whether the memory has been reserved.
......
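
A hedged sketch of the narrowed constructor (address(), IsReserved() and Commit()
are assumed from their uses elsewhere in this change; the sizes are illustrative):

  VirtualMemory reservation(1 * MB);  // reserve only; no pages are committed yet
  if (reservation.IsReserved()) {
    // Commit the first page as non-executable data memory.
    reservation.Commit(reservation.address(), Page::kPageSize, false);
  }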
......@@ -4536,8 +4536,8 @@ static Object* Runtime_DebugGetLoadedScripts(Arguments args) {
// Perform two GCs to get rid of all unreferenced scripts. The first GC gets
// rid of all the cached script wrappers and the second gets rid of the
// scripts which are no longer referenced.
Heap::CollectGarbage(0, OLD_SPACE);
Heap::CollectGarbage(0, OLD_SPACE);
Heap::CollectAllGarbage();
Heap::CollectAllGarbage();
// Get the number of scripts.
int count;
......@@ -4641,7 +4641,7 @@ static Object* Runtime_DebugReferencedBy(Arguments args) {
ASSERT(args.length() == 3);
// First perform a full GC in order to avoid references from dead objects.
Heap::CollectGarbage(0, OLD_SPACE);
Heap::CollectAllGarbage();
// Check parameters.
CONVERT_CHECKED(JSObject, target, args[0]);
......@@ -4721,7 +4721,7 @@ static Object* Runtime_DebugConstructedBy(Arguments args) {
ASSERT(args.length() == 2);
// First perform a full GC in order to avoid dead objects.
Heap::CollectGarbage(0, OLD_SPACE);
Heap::CollectAllGarbage();
// Check parameters.
CONVERT_CHECKED(JSFunction, constructor, args[0]);
......
......@@ -312,10 +312,11 @@ class Deserializer: public ObjectVisitor {
bool has_log_; // The file has log information.
// Resolve caches the following:
List<Page*> map_pages_; // All pages in the map space.
List<Page*> old_pages_; // All pages in the old space.
List<Page*> map_pages_; // All pages in the map space.
List<Page*> old_pointer_pages_; // All pages in the old pointer space.
List<Page*> old_data_pages_; // All pages in the old data space.
List<Page*> code_pages_;
List<Object*> large_objects_; // All known large objects.
List<Object*> large_objects_; // All known large objects.
// A list of global handles at deserialization time.
List<Object**> global_handles_;
......
......@@ -86,14 +86,7 @@ Page* Page::next_page() {
Address Page::AllocationTop() {
PagedSpace* owner = MemoryAllocator::PageOwner(this);
if (Heap::old_space() == owner) {
return Heap::old_space()->PageAllocationTop(this);
} else if (Heap::code_space() == owner) {
return Heap::code_space()->PageAllocationTop(this);
} else {
ASSERT(Heap::map_space() == owner);
return Heap::map_space()->PageAllocationTop(this);
}
return owner->PageAllocationTop(this);
}
......@@ -282,24 +275,6 @@ Object* PagedSpace::MCAllocateRaw(int size_in_bytes) {
}
// Allocating during deserialization. Always roll to the next page in the
// space, which should be suitably expanded.
Object* PagedSpace::AllocateForDeserialization(int size_in_bytes) {
ASSERT(HasBeenSetup());
ASSERT_OBJECT_SIZE(size_in_bytes);
HeapObject* object = AllocateLinearly(&allocation_info_, size_in_bytes);
if (object != NULL) return object;
// The space should be pre-expanded.
Page* current_page = Page::FromAllocationTop(allocation_info_.top);
ASSERT(current_page->next_page()->is_valid());
object = AllocateInNextPage(current_page, size_in_bytes);
ASSERT(object != NULL);
return object;
}
// -----------------------------------------------------------------------------
// LargeObjectChunk
......
......@@ -227,10 +227,10 @@ void MemoryAllocator::TearDown() {
void* MemoryAllocator::AllocateRawMemory(const size_t requested,
size_t* allocated,
bool executable) {
Executability executable) {
if (size_ + static_cast<int>(requested) > capacity_) return NULL;
void* mem = OS::Allocate(requested, allocated, executable);
void* mem = OS::Allocate(requested, allocated, executable == EXECUTABLE);
int alloced = *allocated;
size_ += alloced;
Counters::memory_allocated.Increment(alloced);
......@@ -316,7 +316,7 @@ Page* MemoryAllocator::CommitPages(Address start, size_t size,
ASSERT(initial_chunk_->address() <= start);
ASSERT(start + size <= reinterpret_cast<Address>(initial_chunk_->address())
+ initial_chunk_->size());
if (!initial_chunk_->Commit(start, size, owner->executable())) {
if (!initial_chunk_->Commit(start, size, owner->executable() == EXECUTABLE)) {
return Page::FromAddress(NULL);
}
Counters::memory_allocated.Increment(size);
......@@ -332,7 +332,7 @@ Page* MemoryAllocator::CommitPages(Address start, size_t size,
bool MemoryAllocator::CommitBlock(Address start,
size_t size,
bool executable) {
Executability executable) {
ASSERT(start != NULL);
ASSERT(size > 0);
ASSERT(initial_chunk_ != NULL);
......@@ -474,7 +474,9 @@ void MemoryAllocator::ReportStatistics() {
// -----------------------------------------------------------------------------
// PagedSpace implementation
PagedSpace::PagedSpace(int max_capacity, AllocationSpace id, bool executable)
PagedSpace::PagedSpace(int max_capacity,
AllocationSpace id,
Executability executable)
: Space(id, executable) {
max_capacity_ = (RoundDown(max_capacity, Page::kPageSize) / Page::kPageSize)
* Page::kObjectAreaSize;
......@@ -494,8 +496,11 @@ bool PagedSpace::Setup(Address start, size_t size) {
int num_pages = 0;
// Try to use the virtual memory range passed to us. If it is too small to
// contain at least one page, ignore it and allocate instead.
if (PagesInChunk(start, size) > 0) {
first_page_ = MemoryAllocator::CommitPages(start, size, this, &num_pages);
int pages_in_chunk = PagesInChunk(start, size);
if (pages_in_chunk > 0) {
first_page_ = MemoryAllocator::CommitPages(RoundUp(start, Page::kPageSize),
Page::kPageSize * pages_in_chunk,
this, &num_pages);
} else {
int requested_pages = Min(MemoryAllocator::kPagesPerChunk,
max_capacity_ / Page::kObjectAreaSize);
......@@ -768,15 +773,14 @@ void PagedSpace::Print() { }
NewSpace::NewSpace(int initial_semispace_capacity,
int maximum_semispace_capacity,
AllocationSpace id,
bool executable)
: Space(id, executable) {
AllocationSpace id)
: Space(id, NOT_EXECUTABLE) {
ASSERT(initial_semispace_capacity <= maximum_semispace_capacity);
ASSERT(IsPowerOf2(maximum_semispace_capacity));
maximum_capacity_ = maximum_semispace_capacity;
capacity_ = initial_semispace_capacity;
to_space_ = new SemiSpace(capacity_, maximum_capacity_, id, executable);
from_space_ = new SemiSpace(capacity_, maximum_capacity_, id, executable);
to_space_ = new SemiSpace(capacity_, maximum_capacity_, id);
from_space_ = new SemiSpace(capacity_, maximum_capacity_, id);
// Allocate and setup the histogram arrays if necessary.
#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
......@@ -940,9 +944,8 @@ void NewSpace::Verify() {
SemiSpace::SemiSpace(int initial_capacity,
int maximum_capacity,
AllocationSpace id,
bool executable)
: Space(id, executable), capacity_(initial_capacity),
AllocationSpace id)
: Space(id, NOT_EXECUTABLE), capacity_(initial_capacity),
maximum_capacity_(maximum_capacity), start_(NULL), age_mark_(NULL) {
}
......@@ -980,6 +983,9 @@ bool SemiSpace::Double() {
#ifdef DEBUG
void SemiSpace::Print() { }
void SemiSpace::Verify() { }
#endif
......@@ -2190,7 +2196,7 @@ HeapObject* LargeObjectIterator::next() {
LargeObjectChunk* LargeObjectChunk::New(int size_in_bytes,
size_t* chunk_size,
bool executable) {
Executability executable) {
size_t requested = ChunkSizeFor(size_in_bytes);
void* mem = MemoryAllocator::AllocateRawMemory(requested,
chunk_size,
......@@ -2216,8 +2222,8 @@ int LargeObjectChunk::ChunkSizeFor(int size_in_bytes) {
// -----------------------------------------------------------------------------
// LargeObjectSpace
LargeObjectSpace::LargeObjectSpace(AllocationSpace id, bool executable)
: Space(id, executable),
LargeObjectSpace::LargeObjectSpace(AllocationSpace id)
: Space(id, NOT_EXECUTABLE), // Managed on a per-allocation basis
first_chunk_(NULL),
size_(0),
page_count_(0) {}
......@@ -2245,11 +2251,12 @@ void LargeObjectSpace::TearDown() {
Object* LargeObjectSpace::AllocateRawInternal(int requested_size,
int object_size) {
int object_size,
Executability executable) {
ASSERT(0 < object_size && object_size <= requested_size);
size_t chunk_size;
LargeObjectChunk* chunk =
LargeObjectChunk::New(requested_size, &chunk_size, executable());
LargeObjectChunk::New(requested_size, &chunk_size, executable);
if (chunk == NULL) {
return Failure::RetryAfterGC(requested_size, identity());
}
......@@ -2280,15 +2287,28 @@ Object* LargeObjectSpace::AllocateRawInternal(int requested_size,
}
Object* LargeObjectSpace::AllocateRaw(int size_in_bytes) {
Object* LargeObjectSpace::AllocateRawCode(int size_in_bytes) {
ASSERT(0 < size_in_bytes);
return AllocateRawInternal(size_in_bytes, size_in_bytes);
return AllocateRawInternal(size_in_bytes,
size_in_bytes,
EXECUTABLE);
}
Object* LargeObjectSpace::AllocateRawFixedArray(int size_in_bytes) {
ASSERT(0 < size_in_bytes);
int extra_rset_bytes = ExtraRSetBytesFor(size_in_bytes);
return AllocateRawInternal(size_in_bytes + extra_rset_bytes, size_in_bytes);
return AllocateRawInternal(size_in_bytes + extra_rset_bytes,
size_in_bytes,
NOT_EXECUTABLE);
}
Object* LargeObjectSpace::AllocateRaw(int size_in_bytes) {
ASSERT(0 < size_in_bytes);
return AllocateRawInternal(size_in_bytes,
size_in_bytes,
NOT_EXECUTABLE);
}
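
A short sketch of choosing between the three entry points above (the call sites
and size variables are hypothetical; lo_space() is the accessor declared in
heap.h). Code objects get executable chunks, while fixed arrays reserve extra
remembered-set bytes so the write barrier works for large arrays, as noted in
the commit message:

  Object* code_obj  = Heap::lo_space()->AllocateRawCode(code_size);         // EXECUTABLE chunk
  Object* array_obj = Heap::lo_space()->AllocateRawFixedArray(array_size);  // + rset bytes
  Object* other_obj = Heap::lo_space()->AllocateRaw(byte_size);             // plain data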
......
......@@ -461,10 +461,10 @@ THREADED_TEST(ScriptUsingStringResource) {
CHECK(source->IsExternal());
CHECK_EQ(resource,
static_cast<TestResource*>(source->GetExternalStringResource()));
v8::internal::Heap::CollectGarbage(0, v8::internal::OLD_SPACE);
v8::internal::Heap::CollectAllGarbage();
CHECK_EQ(0, TestResource::dispose_count);
}
v8::internal::Heap::CollectGarbage(0, v8::internal::OLD_SPACE);
v8::internal::Heap::CollectAllGarbage();
CHECK_EQ(1, TestResource::dispose_count);
}
......@@ -481,10 +481,10 @@ THREADED_TEST(ScriptUsingAsciiStringResource) {
Local<Value> value = script->Run();
CHECK(value->IsNumber());
CHECK_EQ(7, value->Int32Value());
v8::internal::Heap::CollectGarbage(0, v8::internal::OLD_SPACE);
v8::internal::Heap::CollectAllGarbage();
CHECK_EQ(0, TestAsciiResource::dispose_count);
}
v8::internal::Heap::CollectGarbage(0, v8::internal::OLD_SPACE);
v8::internal::Heap::CollectAllGarbage();
CHECK_EQ(1, TestAsciiResource::dispose_count);
}
......@@ -2455,7 +2455,7 @@ static v8::Handle<Value> ArgumentsTestCallback(const v8::Arguments& args) {
CHECK_EQ(v8::Integer::New(3), args[2]);
CHECK_EQ(v8::Undefined(), args[3]);
v8::HandleScope scope;
i::Heap::CollectGarbage(0, i::OLD_SPACE);
i::Heap::CollectAllGarbage();
return v8::Undefined();
}
......@@ -4694,7 +4694,7 @@ THREADED_TEST(LockUnlockLock) {
static void EnsureNoSurvivingGlobalObjects() {
int count = 0;
v8::internal::Heap::CollectGarbage(0, v8::internal::OLD_SPACE);
v8::internal::Heap::CollectAllGarbage();
v8::internal::HeapIterator it;
while (it.has_next()) {
v8::internal::HeapObject* object = it.next();
......
......@@ -618,7 +618,7 @@ static void DebugEventBreakPointCollectGarbage(
Heap::CollectGarbage(0, v8::internal::NEW_SPACE);
} else {
// Mark sweep (and perhaps compact).
Heap::CollectGarbage(0, v8::internal::OLD_SPACE);
Heap::CollectAllGarbage();
}
}
}
......@@ -960,7 +960,7 @@ static void CallAndGC(v8::Local<v8::Object> recv, v8::Local<v8::Function> f) {
CHECK_EQ(2 + i * 3, break_point_hit_count);
// Mark sweep (and perhaps compact) and call function.
Heap::CollectGarbage(0, v8::internal::OLD_SPACE);
Heap::CollectAllGarbage();
f->Call(recv, 0, NULL);
CHECK_EQ(3 + i * 3, break_point_hit_count);
}
......
......@@ -176,7 +176,8 @@ TEST(Tagging) {
CHECK(Failure::RetryAfterGC(12, NEW_SPACE)->IsFailure());
CHECK_EQ(12, Failure::RetryAfterGC(12, NEW_SPACE)->requested());
CHECK_EQ(NEW_SPACE, Failure::RetryAfterGC(12, NEW_SPACE)->allocation_space());
CHECK_EQ(OLD_SPACE, Failure::RetryAfterGC(12, OLD_SPACE)->allocation_space());
CHECK_EQ(OLD_POINTER_SPACE,
Failure::RetryAfterGC(12, OLD_POINTER_SPACE)->allocation_space());
CHECK(Failure::Exception()->IsFailure());
CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
......@@ -353,7 +354,7 @@ TEST(WeakGlobalHandlesMark) {
Handle<Object> h1 = GlobalHandles::Create(i);
Handle<Object> h2 = GlobalHandles::Create(u);
CHECK(Heap::CollectGarbage(0, OLD_SPACE));
CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
CHECK(Heap::CollectGarbage(0, NEW_SPACE));
// Make sure the object is promoted.
......@@ -363,7 +364,7 @@ TEST(WeakGlobalHandlesMark) {
CHECK(!GlobalHandles::IsNearDeath(h1.location()));
CHECK(!GlobalHandles::IsNearDeath(h2.location()));
CHECK(Heap::CollectGarbage(0, OLD_SPACE));
CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
CHECK((*h1)->IsString());
......@@ -400,7 +401,7 @@ TEST(DeleteWeakGlobalHandle) {
CHECK(!WeakPointerCleared);
// Mark-compact treats weak reference properly.
CHECK(Heap::CollectGarbage(0, OLD_SPACE));
CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
CHECK(WeakPointerCleared);
}
......@@ -751,11 +752,11 @@ TEST(Iteration) {
Handle<Object> objs[objs_count];
int next_objs_index = 0;
// Allocate a JS array to OLD_SPACE and NEW_SPACE
// Allocate a JS array to OLD_POINTER_SPACE and NEW_SPACE
objs[next_objs_index++] = Factory::NewJSArray(10);
objs[next_objs_index++] = Factory::NewJSArray(10, TENURED);
// Allocate a small string to CODE_SPACE and NEW_SPACE
// Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
objs[next_objs_index++] =
Factory::NewStringFromAscii(CStrVector("abcdefghij"));
objs[next_objs_index++] =
......
......@@ -102,10 +102,10 @@ TEST(Promotion) {
CHECK(Heap::InSpace(*array, NEW_SPACE));
// Call the m-c collector, so array becomes an old object.
CHECK(Heap::CollectGarbage(0, OLD_SPACE));
CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
// Array now sits in the old space
CHECK(Heap::InSpace(*array, OLD_SPACE));
CHECK(Heap::InSpace(*array, OLD_POINTER_SPACE));
}
......@@ -120,7 +120,7 @@ TEST(NoPromotion) {
v8::HandleScope sc;
// Do a mark compact GC to shrink the heap.
CHECK(Heap::CollectGarbage(0, OLD_SPACE));
CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
// Allocate a big Fixed array in the new space.
int size = (Heap::MaxHeapObjectSize() - Array::kHeaderSize) / kPointerSize;
......@@ -142,7 +142,7 @@ TEST(NoPromotion) {
}
// Call mark compact GC, and it should pass.
CHECK(Heap::CollectGarbage(0, OLD_SPACE));
CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
// array should not be promoted because the old space is full.
CHECK(Heap::InSpace(*array, NEW_SPACE));
......@@ -154,7 +154,7 @@ TEST(MarkCompactCollector) {
v8::HandleScope sc;
// call mark-compact when heap is empty
CHECK(Heap::CollectGarbage(0, OLD_SPACE));
CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
// keep allocating garbage in new space until it fails
const int ARRAY_SIZE = 100;
......@@ -190,7 +190,7 @@ TEST(MarkCompactCollector) {
Top::context()->global()->SetProperty(func_name, function, NONE);
JSObject* obj = JSObject::cast(Heap::AllocateJSObject(function));
CHECK(Heap::CollectGarbage(0, OLD_SPACE));
CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
func_name = String::cast(Heap::LookupAsciiSymbol("theFunction"));
CHECK(Top::context()->global()->HasLocalProperty(func_name));
......@@ -204,7 +204,7 @@ TEST(MarkCompactCollector) {
String* prop_name = String::cast(Heap::LookupAsciiSymbol("theSlot"));
obj->SetProperty(prop_name, Smi::FromInt(23), NONE);
CHECK(Heap::CollectGarbage(0, OLD_SPACE));
CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
obj_name = String::cast(Heap::LookupAsciiSymbol("theObject"));
CHECK(Top::context()->global()->HasLocalProperty(obj_name));
......@@ -242,7 +242,7 @@ TEST(GCCallback) {
CHECK_EQ(0, gc_starts);
CHECK_EQ(gc_ends, gc_starts);
CHECK(Heap::CollectGarbage(0, OLD_SPACE));
CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
CHECK_EQ(1, gc_starts);
CHECK_EQ(gc_ends, gc_starts);
}
......@@ -292,7 +292,7 @@ TEST(ObjectGroups) {
GlobalHandles::AddToGroup(reinterpret_cast<void*>(2), g2s1.location());
GlobalHandles::AddToGroup(reinterpret_cast<void*>(2), g2s2.location());
// Do a full GC
CHECK(Heap::CollectGarbage(0, OLD_SPACE));
CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
// All objects should be alive.
CHECK_EQ(0, NumberOfWeakCalls);
......@@ -308,7 +308,7 @@ TEST(ObjectGroups) {
GlobalHandles::AddToGroup(reinterpret_cast<void*>(2), g2s1.location());
GlobalHandles::AddToGroup(reinterpret_cast<void*>(2), g2s2.location());
CHECK(Heap::CollectGarbage(0, OLD_SPACE));
CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
// All objects should be gone. 5 global handles in total.
CHECK_EQ(5, NumberOfWeakCalls);
......
......@@ -101,7 +101,7 @@ TEST(MemoryAllocator) {
CHECK(Heap::ConfigureHeapDefault());
CHECK(MemoryAllocator::Setup(Heap::MaxCapacity()));
OldSpace faked_space(Heap::MaxCapacity(), OLD_SPACE, false);
OldSpace faked_space(Heap::MaxCapacity(), OLD_POINTER_SPACE, NOT_EXECUTABLE);
int total_pages = 0;
int requested = 2;
int allocated;
......@@ -159,8 +159,7 @@ TEST(NewSpace) {
NewSpace* s = new NewSpace(Heap::InitialSemiSpaceSize(),
Heap::SemiSpaceSize(),
NEW_SPACE,
false);
NEW_SPACE);
CHECK(s != NULL);
void* chunk =
......@@ -187,7 +186,9 @@ TEST(OldSpace) {
CHECK(Heap::ConfigureHeapDefault());
CHECK(MemoryAllocator::Setup(Heap::MaxCapacity()));
OldSpace* s = new OldSpace(Heap::OldGenerationSize(), OLD_SPACE, false);
OldSpace* s = new OldSpace(Heap::OldGenerationSize(),
OLD_POINTER_SPACE,
NOT_EXECUTABLE);
CHECK(s != NULL);
void* chunk =
......@@ -213,7 +214,7 @@ TEST(LargeObjectSpace) {
CHECK(Heap::ConfigureHeapDefault());
MemoryAllocator::Setup(Heap::MaxCapacity());
LargeObjectSpace* lo = new LargeObjectSpace(LO_SPACE, false);
LargeObjectSpace* lo = new LargeObjectSpace(LO_SPACE);
CHECK(lo != NULL);
CHECK(lo->Setup());
......