Commit 388c1094 authored by erik.corry@gmail.com

* Split up code_space into old_data_space and code_space.

* Make old_data_space non-executable on OSs and hardware that support it.
* Rename old_space to old_pointer_space (can contain pointers, esp. to new space).
* Ensure that individual pages allocated for old_space are only executable when
  they are for code objects.
* Ensure Space::Setup can cope with non-aligned memory.
* Make some methods on Spaces virtual.  Make a way to iterate over all spaces.
* Replace the executability flag with an Executability enum in order to make the
  intent at the call site clearer (see the sketch after this list).
* Fix serialization/deserialization to allocate write barrier memory for large
  arrays.
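
For illustration, a minimal sketch of the call-site difference the Executability
enum makes (AllocatePage is a hypothetical routine used only for this example,
not part of this patch):

    #include <cstddef>

    enum Executability { NOT_EXECUTABLE, EXECUTABLE };  // as added in globals.h

    // Hypothetical allocation routine, declared only for this example.
    void* AllocatePage(size_t size, Executability executable);

    void Example() {
      AllocatePage(4096, EXECUTABLE);      // clearly a request for a code page
      AllocatePage(4096, NOT_EXECUTABLE);  // clearly a request for a data page
      // With the old bool parameter, AllocatePage(4096, true) left the reader
      // guessing what 'true' meant.
    }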



git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@165 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 1472ee59
@@ -656,7 +656,7 @@ v8::Handle<v8::FunctionTemplate> GCExtension::GetNativeFunction(
 v8::Handle<v8::Value> GCExtension::GC(const v8::Arguments& args) {
   // All allocation spaces other than NEW_SPACE have the same effect.
-  Heap::CollectGarbage(0, OLD_SPACE);
+  Heap::CollectGarbage(0, OLD_DATA_SPACE);
   return v8::Undefined();
 }
...
@@ -49,7 +49,7 @@ Handle<DescriptorArray> Factory::NewDescriptorArray(int number_of_descriptors) {
 }

-// Symbols are created in the old generation (code space).
+// Symbols are created in the old generation (data space).
 Handle<String> Factory::LookupSymbol(Vector<const char> string) {
   CALL_HEAP_FUNCTION(Heap::LookupSymbol(string), String);
 }
...
@@ -214,17 +214,19 @@ typedef bool (*WeakSlotCallback)(Object** pointer);
 // NOTE: SpaceIterator depends on AllocationSpace enumeration values being
 // consecutive.
 enum AllocationSpace {
-  NEW_SPACE,
-  OLD_SPACE,
-  CODE_SPACE,
-  MAP_SPACE,
-  LO_SPACE,
+  NEW_SPACE,          // Semispaces collected with copying collector.
+  OLD_POINTER_SPACE,  // Must be first of the paged spaces - see PagedSpaces.
+  OLD_DATA_SPACE,     // May not have pointers to new space.
+  CODE_SPACE,         // Also one of the old spaces.  Marked executable.
+  MAP_SPACE,          // Only map objects.
+  LO_SPACE,           // Large objects.
   FIRST_SPACE = NEW_SPACE,
-  LAST_SPACE = LO_SPACE
+  LAST_SPACE = LO_SPACE  // <= 5 (see kSpaceBits and kLOSpacePointer)
 };

 const int kSpaceTagSize = 3;
 const int kSpaceTagMask = (1 << kSpaceTagSize) - 1;

 // A flag that indicates whether objects should be pretenured when
 // allocated (allocated directly into the old generation) or not
 // (allocated in the young generation if the object size and type
@@ -233,6 +235,8 @@ enum PretenureFlag { NOT_TENURED, TENURED };
 enum GarbageCollector { SCAVENGER, MARK_COMPACTOR };

+enum Executability { NOT_EXECUTABLE, EXECUTABLE };

 // A CodeDesc describes a buffer holding instructions and relocation
 // information. The instructions start at the beginning of the buffer
...
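
The comment on LAST_SPACE above records the invariant that every space tag
still fits in the 3-bit tag field. A self-contained sketch of that check
(kSpaceBits and kLOSpacePointer live elsewhere in the tree and are not
reproduced here):

    #include <cassert>

    // Mirror of the new enum; the values are consecutive, as required.
    enum AllocationSpace {
      NEW_SPACE,          // 0
      OLD_POINTER_SPACE,  // 1
      OLD_DATA_SPACE,     // 2
      CODE_SPACE,         // 3
      MAP_SPACE,          // 4
      LO_SPACE,           // 5
      FIRST_SPACE = NEW_SPACE,
      LAST_SPACE = LO_SPACE
    };

    const int kSpaceTagSize = 3;
    const int kSpaceTagMask = (1 << kSpaceTagSize) - 1;  // binary 111 == 7

    int main() {
      // All six space tags must be encodable in kSpaceTagSize bits.
      assert(LAST_SPACE <= kSpaceTagMask);
      return 0;
    }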
@@ -44,7 +44,8 @@ int Heap::MaxHeapObjectSize() {
 }

-Object* Heap::AllocateRaw(int size_in_bytes, AllocationSpace space) {
+Object* Heap::AllocateRaw(int size_in_bytes,
+                          AllocationSpace space) {
   ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
 #ifdef DEBUG
   if (FLAG_gc_interval >= 0 &&
@@ -60,8 +61,10 @@ Object* Heap::AllocateRaw(int size_in_bytes, AllocationSpace space) {
   }

   Object* result;
-  if (OLD_SPACE == space) {
-    result = old_space_->AllocateRaw(size_in_bytes);
+  if (OLD_POINTER_SPACE == space) {
+    result = old_pointer_space_->AllocateRaw(size_in_bytes);
+  } else if (OLD_DATA_SPACE == space) {
+    result = old_data_space_->AllocateRaw(size_in_bytes);
   } else if (CODE_SPACE == space) {
     result = code_space_->AllocateRaw(size_in_bytes);
   } else if (LO_SPACE == space) {
@@ -75,32 +78,6 @@ Object* Heap::AllocateRaw(int size_in_bytes, AllocationSpace space) {
 }

-Object* Heap::AllocateForDeserialization(int size_in_bytes,
-                                         AllocationSpace space) {
-  ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
-  PagedSpace* where;
-  switch (space) {
-    case NEW_SPACE:
-      return new_space_->AllocateRaw(size_in_bytes);
-    case LO_SPACE:
-      return lo_space_->AllocateRaw(size_in_bytes);
-    case OLD_SPACE:
-      where = old_space_;
-      break;
-    case CODE_SPACE:
-      where = code_space_;
-      break;
-    case MAP_SPACE:
-      where = map_space_;
-      break;
-  }
-  // Only paged spaces fall through.
-  return where->AllocateForDeserialization(size_in_bytes);
-}

 Object* Heap::NumberFromInt32(int32_t value) {
   if (Smi::IsValid(value)) return Smi::FromInt(value);
   // Bypass NumberFromDouble to avoid various redundant checks.
@@ -160,9 +137,9 @@ void Heap::RecordWrite(Address address, int offset) {
 }

-AllocationSpace Heap::TargetSpace(HeapObject* object) {
-  // Heap numbers and sequential strings are promoted to code space, all
-  // other object types are promoted to old space.  We do not use
+OldSpace* Heap::TargetSpace(HeapObject* object) {
+  // Heap numbers and sequential strings are promoted to old data space, all
+  // other object types are promoted to old pointer space.  We do not use
   // object->IsHeapNumber() and object->IsSeqString() because we already
   // know that object has the heap object tag.
   InstanceType type = object->map()->instance_type();
@@ -171,7 +148,7 @@ AllocationSpace Heap::TargetSpace(HeapObject* object) {
       type != HEAP_NUMBER_TYPE &&
       (type >= FIRST_NONSTRING_TYPE ||
        String::cast(object)->representation_tag() != kSeqStringTag);
-  return has_pointers ? OLD_SPACE : CODE_SPACE;
+  return has_pointers ? old_pointer_space_ : old_data_space_;
 }
...
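
Because TargetSpace now returns the OldSpace* itself rather than an
AllocationSpace tag, a promotion path can allocate from it directly. A sketch
of such a caller (PromoteObject is hypothetical, not from this patch):

    // Promotion no longer needs a second dispatch over the enum.
    Object* PromoteObject(HeapObject* object, int size_in_bytes) {
      OldSpace* target = Heap::TargetSpace(object);  // pointer or data space
      Object* result = target->AllocateRaw(size_in_bytes);
      if (result->IsFailure()) return result;  // caller retries after GC
      // ... copy the object body into the newly allocated memory ...
      return result;
    }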
@@ -247,7 +247,8 @@ class Heap : public AllStatic {
   static Address NewSpaceTop() { return new_space_->top(); }

   static NewSpace* new_space() { return new_space_; }
-  static OldSpace* old_space() { return old_space_; }
+  static OldSpace* old_pointer_space() { return old_pointer_space_; }
+  static OldSpace* old_data_space() { return old_data_space_; }
   static OldSpace* code_space() { return code_space_; }
   static MapSpace* map_space() { return map_space_; }
   static LargeObjectSpace* lo_space() { return lo_space_; }
@@ -500,18 +501,13 @@ class Heap : public AllStatic {
   static Object* AllocateExternalStringFromTwoByte(
       ExternalTwoByteString::Resource* resource);

-  // Allocates an uninitialized object.
+  // Allocates an uninitialized object.  The memory is non-executable if the
+  // hardware and OS allow.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this function does not perform a garbage collection.
-  static inline Object* AllocateRaw(int size_in_bytes, AllocationSpace space);
+  static inline Object* AllocateRaw(int size_in_bytes,
+                                    AllocationSpace space);

-  // Allocate an unitialized object during deserialization. Performs linear
-  // allocation (ie, guaranteed no free list allocation) and assumes the
-  // spaces are all preexpanded so allocation should not fail.
-  static inline Object* AllocateForDeserialization(int size_in_bytes,
-                                                   AllocationSpace space);

   // Makes a new native code object
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
@@ -551,6 +547,9 @@ class Heap : public AllStatic {
   // Returns whether required_space bytes are available after the collection.
   static bool CollectGarbage(int required_space, AllocationSpace space);

+  // Performs a full garbage collection.
+  static void CollectAllGarbage();

   // Utility to invoke the scavenger. This is needed in test code to
   // ensure correct callback for weak global handles.
   static void PerformScavenge();
@@ -609,7 +608,7 @@ class Heap : public AllStatic {
   static bool InSpace(HeapObject* value, AllocationSpace space);

   // Finds out which space an object should get promoted to based on its type.
-  static inline AllocationSpace TargetSpace(HeapObject* object);
+  static inline OldSpace* TargetSpace(HeapObject* object);

   // Sets the stub_cache_ (only used when expanding the dictionary).
   static void set_code_stubs(Dictionary* value) { code_stubs_ = value; }
@@ -726,7 +725,8 @@ class Heap : public AllStatic {
   static const int kMaxMapSpaceSize = 8*MB;

   static NewSpace* new_space_;
-  static OldSpace* old_space_;
+  static OldSpace* old_pointer_space_;
+  static OldSpace* old_data_space_;
   static OldSpace* code_space_;
   static MapSpace* map_space_;
   static LargeObjectSpace* lo_space_;
@@ -801,11 +801,10 @@ class Heap : public AllStatic {
                              bool new_object,
                              PretenureFlag pretenure = NOT_TENURED);

-  // Allocate an uninitialized object in map space.  The behavior is
-  // identical to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that
-  // (a) it doesn't have to test the allocation space argument and (b) can
-  // reduce code size (since both AllocateRaw and AllocateRawMap are
-  // inlined).
+  // Allocate an uninitialized object in map space.  The behavior is identical
+  // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
+  // have to test the allocation space argument and (b) can reduce code size
+  // (since both AllocateRaw and AllocateRawMap are inlined).
   static inline Object* AllocateRawMap(int size_in_bytes);
@@ -912,10 +911,44 @@ class VerifyPointersAndRSetVisitor: public ObjectVisitor {
 #endif

+// Space iterator for iterating over all spaces of the heap.
+// Returns each space in turn, and null when it is done.
+class AllSpaces BASE_EMBEDDED {
+ public:
+  Space* next();
+  AllSpaces() { counter_ = FIRST_SPACE; }
+ private:
+  int counter_;
+};

+// Space iterator for iterating over all old spaces of the heap: Old pointer
+// space, old data space and code space.
+// Returns each space in turn, and null when it is done.
+class OldSpaces BASE_EMBEDDED {
+ public:
+  OldSpace* next();
+  OldSpaces() { counter_ = OLD_POINTER_SPACE; }
+ private:
+  int counter_;
+};

+// Space iterator for iterating over all the paged spaces of the heap:
+// Map space, old pointer space, old data space and code space.
+// Returns each space in turn, and null when it is done.
+class PagedSpaces BASE_EMBEDDED {
+ public:
+  PagedSpace* next();
+  PagedSpaces() { counter_ = OLD_POINTER_SPACE; }
+ private:
+  int counter_;
+};

 // Space iterator for iterating over all spaces of the heap.
 // For each space an object iterator is provided. The deallocation of the
 // returned object iterators is handled by the space iterator.
 class SpaceIterator : public Malloced {
  public:
   SpaceIterator();
...
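
A usage sketch for the new iterators, following the comments above (each
next() call returns the next space, and NULL once the iterator is exhausted;
the function name is illustrative only):

    void VisitAllPagedSpaces() {
      PagedSpaces spaces;
      for (PagedSpace* space = spaces.next(); space != NULL;
           space = spaces.next()) {
        // Per-space work goes here; the iterator hands out old pointer space,
        // old data space, code space and map space in turn.
      }
    }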
@@ -241,7 +241,8 @@ class MarkCompactCollector : public AllStatic {
   // Callback functions for deallocating non-live blocks in the old
   // generation.
-  static void DeallocateOldBlock(Address start, int size_in_bytes);
+  static void DeallocateOldPointerBlock(Address start, int size_in_bytes);
+  static void DeallocateOldDataBlock(Address start, int size_in_bytes);
   static void DeallocateCodeBlock(Address start, int size_in_bytes);
   static void DeallocateMapBlock(Address start, int size_in_bytes);
@@ -295,9 +296,13 @@ class MarkCompactCollector : public AllStatic {
   static int RelocateMapObject(HeapObject* obj);

   // Relocates an old object.
-  static int RelocateOldObject(HeapObject* obj);
+  static int RelocateOldPointerObject(HeapObject* obj);
+  static int RelocateOldDataObject(HeapObject* obj);

-  // Relocates an immutable object in the code space.
+  // Helper function.
+  static inline int RelocateOldNonCodeObject(HeapObject* obj, OldSpace* space);

+  // Relocates an object in the code space.
   static int RelocateCodeObject(HeapObject* obj);

   // Copy a new object.
@@ -322,11 +327,14 @@ class MarkCompactCollector : public AllStatic {
   // Number of live objects in Heap::to_space_.
   static int live_young_objects_;

-  // Number of live objects in Heap::old_space_.
-  static int live_old_objects_;
+  // Number of live objects in Heap::old_pointer_space_.
+  static int live_old_pointer_objects_;

+  // Number of live objects in Heap::old_data_space_.
+  static int live_old_data_objects_;

   // Number of live objects in Heap::code_space_.
-  static int live_immutable_objects_;
+  static int live_code_objects_;

   // Number of live objects in Heap::map_space_.
   static int live_map_objects_;
...
@@ -182,8 +182,8 @@ int main(int argc, char** argv) {
       i::Bootstrapper::NativesSourceLookup(i);
     }
   }
-  // Get rid of unreferenced scripts.
-  i::Heap::CollectGarbage(0, i::OLD_SPACE);
+  // Get rid of unreferenced scripts with a global GC.
+  i::Heap::CollectAllGarbage();
   i::Serializer ser;
   ser.Serialize();
   char* str;
...
@@ -931,7 +931,7 @@ Object* JSObject::Copy(PretenureFlag pretenure) {
   // Make the clone.
   Object* clone = (pretenure == NOT_TENURED) ?
       Heap::Allocate(map(), NEW_SPACE) :
-      Heap::Allocate(map(), OLD_SPACE);
+      Heap::Allocate(map(), OLD_POINTER_SPACE);
   if (clone->IsFailure()) return clone;
   JSObject::cast(clone)->CopyBody(this);
...
@@ -3327,6 +3327,8 @@ class Oddball: public HeapObject {
 // Proxy describes objects pointing from JavaScript to C structures.
+// Since they cannot contain references to JS HeapObjects they can be
+// placed in old_data_space.
 class Proxy: public HeapObject {
  public:
   // [proxy]: field containing the address.
...
@@ -353,8 +353,8 @@ static const int kMmapFd = -1;
 static const int kMmapFdOffset = 0;

-VirtualMemory::VirtualMemory(size_t size, void* address_hint) {
-  address_ = mmap(address_hint, size, PROT_NONE,
+VirtualMemory::VirtualMemory(size_t size) {
+  address_ = mmap(NULL, size, PROT_NONE,
                   MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE,
                   kMmapFd, kMmapFdOffset);
   size_ = size;
...
@@ -312,8 +312,8 @@ static const int kMmapFd = -1;
 static const int kMmapFdOffset = 0;

-VirtualMemory::VirtualMemory(size_t size, void* address_hint) {
-  address_ = mmap(address_hint, size, PROT_NONE,
+VirtualMemory::VirtualMemory(size_t size) {
+  address_ = mmap(NULL, size, PROT_NONE,
                   MAP_PRIVATE | MAP_ANON | MAP_NORESERVE,
                   kMmapFd, kMmapFdOffset);
   size_ = size;
...
@@ -1171,9 +1171,8 @@ bool VirtualMemory::IsReserved() {
 }

-VirtualMemory::VirtualMemory(size_t size, void* address_hint) {
-  address_ =
-      VirtualAlloc(address_hint, size, MEM_RESERVE, PAGE_NOACCESS);
+VirtualMemory::VirtualMemory(size_t size) {
+  address_ = VirtualAlloc(NULL, size, MEM_RESERVE, PAGE_NOACCESS);
   size_ = size;
 }
...
@@ -222,7 +222,7 @@ class OS {
 class VirtualMemory {
  public:
   // Reserves virtual memory with size.
-  VirtualMemory(size_t size, void* address_hint = 0);
+  explicit VirtualMemory(size_t size);
   ~VirtualMemory();

   // Returns whether the memory has been reserved.
...
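
The constructors above reserve address space without committing it (PROT_NONE
on POSIX, MEM_RESERVE on Windows); pages are committed later with whatever
executability the owning space requires. A sketch of the pattern (Commit's
signature is assumed from its call sites in this patch):

    void ReserveAndCommitExample(size_t size) {
      VirtualMemory reservation(size);        // explicit; no address hint now
      if (!reservation.IsReserved()) return;  // reservation failed
      // Later, commit a piece of the range as non-executable data memory:
      // reservation.Commit(start, commit_size, false /* not executable */);
    }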
@@ -4536,8 +4536,8 @@ static Object* Runtime_DebugGetLoadedScripts(Arguments args) {
   // Perform two GCs to get rid of all unreferenced scripts. The first GC gets
   // rid of all the cached script wrappes and the second gets rid of the
   // scripts which is no longer referenced.
-  Heap::CollectGarbage(0, OLD_SPACE);
-  Heap::CollectGarbage(0, OLD_SPACE);
+  Heap::CollectAllGarbage();
+  Heap::CollectAllGarbage();

   // Get the number of scripts.
   int count;
@@ -4641,7 +4641,7 @@ static Object* Runtime_DebugReferencedBy(Arguments args) {
   ASSERT(args.length() == 3);

   // First perform a full GC in order to avoid references from dead objects.
-  Heap::CollectGarbage(0, OLD_SPACE);
+  Heap::CollectAllGarbage();

   // Check parameters.
   CONVERT_CHECKED(JSObject, target, args[0]);
@@ -4721,7 +4721,7 @@ static Object* Runtime_DebugConstructedBy(Arguments args) {
   ASSERT(args.length() == 2);

   // First perform a full GC in order to avoid dead objects.
-  Heap::CollectGarbage(0, OLD_SPACE);
+  Heap::CollectAllGarbage();

   // Check parameters.
   CONVERT_CHECKED(JSFunction, constructor, args[0]);
...
@@ -312,10 +312,11 @@ class Deserializer: public ObjectVisitor {
   bool has_log_;  // The file has log information.

   // Resolve caches the following:
   List<Page*> map_pages_;  // All pages in the map space.
-  List<Page*> old_pages_;  // All pages in the old space.
+  List<Page*> old_pointer_pages_;  // All pages in the old pointer space.
+  List<Page*> old_data_pages_;  // All pages in the old data space.
   List<Page*> code_pages_;
   List<Object*> large_objects_;  // All known large objects.

   // A list of global handles at deserialization time.
   List<Object**> global_handles_;
...
@@ -86,14 +86,7 @@ Page* Page::next_page() {
 Address Page::AllocationTop() {
   PagedSpace* owner = MemoryAllocator::PageOwner(this);
-  if (Heap::old_space() == owner) {
-    return Heap::old_space()->PageAllocationTop(this);
-  } else if (Heap::code_space() == owner) {
-    return Heap::code_space()->PageAllocationTop(this);
-  } else {
-    ASSERT(Heap::map_space() == owner);
-    return Heap::map_space()->PageAllocationTop(this);
-  }
+  return owner->PageAllocationTop(this);
 }
@@ -282,24 +275,6 @@ Object* PagedSpace::MCAllocateRaw(int size_in_bytes) {
 }

-// Allocating during deserialization.  Always roll to the next page in the
-// space, which should be suitably expanded.
-Object* PagedSpace::AllocateForDeserialization(int size_in_bytes) {
-  ASSERT(HasBeenSetup());
-  ASSERT_OBJECT_SIZE(size_in_bytes);
-  HeapObject* object = AllocateLinearly(&allocation_info_, size_in_bytes);
-  if (object != NULL) return object;
-
-  // The space should be pre-expanded.
-  Page* current_page = Page::FromAllocationTop(allocation_info_.top);
-  ASSERT(current_page->next_page()->is_valid());
-  object = AllocateInNextPage(current_page, size_in_bytes);
-  ASSERT(object != NULL);
-  return object;
-}

 // -----------------------------------------------------------------------------
 // LargeObjectChunk
...
@@ -227,10 +227,10 @@ void MemoryAllocator::TearDown() {
 void* MemoryAllocator::AllocateRawMemory(const size_t requested,
                                          size_t* allocated,
-                                         bool executable) {
+                                         Executability executable) {
   if (size_ + static_cast<int>(requested) > capacity_) return NULL;
-  void* mem = OS::Allocate(requested, allocated, executable);
+  void* mem = OS::Allocate(requested, allocated, executable == EXECUTABLE);
   int alloced = *allocated;
   size_ += alloced;
   Counters::memory_allocated.Increment(alloced);
@@ -316,7 +316,7 @@ Page* MemoryAllocator::CommitPages(Address start, size_t size,
   ASSERT(initial_chunk_->address() <= start);
   ASSERT(start + size <= reinterpret_cast<Address>(initial_chunk_->address())
             + initial_chunk_->size());
-  if (!initial_chunk_->Commit(start, size, owner->executable())) {
+  if (!initial_chunk_->Commit(start, size, owner->executable() == EXECUTABLE)) {
     return Page::FromAddress(NULL);
   }
   Counters::memory_allocated.Increment(size);
@@ -332,7 +332,7 @@ Page* MemoryAllocator::CommitPages(Address start, size_t size,
 bool MemoryAllocator::CommitBlock(Address start,
                                   size_t size,
-                                  bool executable) {
+                                  Executability executable) {
   ASSERT(start != NULL);
   ASSERT(size > 0);
   ASSERT(initial_chunk_ != NULL);
@@ -474,7 +474,9 @@ void MemoryAllocator::ReportStatistics() {
 // -----------------------------------------------------------------------------
 // PagedSpace implementation

-PagedSpace::PagedSpace(int max_capacity, AllocationSpace id, bool executable)
+PagedSpace::PagedSpace(int max_capacity,
+                       AllocationSpace id,
+                       Executability executable)
     : Space(id, executable) {
   max_capacity_ = (RoundDown(max_capacity, Page::kPageSize) / Page::kPageSize)
                   * Page::kObjectAreaSize;
@@ -494,8 +496,11 @@ bool PagedSpace::Setup(Address start, size_t size) {
   int num_pages = 0;
   // Try to use the virtual memory range passed to us.  If it is too small to
   // contain at least one page, ignore it and allocate instead.
-  if (PagesInChunk(start, size) > 0) {
-    first_page_ = MemoryAllocator::CommitPages(start, size, this, &num_pages);
+  int pages_in_chunk = PagesInChunk(start, size);
+  if (pages_in_chunk > 0) {
+    first_page_ = MemoryAllocator::CommitPages(RoundUp(start, Page::kPageSize),
+                                               Page::kPageSize * pages_in_chunk,
+                                               this, &num_pages);
   } else {
     int requested_pages = Min(MemoryAllocator::kPagesPerChunk,
                               max_capacity_ / Page::kObjectAreaSize);
@@ -768,15 +773,14 @@ void PagedSpace::Print() { }
 NewSpace::NewSpace(int initial_semispace_capacity,
                    int maximum_semispace_capacity,
-                   AllocationSpace id,
-                   bool executable)
-    : Space(id, executable) {
+                   AllocationSpace id)
+    : Space(id, NOT_EXECUTABLE) {
   ASSERT(initial_semispace_capacity <= maximum_semispace_capacity);
   ASSERT(IsPowerOf2(maximum_semispace_capacity));
   maximum_capacity_ = maximum_semispace_capacity;
   capacity_ = initial_semispace_capacity;
-  to_space_ = new SemiSpace(capacity_, maximum_capacity_, id, executable);
-  from_space_ = new SemiSpace(capacity_, maximum_capacity_, id, executable);
+  to_space_ = new SemiSpace(capacity_, maximum_capacity_, id);
+  from_space_ = new SemiSpace(capacity_, maximum_capacity_, id);

   // Allocate and setup the histogram arrays if necessary.
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
@@ -940,9 +944,8 @@ void NewSpace::Verify() {
 SemiSpace::SemiSpace(int initial_capacity,
                      int maximum_capacity,
-                     AllocationSpace id,
-                     bool executable)
-    : Space(id, executable), capacity_(initial_capacity),
+                     AllocationSpace id)
+    : Space(id, NOT_EXECUTABLE), capacity_(initial_capacity),
       maximum_capacity_(maximum_capacity), start_(NULL), age_mark_(NULL) {
 }
@@ -980,6 +983,9 @@ bool SemiSpace::Double() {
 #ifdef DEBUG
 void SemiSpace::Print() { }

+void SemiSpace::Verify() { }

 #endif
@@ -2190,7 +2196,7 @@ HeapObject* LargeObjectIterator::next() {
 LargeObjectChunk* LargeObjectChunk::New(int size_in_bytes,
                                         size_t* chunk_size,
-                                        bool executable) {
+                                        Executability executable) {
   size_t requested = ChunkSizeFor(size_in_bytes);
   void* mem = MemoryAllocator::AllocateRawMemory(requested,
                                                  chunk_size,
@@ -2216,8 +2222,8 @@ int LargeObjectChunk::ChunkSizeFor(int size_in_bytes) {
 // -----------------------------------------------------------------------------
 // LargeObjectSpace

-LargeObjectSpace::LargeObjectSpace(AllocationSpace id, bool executable)
-    : Space(id, executable),
+LargeObjectSpace::LargeObjectSpace(AllocationSpace id)
+    : Space(id, NOT_EXECUTABLE),  // Managed on a per-allocation basis
       first_chunk_(NULL),
       size_(0),
       page_count_(0) {}
@@ -2245,11 +2251,12 @@ void LargeObjectSpace::TearDown() {
 Object* LargeObjectSpace::AllocateRawInternal(int requested_size,
-                                              int object_size) {
+                                              int object_size,
+                                              Executability executable) {
   ASSERT(0 < object_size && object_size <= requested_size);
   size_t chunk_size;
   LargeObjectChunk* chunk =
-      LargeObjectChunk::New(requested_size, &chunk_size, executable());
+      LargeObjectChunk::New(requested_size, &chunk_size, executable);
   if (chunk == NULL) {
     return Failure::RetryAfterGC(requested_size, identity());
   }
@@ -2280,15 +2287,28 @@ Object* LargeObjectSpace::AllocateRawInternal(int requested_size,
 }

-Object* LargeObjectSpace::AllocateRaw(int size_in_bytes) {
+Object* LargeObjectSpace::AllocateRawCode(int size_in_bytes) {
   ASSERT(0 < size_in_bytes);
-  return AllocateRawInternal(size_in_bytes, size_in_bytes);
+  return AllocateRawInternal(size_in_bytes,
+                             size_in_bytes,
+                             EXECUTABLE);
 }

 Object* LargeObjectSpace::AllocateRawFixedArray(int size_in_bytes) {
+  ASSERT(0 < size_in_bytes);
   int extra_rset_bytes = ExtraRSetBytesFor(size_in_bytes);
-  return AllocateRawInternal(size_in_bytes + extra_rset_bytes, size_in_bytes);
+  return AllocateRawInternal(size_in_bytes + extra_rset_bytes,
+                             size_in_bytes,
+                             NOT_EXECUTABLE);
+}

+Object* LargeObjectSpace::AllocateRaw(int size_in_bytes) {
+  ASSERT(0 < size_in_bytes);
+  return AllocateRawInternal(size_in_bytes,
+                             size_in_bytes,
+                             NOT_EXECUTABLE);
 }
...
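
The large object space now picks executability per allocation instead of per
space: code chunks are the only executable ones, and large fixed arrays
additionally reserve remembered-set bytes so the write barrier has somewhere
to record pointers into new space (the serialization fix from the commit
message). A small sketch of the resulting call sites (the wrapper names are
illustrative, not from this patch):

    Object* AllocateLargeCode(LargeObjectSpace* lo, int size) {
      return lo->AllocateRawCode(size);        // EXECUTABLE chunk
    }

    Object* AllocateLargeArray(LargeObjectSpace* lo, int size) {
      return lo->AllocateRawFixedArray(size);  // NOT_EXECUTABLE, plus rset bytes
    }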
@@ -461,10 +461,10 @@ THREADED_TEST(ScriptUsingStringResource) {
     CHECK(source->IsExternal());
     CHECK_EQ(resource,
              static_cast<TestResource*>(source->GetExternalStringResource()));
-    v8::internal::Heap::CollectGarbage(0, v8::internal::OLD_SPACE);
+    v8::internal::Heap::CollectAllGarbage();
     CHECK_EQ(0, TestResource::dispose_count);
   }
-  v8::internal::Heap::CollectGarbage(0, v8::internal::OLD_SPACE);
+  v8::internal::Heap::CollectAllGarbage();
   CHECK_EQ(1, TestResource::dispose_count);
 }
@@ -481,10 +481,10 @@ THREADED_TEST(ScriptUsingAsciiStringResource) {
     Local<Value> value = script->Run();
     CHECK(value->IsNumber());
     CHECK_EQ(7, value->Int32Value());
-    v8::internal::Heap::CollectGarbage(0, v8::internal::OLD_SPACE);
+    v8::internal::Heap::CollectAllGarbage();
     CHECK_EQ(0, TestAsciiResource::dispose_count);
   }
-  v8::internal::Heap::CollectGarbage(0, v8::internal::OLD_SPACE);
+  v8::internal::Heap::CollectAllGarbage();
   CHECK_EQ(1, TestAsciiResource::dispose_count);
 }
@@ -2455,7 +2455,7 @@ static v8::Handle<Value> ArgumentsTestCallback(const v8::Arguments& args) {
   CHECK_EQ(v8::Integer::New(3), args[2]);
   CHECK_EQ(v8::Undefined(), args[3]);
   v8::HandleScope scope;
-  i::Heap::CollectGarbage(0, i::OLD_SPACE);
+  i::Heap::CollectAllGarbage();
   return v8::Undefined();
 }
@@ -4694,7 +4694,7 @@ THREADED_TEST(LockUnlockLock) {
 static void EnsureNoSurvivingGlobalObjects() {
   int count = 0;
-  v8::internal::Heap::CollectGarbage(0, v8::internal::OLD_SPACE);
+  v8::internal::Heap::CollectAllGarbage();
   v8::internal::HeapIterator it;
   while (it.has_next()) {
     v8::internal::HeapObject* object = it.next();
...
@@ -618,7 +618,7 @@ static void DebugEventBreakPointCollectGarbage(
       Heap::CollectGarbage(0, v8::internal::NEW_SPACE);
     } else {
       // Mark sweep (and perhaps compact).
-      Heap::CollectGarbage(0, v8::internal::OLD_SPACE);
+      Heap::CollectAllGarbage();
     }
   }
 }
@@ -960,7 +960,7 @@ static void CallAndGC(v8::Local<v8::Object> recv, v8::Local<v8::Function> f) {
     CHECK_EQ(2 + i * 3, break_point_hit_count);

     // Mark sweep (and perhaps compact) and call function.
-    Heap::CollectGarbage(0, v8::internal::OLD_SPACE);
+    Heap::CollectAllGarbage();
     f->Call(recv, 0, NULL);
     CHECK_EQ(3 + i * 3, break_point_hit_count);
   }
...
@@ -176,7 +176,8 @@ TEST(Tagging) {
   CHECK(Failure::RetryAfterGC(12, NEW_SPACE)->IsFailure());
   CHECK_EQ(12, Failure::RetryAfterGC(12, NEW_SPACE)->requested());
   CHECK_EQ(NEW_SPACE, Failure::RetryAfterGC(12, NEW_SPACE)->allocation_space());
-  CHECK_EQ(OLD_SPACE, Failure::RetryAfterGC(12, OLD_SPACE)->allocation_space());
+  CHECK_EQ(OLD_POINTER_SPACE,
+           Failure::RetryAfterGC(12, OLD_POINTER_SPACE)->allocation_space());
   CHECK(Failure::Exception()->IsFailure());
   CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
   CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
@@ -353,7 +354,7 @@ TEST(WeakGlobalHandlesMark) {
   Handle<Object> h1 = GlobalHandles::Create(i);
   Handle<Object> h2 = GlobalHandles::Create(u);

-  CHECK(Heap::CollectGarbage(0, OLD_SPACE));
+  CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
   CHECK(Heap::CollectGarbage(0, NEW_SPACE));
   // Make sure the object is promoted.
@@ -363,7 +364,7 @@ TEST(WeakGlobalHandlesMark) {
   CHECK(!GlobalHandles::IsNearDeath(h1.location()));
   CHECK(!GlobalHandles::IsNearDeath(h2.location()));

-  CHECK(Heap::CollectGarbage(0, OLD_SPACE));
+  CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));

   CHECK((*h1)->IsString());
@@ -400,7 +401,7 @@ TEST(DeleteWeakGlobalHandle) {
   CHECK(!WeakPointerCleared);

   // Mark-compact treats weak reference properly.
-  CHECK(Heap::CollectGarbage(0, OLD_SPACE));
+  CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));

   CHECK(WeakPointerCleared);
 }
@@ -751,11 +752,11 @@ TEST(Iteration) {
   Handle<Object> objs[objs_count];
   int next_objs_index = 0;

-  // Allocate a JS array to OLD_SPACE and NEW_SPACE
+  // Allocate a JS array to OLD_POINTER_SPACE and NEW_SPACE
   objs[next_objs_index++] = Factory::NewJSArray(10);
   objs[next_objs_index++] = Factory::NewJSArray(10, TENURED);

-  // Allocate a small string to CODE_SPACE and NEW_SPACE
+  // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
   objs[next_objs_index++] =
       Factory::NewStringFromAscii(CStrVector("abcdefghij"));
   objs[next_objs_index++] =
...
@@ -102,10 +102,10 @@ TEST(Promotion) {
   CHECK(Heap::InSpace(*array, NEW_SPACE));

   // Call the m-c collector, so array becomes an old object.
-  CHECK(Heap::CollectGarbage(0, OLD_SPACE));
+  CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));

   // Array now sits in the old space
-  CHECK(Heap::InSpace(*array, OLD_SPACE));
+  CHECK(Heap::InSpace(*array, OLD_POINTER_SPACE));
 }
@@ -120,7 +120,7 @@ TEST(NoPromotion) {
   v8::HandleScope sc;

   // Do a mark compact GC to shrink the heap.
-  CHECK(Heap::CollectGarbage(0, OLD_SPACE));
+  CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));

   // Allocate a big Fixed array in the new space.
   int size = (Heap::MaxHeapObjectSize() - Array::kHeaderSize) / kPointerSize;
@@ -142,7 +142,7 @@ TEST(NoPromotion) {
   }

   // Call mark compact GC, and it should pass.
-  CHECK(Heap::CollectGarbage(0, OLD_SPACE));
+  CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));

   // array should not be promoted because the old space is full.
   CHECK(Heap::InSpace(*array, NEW_SPACE));
@@ -154,7 +154,7 @@ TEST(MarkCompactCollector) {
   v8::HandleScope sc;

   // call mark-compact when heap is empty
-  CHECK(Heap::CollectGarbage(0, OLD_SPACE));
+  CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));

   // keep allocating garbage in new space until it fails
   const int ARRAY_SIZE = 100;
@@ -190,7 +190,7 @@ TEST(MarkCompactCollector) {
   Top::context()->global()->SetProperty(func_name, function, NONE);

   JSObject* obj = JSObject::cast(Heap::AllocateJSObject(function));
-  CHECK(Heap::CollectGarbage(0, OLD_SPACE));
+  CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));

   func_name = String::cast(Heap::LookupAsciiSymbol("theFunction"));
   CHECK(Top::context()->global()->HasLocalProperty(func_name));
@@ -204,7 +204,7 @@ TEST(MarkCompactCollector) {
   String* prop_name = String::cast(Heap::LookupAsciiSymbol("theSlot"));
   obj->SetProperty(prop_name, Smi::FromInt(23), NONE);

-  CHECK(Heap::CollectGarbage(0, OLD_SPACE));
+  CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));

   obj_name = String::cast(Heap::LookupAsciiSymbol("theObject"));
   CHECK(Top::context()->global()->HasLocalProperty(obj_name));
@@ -242,7 +242,7 @@ TEST(GCCallback) {
   CHECK_EQ(0, gc_starts);
   CHECK_EQ(gc_ends, gc_starts);

-  CHECK(Heap::CollectGarbage(0, OLD_SPACE));
+  CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
   CHECK_EQ(1, gc_starts);
   CHECK_EQ(gc_ends, gc_starts);
 }
@@ -292,7 +292,7 @@ TEST(ObjectGroups) {
   GlobalHandles::AddToGroup(reinterpret_cast<void*>(2), g2s1.location());
   GlobalHandles::AddToGroup(reinterpret_cast<void*>(2), g2s2.location());
   // Do a full GC
-  CHECK(Heap::CollectGarbage(0, OLD_SPACE));
+  CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));

   // All object should be alive.
   CHECK_EQ(0, NumberOfWeakCalls);
@@ -308,7 +308,7 @@ TEST(ObjectGroups) {
   GlobalHandles::AddToGroup(reinterpret_cast<void*>(2), g2s1.location());
   GlobalHandles::AddToGroup(reinterpret_cast<void*>(2), g2s2.location());

-  CHECK(Heap::CollectGarbage(0, OLD_SPACE));
+  CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));

   // All objects should be gone. 5 global handles in total.
   CHECK_EQ(5, NumberOfWeakCalls);
...
@@ -101,7 +101,7 @@ TEST(MemoryAllocator) {
   CHECK(Heap::ConfigureHeapDefault());
   CHECK(MemoryAllocator::Setup(Heap::MaxCapacity()));

-  OldSpace faked_space(Heap::MaxCapacity(), OLD_SPACE, false);
+  OldSpace faked_space(Heap::MaxCapacity(), OLD_POINTER_SPACE, NOT_EXECUTABLE);
   int total_pages = 0;
   int requested = 2;
   int allocated;
@@ -159,8 +159,7 @@ TEST(NewSpace) {
   NewSpace* s = new NewSpace(Heap::InitialSemiSpaceSize(),
                              Heap::SemiSpaceSize(),
-                             NEW_SPACE,
-                             false);
+                             NEW_SPACE);
   CHECK(s != NULL);

   void* chunk =
@@ -187,7 +186,9 @@ TEST(OldSpace) {
   CHECK(Heap::ConfigureHeapDefault());
   CHECK(MemoryAllocator::Setup(Heap::MaxCapacity()));

-  OldSpace* s = new OldSpace(Heap::OldGenerationSize(), OLD_SPACE, false);
+  OldSpace* s = new OldSpace(Heap::OldGenerationSize(),
+                             OLD_POINTER_SPACE,
+                             NOT_EXECUTABLE);
   CHECK(s != NULL);

   void* chunk =
@@ -213,7 +214,7 @@ TEST(LargeObjectSpace) {
   CHECK(Heap::ConfigureHeapDefault());
   MemoryAllocator::Setup(Heap::MaxCapacity());

-  LargeObjectSpace* lo = new LargeObjectSpace(LO_SPACE, false);
+  LargeObjectSpace* lo = new LargeObjectSpace(LO_SPACE);
   CHECK(lo != NULL);
   CHECK(lo->Setup());
...