Commit bb7f8fbe authored by dimich@chromium.org

Cleanup of Isolate usage (guided by SunSpider profiling)

Review URL: http://codereview.chromium.org/6718023

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@7311 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 41f30cac
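
The pattern applied throughout the hunks below: code that previously reached the current heap through the TLS-backed HEAP macro (which goes through Isolate::Current() on every use) now receives a Heap* explicitly, either as a function parameter or as a heap_ field cached at construction. A minimal standalone sketch of the before/after shape, using invented names rather than V8's real declarations:

// Sketch only -- illustrative types and names, not V8 code.
#include <cassert>

struct Heap {
  int byte_array_map = 1;  // stand-in for raw_unchecked_byte_array_map()
};

// "Before" style: a hot helper re-resolves the current heap on each call.
thread_local Heap* g_current_heap = nullptr;
inline Heap* CurrentHeap() { return g_current_heap; }  // plays the role of HEAP

bool IsByteArrayViaTLS(int map_word) {
  return map_word == CurrentHeap()->byte_array_map;  // TLS lookup per call
}

// "After" style: the caller already knows its heap and passes it down.
bool IsByteArray(Heap* heap, int map_word) {
  return map_word == heap->byte_array_map;  // plain pointer dereference
}

int main() {
  Heap heap;
  g_current_heap = &heap;
  assert(IsByteArrayViaTLS(1) == IsByteArray(&heap, 1));  // same answer, fewer lookups
  return 0;
}

The sketch only illustrates the call-shape change; the commit message indicates the motivation was lookups showing up in SunSpider profiles.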
@@ -668,9 +668,10 @@ class StaticMarkingVisitor : public StaticVisitorBase {
     if (!ctx->IsHeapObject()) return false;

     Map* map = SafeMap(ctx);
-    if (!(map == HEAP->raw_unchecked_context_map() ||
-          map == HEAP->raw_unchecked_catch_context_map() ||
-          map == HEAP->raw_unchecked_global_context_map())) {
+    Heap* heap = map->heap();
+    if (!(map == heap->raw_unchecked_context_map() ||
+          map == heap->raw_unchecked_catch_context_map() ||
+          map == heap->raw_unchecked_global_context_map())) {
       return false;
     }
......
@@ -1740,7 +1740,7 @@ void NewSpace::RecordPromotion(HeapObject* obj) {
 // -----------------------------------------------------------------------------
 // Free lists for old object spaces implementation

-void FreeListNode::set_size(int size_in_bytes) {
+void FreeListNode::set_size(Heap* heap, int size_in_bytes) {
   ASSERT(size_in_bytes > 0);
   ASSERT(IsAligned(size_in_bytes, kPointerSize));
@@ -1752,14 +1752,14 @@ void FreeListNode::set_size(int size_in_bytes) {
   // field and a next pointer, we give it a filler map that gives it the
   // correct size.
   if (size_in_bytes > ByteArray::kHeaderSize) {
-    set_map(HEAP->raw_unchecked_byte_array_map());
+    set_map(heap->raw_unchecked_byte_array_map());
     // Can't use ByteArray::cast because it fails during deserialization.
     ByteArray* this_as_byte_array = reinterpret_cast<ByteArray*>(this);
     this_as_byte_array->set_length(ByteArray::LengthFor(size_in_bytes));
   } else if (size_in_bytes == kPointerSize) {
-    set_map(HEAP->raw_unchecked_one_pointer_filler_map());
+    set_map(heap->raw_unchecked_one_pointer_filler_map());
   } else if (size_in_bytes == 2 * kPointerSize) {
-    set_map(HEAP->raw_unchecked_two_pointer_filler_map());
+    set_map(heap->raw_unchecked_two_pointer_filler_map());
   } else {
     UNREACHABLE();
   }
@@ -1768,9 +1768,9 @@ void FreeListNode::set_size(int size_in_bytes) {
 }


-Address FreeListNode::next() {
+Address FreeListNode::next(Heap* heap) {
   ASSERT(IsFreeListNode(this));
-  if (map() == HEAP->raw_unchecked_byte_array_map()) {
+  if (map() == heap->raw_unchecked_byte_array_map()) {
     ASSERT(Size() >= kNextOffset + kPointerSize);
     return Memory::Address_at(address() + kNextOffset);
   } else {
@@ -1779,9 +1779,9 @@ Address FreeListNode::next() {
 }


-void FreeListNode::set_next(Address next) {
+void FreeListNode::set_next(Heap* heap, Address next) {
   ASSERT(IsFreeListNode(this));
-  if (map() == HEAP->raw_unchecked_byte_array_map()) {
+  if (map() == heap->raw_unchecked_byte_array_map()) {
     ASSERT(Size() >= kNextOffset + kPointerSize);
     Memory::Address_at(address() + kNextOffset) = next;
   } else {
@@ -1790,7 +1790,9 @@ void FreeListNode::set_next(Address next) {
 }


-OldSpaceFreeList::OldSpaceFreeList(AllocationSpace owner) : owner_(owner) {
+OldSpaceFreeList::OldSpaceFreeList(Heap* heap, AllocationSpace owner)
+    : heap_(heap),
+      owner_(owner) {
   Reset();
 }
@@ -1825,7 +1827,7 @@ int OldSpaceFreeList::Free(Address start, int size_in_bytes) {
   Isolate::Current()->memory_allocator()->ZapBlock(start, size_in_bytes);
 #endif
   FreeListNode* node = FreeListNode::FromAddress(start);
-  node->set_size(size_in_bytes);
+  node->set_size(heap_, size_in_bytes);

   // We don't use the freelists in compacting mode. This makes it more like a
   // GC that only has mark-sweep-compact and doesn't have a mark-sweep
@@ -1843,7 +1845,7 @@ int OldSpaceFreeList::Free(Address start, int size_in_bytes) {

   // Insert other blocks at the head of an exact free list.
   int index = size_in_bytes >> kPointerSizeLog2;
-  node->set_next(free_[index].head_node_);
+  node->set_next(heap_, free_[index].head_node_);
   free_[index].head_node_ = node->address();
   available_ += size_in_bytes;
   needs_rebuild_ = true;
@@ -1862,7 +1864,8 @@ MaybeObject* OldSpaceFreeList::Allocate(int size_in_bytes, int* wasted_bytes) {
   if (free_[index].head_node_ != NULL) {
     FreeListNode* node = FreeListNode::FromAddress(free_[index].head_node_);
     // If this was the last block of its size, remove the size.
-    if ((free_[index].head_node_ = node->next()) == NULL) RemoveSize(index);
+    if ((free_[index].head_node_ = node->next(heap_)) == NULL)
+      RemoveSize(index);
     available_ -= size_in_bytes;
     *wasted_bytes = 0;
     ASSERT(!FLAG_always_compact);  // We only use the freelists with mark-sweep.
@@ -1891,33 +1894,33 @@ MaybeObject* OldSpaceFreeList::Allocate(int size_in_bytes, int* wasted_bytes) {
     finger_ = prev;
     free_[prev].next_size_ = rem;
     // If this was the last block of size cur, remove the size.
-    if ((free_[cur].head_node_ = cur_node->next()) == NULL) {
+    if ((free_[cur].head_node_ = cur_node->next(heap_)) == NULL) {
       free_[rem].next_size_ = free_[cur].next_size_;
     } else {
       free_[rem].next_size_ = cur;
     }
     // Add the remainder block.
-    rem_node->set_size(rem_bytes);
-    rem_node->set_next(free_[rem].head_node_);
+    rem_node->set_size(heap_, rem_bytes);
+    rem_node->set_next(heap_, free_[rem].head_node_);
     free_[rem].head_node_ = rem_node->address();
   } else {
     // If this was the last block of size cur, remove the size.
-    if ((free_[cur].head_node_ = cur_node->next()) == NULL) {
+    if ((free_[cur].head_node_ = cur_node->next(heap_)) == NULL) {
       finger_ = prev;
       free_[prev].next_size_ = free_[cur].next_size_;
     }
     if (rem_bytes < kMinBlockSize) {
       // Too-small remainder is wasted.
-      rem_node->set_size(rem_bytes);
+      rem_node->set_size(heap_, rem_bytes);
       available_ -= size_in_bytes + rem_bytes;
       *wasted_bytes = rem_bytes;
       return cur_node;
     }
     // Add the remainder block and, if needed, insert its size.
-    rem_node->set_size(rem_bytes);
-    rem_node->set_next(free_[rem].head_node_);
+    rem_node->set_size(heap_, rem_bytes);
+    rem_node->set_next(heap_, free_[rem].head_node_);
     free_[rem].head_node_ = rem_node->address();
-    if (rem_node->next() == NULL) InsertSize(rem);
+    if (rem_node->next(heap_) == NULL) InsertSize(rem);
   }
   available_ -= size_in_bytes;
   *wasted_bytes = 0;
@@ -1930,7 +1933,7 @@ void OldSpaceFreeList::MarkNodes() {
     Address cur_addr = free_[i].head_node_;
     while (cur_addr != NULL) {
       FreeListNode* cur_node = FreeListNode::FromAddress(cur_addr);
-      cur_addr = cur_node->next();
+      cur_addr = cur_node->next(heap_);
       cur_node->SetMark();
     }
   }
@@ -1944,7 +1947,7 @@ bool OldSpaceFreeList::Contains(FreeListNode* node) {
     while (cur_addr != NULL) {
       FreeListNode* cur_node = FreeListNode::FromAddress(cur_addr);
       if (cur_node == node) return true;
-      cur_addr = cur_node->next();
+      cur_addr = cur_node->next(heap_);
     }
   }
   return false;
@@ -1952,8 +1955,10 @@ bool OldSpaceFreeList::Contains(FreeListNode* node) {
 #endif


-FixedSizeFreeList::FixedSizeFreeList(AllocationSpace owner, int object_size)
-    : owner_(owner), object_size_(object_size) {
+FixedSizeFreeList::FixedSizeFreeList(Heap* heap,
+                                     AllocationSpace owner,
+                                     int object_size)
+    : heap_(heap), owner_(owner), object_size_(object_size) {
   Reset();
 }
@@ -1971,12 +1976,12 @@ void FixedSizeFreeList::Free(Address start) {
   // We only use the freelists with mark-sweep.
   ASSERT(!HEAP->mark_compact_collector()->IsCompacting());
   FreeListNode* node = FreeListNode::FromAddress(start);
-  node->set_size(object_size_);
-  node->set_next(NULL);
+  node->set_size(heap_, object_size_);
+  node->set_next(heap_, NULL);
   if (head_ == NULL) {
     tail_ = head_ = node->address();
   } else {
-    FreeListNode::FromAddress(tail_)->set_next(node->address());
+    FreeListNode::FromAddress(tail_)->set_next(heap_, node->address());
     tail_ = node->address();
   }
   available_ += object_size_;
@@ -1990,7 +1995,7 @@ MaybeObject* FixedSizeFreeList::Allocate() {

   ASSERT(!FLAG_always_compact);  // We only use the freelists with mark-sweep.
   FreeListNode* node = FreeListNode::FromAddress(head_);
-  head_ = node->next();
+  head_ = node->next(heap_);
   available_ -= object_size_;
   return node;
 }
@@ -2000,7 +2005,7 @@ void FixedSizeFreeList::MarkNodes() {
   Address cur_addr = head_;
   while (cur_addr != NULL && cur_addr != tail_) {
     FreeListNode* cur_node = FreeListNode::FromAddress(cur_addr);
-    cur_addr = cur_node->next();
+    cur_addr = cur_node->next(heap_);
     cur_node->SetMark();
   }
 }
......
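
The free-list hunks above all follow one shape: OldSpaceFreeList and FixedSizeFreeList capture the Heap* they are constructed with in a heap_ field, and every per-node helper (set_size, next, set_next) takes that cached pointer instead of consulting HEAP. A simplified, self-contained sketch of that ownership pattern, with invented types standing in for the real ones:

// Standalone sketch (invented names): a list owner caches a context pointer
// at construction and threads it into per-node helpers, mirroring how heap_
// is used by the free lists in the hunks above.
#include <cassert>
#include <cstdint>

struct Heap {
  std::uintptr_t filler_map = 0xF00;  // stand-in for a filler/byte-array map
};

struct Node {
  std::uintptr_t map_word = 0;
  Node* next_ptr = nullptr;
  // Per-node helpers take the heap explicitly, like FreeListNode::set_next.
  void set_size(Heap* heap) { map_word = heap->filler_map; }
  void set_next(Heap* /*heap*/, Node* next) { next_ptr = next; }
  Node* next(Heap* /*heap*/) const { return next_ptr; }
};

class SimpleFreeList {
 public:
  explicit SimpleFreeList(Heap* heap) : heap_(heap), head_(nullptr) {}

  void Free(Node* node) {
    node->set_size(heap_);          // cached heap_, no global lookup
    node->set_next(heap_, head_);
    head_ = node;
  }

  Node* Allocate() {
    Node* node = head_;
    if (node != nullptr) head_ = node->next(heap_);
    return node;
  }

 private:
  Heap* heap_;  // captured once at construction, as in this patch
  Node* head_;
};

int main() {
  Heap heap;
  SimpleFreeList list(&heap);
  Node n;
  list.Free(&n);
  assert(list.Allocate() == &n);
  assert(list.Allocate() == nullptr);
  return 0;
}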
@@ -1721,11 +1721,11 @@ class FreeListNode: public HeapObject {
   // function also writes a map to the first word of the block so that it
   // looks like a heap object to the garbage collector and heap iteration
   // functions.
-  void set_size(int size_in_bytes);
+  void set_size(Heap* heap, int size_in_bytes);

   // Accessors for the next field.
-  inline Address next();
-  inline void set_next(Address next);
+  inline Address next(Heap* heap);
+  inline void set_next(Heap* heap, Address next);

  private:
   static const int kNextOffset = POINTER_SIZE_ALIGN(ByteArray::kHeaderSize);
@@ -1737,7 +1737,7 @@ class FreeListNode: public HeapObject {
 // The free list for the old space.
 class OldSpaceFreeList BASE_EMBEDDED {
  public:
-  explicit OldSpaceFreeList(AllocationSpace owner);
+  OldSpaceFreeList(Heap* heap, AllocationSpace owner);

   // Clear the free list.
   void Reset();
@@ -1767,6 +1767,8 @@ class OldSpaceFreeList BASE_EMBEDDED {
   static const int kMinBlockSize = 2 * kPointerSize;
   static const int kMaxBlockSize = Page::kMaxHeapObjectSize;

+  Heap* heap_;
+
   // The identity of the owning space, for building allocation Failure
   // objects.
   AllocationSpace owner_;
@@ -1841,7 +1843,7 @@ class OldSpaceFreeList BASE_EMBEDDED {
 // The free list for the map space.
 class FixedSizeFreeList BASE_EMBEDDED {
  public:
-  FixedSizeFreeList(AllocationSpace owner, int object_size);
+  FixedSizeFreeList(Heap* heap, AllocationSpace owner, int object_size);

   // Clear the free list.
   void Reset();
@@ -1862,6 +1864,9 @@ class FixedSizeFreeList BASE_EMBEDDED {
   void MarkNodes();

  private:
+  Heap* heap_;
+
   // Available bytes on the free list.
   intptr_t available_;
@@ -1893,7 +1898,8 @@ class OldSpace : public PagedSpace {
            intptr_t max_capacity,
            AllocationSpace id,
            Executability executable)
-      : PagedSpace(heap, max_capacity, id, executable), free_list_(id) {
+      : PagedSpace(heap, max_capacity, id, executable),
+        free_list_(heap, id) {
     page_extra_ = 0;
   }
@@ -1970,7 +1976,7 @@ class FixedSpace : public PagedSpace {
       : PagedSpace(heap, max_capacity, id, NOT_EXECUTABLE),
         object_size_in_bytes_(object_size_in_bytes),
         name_(name),
-        free_list_(id, object_size_in_bytes) {
+        free_list_(heap, id, object_size_in_bytes) {
     page_extra_ = Page::kObjectAreaSize % object_size_in_bytes;
   }
......
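
The class-declaration hunks above show the other half of the wiring: the owning spaces forward their Heap* straight into the embedded free list through the member initializer list, so the pointer is handed down once at construction rather than looked up later. A rough sketch of that forwarding, again with invented types:

// Sketch with invented types: forwarding a constructor argument into an
// embedded, by-value member, as OldSpace does with free_list_(heap, id).
struct Heap {};

class FreeList {
 public:
  FreeList(Heap* heap, int owner_id) : heap_(heap), owner_id_(owner_id) {}
  Heap* heap() const { return heap_; }
 private:
  Heap* heap_;     // cached context, used by Free()/Allocate()
  int owner_id_;
};

class Space {
 public:
  Space(Heap* heap, int id)
      : heap_(heap),
        free_list_(heap, id) {}  // dependency handed down at construction
  const FreeList& free_list() const { return free_list_; }
 private:
  Heap* heap_;
  FreeList free_list_;  // embedded by value, like the BASE_EMBEDDED free lists
};

int main() {
  Heap heap;
  Space space(&heap, 1);
  // The embedded free list sees the same heap its owner was built with.
  return space.free_list().heap() == &heap ? 0 : 1;
}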