Commit 8dd405e3 authored by Ali Ijaz Sheikh, committed by Commit Bot

[heap] introduce SpaceWithLinearArea class

NewSpace and OldSpace have linear allocation areas, but at present their
implementations share no code and differ in subtle ways. This CL introduces
a superclass 'SpaceWithLinearArea' that will be used to refactor and share
code.

Change-Id: I741e6a6ebb9e75c111287214fd1f555fba62c452
Reviewed-on: https://chromium-review.googlesource.com/809504
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Ali Ijaz Sheikh <ofrobots@google.com>
Cr-Commit-Position: refs/heads/master@{#49890}
parent f9aacf15
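
For orientation, here is a minimal sketch of the hierarchy this CL introduces. The types below are simplified stand-ins, not the actual V8 declarations; the point is only that the linear-allocation-area state NewSpace and PagedSpace previously kept separately moves into the shared SpaceWithLinearArea base.

// Sketch only: Address and AllocationInfo are simplified stand-ins for the
// real V8 types, which carry considerably more state.
#include <cstdint>

using Address = std::uintptr_t;

struct AllocationInfo {
  Address top = 0;    // current bump pointer
  Address limit = 0;  // end of the linear allocation area
};

class Space { /* heap pointer, space id, allocation observers, ... */ };

// New intermediate class: owns the bump-pointer area plus the
// top_on_previous_step_ bookkeeping used by allocation observers.
class SpaceWithLinearArea : public Space {
 public:
  Address top() const { return allocation_info_.top; }
  Address limit() const { return allocation_info_.limit; }

 protected:
  AllocationInfo allocation_info_;
  Address top_on_previous_step_ = 0;
};

class NewSpace : public SpaceWithLinearArea { /* semispaces, ... */ };
class PagedSpace : public SpaceWithLinearArea { /* free list, pages, ... */ };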
@@ -1374,14 +1374,11 @@ intptr_t Space::GetNextInlineAllocationStepSize() {
PagedSpace::PagedSpace(Heap* heap, AllocationSpace space,
Executability executable)
: Space(heap, space, executable),
: SpaceWithLinearArea(heap, space, executable),
anchor_(this),
free_list_(this),
top_on_previous_step_(0) {
free_list_(this) {
area_size_ = MemoryAllocator::PageAreaSize(space);
accounting_stats_.Clear();
allocation_info_.Reset(nullptr, nullptr);
}
@@ -1641,6 +1638,7 @@ Address PagedSpace::ComputeLimit(Address start, Address end,
}
}
// TODO(ofrobots): refactor this code into SpaceWithLinearArea
void PagedSpace::StartNextInlineAllocationStep() {
if (!allocation_observers_paused_ && SupportsInlineAllocation()) {
top_on_previous_step_ = allocation_observers_.empty() ? 0 : top();
@@ -2150,7 +2148,7 @@ bool NewSpace::EnsureAllocation(int size_in_bytes,
return true;
}
// TODO(ofrobots): refactor this code into SpaceWithLinearArea
void NewSpace::StartNextInlineAllocationStep() {
if (!allocation_observers_paused_) {
top_on_previous_step_ =
@@ -2159,26 +2157,13 @@ void NewSpace::StartNextInlineAllocationStep() {
}
}
// TODO(ofrobots): refactor into SpaceWithLinearArea
void NewSpace::AddAllocationObserver(AllocationObserver* observer) {
InlineAllocationStep(top(), top(), nullptr, 0);
Space::AddAllocationObserver(observer);
}
// TODO(ofrobots): refactor into SpaceWithLinearArea
void PagedSpace::AddAllocationObserver(AllocationObserver* observer) {
void SpaceWithLinearArea::AddAllocationObserver(AllocationObserver* observer) {
InlineAllocationStep(top(), top(), nullptr, 0);
Space::AddAllocationObserver(observer);
}
// TODO(ofrobots): refactor into SpaceWithLinearArea
void NewSpace::RemoveAllocationObserver(AllocationObserver* observer) {
InlineAllocationStep(top(), top(), nullptr, 0);
Space::RemoveAllocationObserver(observer);
}
// TODO(ofrobots): refactor into SpaceWithLinearArea
void PagedSpace::RemoveAllocationObserver(AllocationObserver* observer) {
void SpaceWithLinearArea::RemoveAllocationObserver(
AllocationObserver* observer) {
InlineAllocationStep(top(), top(), nullptr, 0);
Space::RemoveAllocationObserver(observer);
}
@@ -2193,43 +2178,22 @@ void NewSpace::PauseAllocationObservers() {
void PagedSpace::PauseAllocationObservers() {
// Do a step to account for memory allocated so far.
// TODO(ofrobots): Refactor into SpaceWithLinearArea. Note subtle difference
// from NewSpace version.
InlineAllocationStep(top(), nullptr, nullptr, 0);
Space::PauseAllocationObservers();
top_on_previous_step_ = 0;
}
void NewSpace::ResumeAllocationObservers() {
DCHECK_NULL(top_on_previous_step_);
Space::ResumeAllocationObservers();
StartNextInlineAllocationStep();
}
// TODO(ofrobots): refactor into SpaceWithLinearArea
void PagedSpace::ResumeAllocationObservers() {
void SpaceWithLinearArea::ResumeAllocationObservers() {
DCHECK_NULL(top_on_previous_step_);
Space::ResumeAllocationObservers();
StartNextInlineAllocationStep();
}
// TODO(ofrobots): refactor into SpaceWithLinearArea
void PagedSpace::InlineAllocationStep(Address top, Address new_top,
Address soon_object, size_t size) {
if (top_on_previous_step_) {
if (top < top_on_previous_step_) {
// Generated code decreased the top pointer to do folded allocations.
DCHECK_NOT_NULL(top);
DCHECK_EQ(Page::FromAllocationAreaAddress(top),
Page::FromAllocationAreaAddress(top_on_previous_step_));
top_on_previous_step_ = top;
}
int bytes_allocated = static_cast<int>(top - top_on_previous_step_);
AllocationStep(bytes_allocated, soon_object, static_cast<int>(size));
top_on_previous_step_ = new_top;
}
}
void NewSpace::InlineAllocationStep(Address top, Address new_top,
Address soon_object, size_t size) {
void SpaceWithLinearArea::InlineAllocationStep(Address top, Address new_top,
Address soon_object,
size_t size) {
if (top_on_previous_step_) {
if (top < top_on_previous_step_) {
// Generated code decreased the top pointer to do folded allocations.
......
@@ -1966,7 +1966,50 @@ class LocalAllocationBuffer {
AllocationInfo allocation_info_;
};
class V8_EXPORT_PRIVATE PagedSpace : NON_EXPORTED_BASE(public Space) {
class SpaceWithLinearArea : public Space {
public:
SpaceWithLinearArea(Heap* heap, AllocationSpace id, Executability executable)
: Space(heap, id, executable), top_on_previous_step_(0) {
allocation_info_.Reset(nullptr, nullptr);
}
// Returns the allocation pointer in this space.
Address top() { return allocation_info_.top(); }
Address limit() { return allocation_info_.limit(); }
// The allocation top address.
Address* allocation_top_address() { return allocation_info_.top_address(); }
// The allocation limit address.
Address* allocation_limit_address() {
return allocation_info_.limit_address();
}
// If we are doing inline allocation in steps, this method performs the 'step'
// operation. top is the memory address of the bump pointer at the last
// inline allocation (i.e. it determines the numbers of bytes actually
// allocated since the last step.) new_top is the address of the bump pointer
// where the next byte is going to be allocated from. top and new_top may be
// different when we cross a page boundary or reset the space.
// TODO(ofrobots): clarify the precise difference between this and
// Space::AllocationStep.
void InlineAllocationStep(Address top, Address new_top, Address soon_object,
size_t size);
V8_EXPORT_PRIVATE void AddAllocationObserver(
AllocationObserver* observer) override;
V8_EXPORT_PRIVATE void RemoveAllocationObserver(
AllocationObserver* observer) override;
V8_EXPORT_PRIVATE void ResumeAllocationObservers() override;
protected:
// TODO(ofrobots): make these private after refactoring is complete.
AllocationInfo allocation_info_;
Address top_on_previous_step_;
};
class V8_EXPORT_PRIVATE PagedSpace
: NON_EXPORTED_BASE(public SpaceWithLinearArea) {
public:
typedef PageIterator iterator;
@@ -2038,18 +2081,6 @@ class V8_EXPORT_PRIVATE PagedSpace : NON_EXPORTED_BASE(public Space) {
// due to being too small to use for allocation.
virtual size_t Waste() { return free_list_.wasted_bytes(); }
// Returns the allocation pointer in this space.
Address top() { return allocation_info_.top(); }
Address limit() { return allocation_info_.limit(); }
// The allocation top address.
Address* allocation_top_address() { return allocation_info_.top_address(); }
// The allocation limit address.
Address* allocation_limit_address() {
return allocation_info_.limit_address();
}
enum UpdateSkipList { UPDATE_SKIP_LIST, IGNORE_SKIP_LIST };
// Allocate the requested number of bytes in the space if possible, return a
@@ -2090,13 +2121,7 @@ class V8_EXPORT_PRIVATE PagedSpace : NON_EXPORTED_BASE(public Space) {
void ResetFreeList() { free_list_.Reset(); }
void AddAllocationObserver(AllocationObserver* observer) override;
void RemoveAllocationObserver(AllocationObserver* observer) override;
void PauseAllocationObservers() override;
void ResumeAllocationObservers() override;
void InlineAllocationStep(Address top, Address new_top, Address soon_object,
size_t size);
// Empty space allocation info, returning unused area to free list.
void EmptyAllocationInfo();
@@ -2271,14 +2296,9 @@ class V8_EXPORT_PRIVATE PagedSpace : NON_EXPORTED_BASE(public Space) {
// The space's free list.
FreeList free_list_;
// Normal allocation information.
AllocationInfo allocation_info_;
// Mutex guarding any concurrent access to the space.
base::Mutex space_mutex_;
Address top_on_previous_step_;
friend class IncrementalMarking;
friend class MarkCompactCollector;
@@ -2489,13 +2509,12 @@ class SemiSpaceIterator : public ObjectIterator {
// The new space consists of a contiguous pair of semispaces. It simply
// forwards most functions to the appropriate semispace.
class NewSpace : public Space {
class NewSpace : public SpaceWithLinearArea {
public:
typedef PageIterator iterator;
explicit NewSpace(Heap* heap)
: Space(heap, NEW_SPACE, NOT_EXECUTABLE),
top_on_previous_step_(0),
: SpaceWithLinearArea(heap, NEW_SPACE, NOT_EXECUTABLE),
to_space_(heap, kToSpace),
from_space_(heap, kFromSpace),
reservation_(),
@@ -2620,18 +2639,6 @@ class NewSpace : public Space {
return to_space_.minimum_capacity();
}
// Return the address of the allocation pointer in the active semispace.
Address top() {
DCHECK(to_space_.current_page()->ContainsLimit(allocation_info_.top()));
return allocation_info_.top();
}
// Return the address of the allocation pointer limit in the active semispace.
Address limit() {
DCHECK(to_space_.current_page()->ContainsLimit(allocation_info_.limit()));
return allocation_info_.limit();
}
void ResetOriginalTop() {
DCHECK_GE(top(), original_top());
DCHECK_LE(top(), original_limit());
@@ -2649,14 +2656,6 @@ class NewSpace : public Space {
// Set the age mark in the active semispace.
void set_age_mark(Address mark) { to_space_.set_age_mark(mark); }
// The allocation top and limit address.
Address* allocation_top_address() { return allocation_info_.top_address(); }
// The allocation limit address.
Address* allocation_limit_address() {
return allocation_info_.limit_address();
}
MUST_USE_RESULT INLINE(AllocationResult AllocateRawAligned(
int size_in_bytes, AllocationAlignment alignment));
@@ -2746,10 +2745,7 @@ class NewSpace : public Space {
SemiSpace* active_space() { return &to_space_; }
void AddAllocationObserver(AllocationObserver* observer) override;
void RemoveAllocationObserver(AllocationObserver* observer) override;
void PauseAllocationObservers() override;
void ResumeAllocationObservers() override;
iterator begin() { return to_space_.begin(); }
iterator end() { return to_space_.end(); }
@@ -2765,10 +2761,6 @@ class NewSpace : public Space {
base::Mutex mutex_;
// Allocation pointer and limit for normal allocation and allocation during
// mark-compact collection.
AllocationInfo allocation_info_;
Address top_on_previous_step_;
// The top and the limit at the time of setting the allocation info.
// These values can be accessed by background tasks.
base::AtomicValue<Address> original_top_;
@@ -2784,14 +2776,6 @@ class NewSpace : public Space {
bool EnsureAllocation(int size_in_bytes, AllocationAlignment alignment);
// If we are doing inline allocation in steps, this method performs the 'step'
// operation. top is the memory address of the bump pointer at the last
// inline allocation (i.e. it determines the numbers of bytes actually
// allocated since the last step.) new_top is the address of the bump pointer
// where the next byte is going to be allocated from. top and new_top may be
// different when we cross a page boundary or reset the space.
void InlineAllocationStep(Address top, Address new_top, Address soon_object,
size_t size);
void StartNextInlineAllocationStep() override;
friend class SemiSpaceIterator;
......
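
The InlineAllocationStep contract documented in the new header comment boils down to: report top - top_on_previous_step_ bytes to the allocation observers, then remember new_top as the baseline for the next step. A standalone illustration of that bookkeeping follows; StepTracker and its output are hypothetical, not V8 code.

// Standalone sketch of the step accounting; StepTracker is hypothetical.
#include <cstdint>
#include <cstdio>

using Address = std::uintptr_t;

struct StepTracker {
  Address top_on_previous_step = 0;

  // top: bump pointer at the moment of the step; new_top: where the next
  // allocation will start. The delta is what observers are notified about.
  void InlineAllocationStep(Address top, Address new_top) {
    if (top_on_previous_step) {
      int bytes_allocated = static_cast<int>(top - top_on_previous_step);
      std::printf("step: %d bytes allocated since the previous step\n",
                  bytes_allocated);
      top_on_previous_step = new_top;
    }
  }
};

int main() {
  StepTracker tracker;
  tracker.top_on_previous_step = 0x1000;         // previous step ended here
  tracker.InlineAllocationStep(0x1400, 0x1400);  // reports 1024 bytes
  return 0;
}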