Commit 985afadf authored by mlippautz, committed by Commit bot

[heap] Cleanup heap.h

- Move implementation details into -inl.h or .cc file
- Inline NewSpace::Contains manually
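
[Editor's note: for readers unfamiliar with the -inl.h convention this CL relies on: the header keeps only an inline declaration, and the definition moves to a -inl.h file included solely by translation units that actually call the function, which keeps heap.h cheap to include. A minimal sketch of the pattern with hypothetical names (foo.h / foo-inl.h, not files from this CL):]

    // foo.h -- declaration only; including this header stays cheap.
    class Foo {
     public:
      inline int count();  // Definition lives in foo-inl.h.
     private:
      int count_ = 0;
    };

    // foo-inl.h -- the definition; #include "foo.h" first, and include
    // this file only in .cc files that actually call count().
    int Foo::count() { return count_; }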

R=ulan@chromium.org

Review-Url: https://codereview.chromium.org/2316513002
Cr-Commit-Position: refs/heads/master@{#39196}
parent 51a3ffd9
@@ -145,6 +145,67 @@ WELL_KNOWN_SYMBOL_LIST(SYMBOL_ACCESSOR)
 ROOT_LIST(ROOT_ACCESSOR)
 #undef ROOT_ACCESSOR
 
+PagedSpace* Heap::paged_space(int idx) {
+  switch (idx) {
+    case OLD_SPACE:
+      return old_space();
+    case MAP_SPACE:
+      return map_space();
+    case CODE_SPACE:
+      return code_space();
+    case NEW_SPACE:
+    case LO_SPACE:
+      UNREACHABLE();
+  }
+  return NULL;
+}
+
+Space* Heap::space(int idx) {
+  switch (idx) {
+    case NEW_SPACE:
+      return new_space();
+    case LO_SPACE:
+      return lo_space();
+    default:
+      return paged_space(idx);
+  }
+}
+
+Address* Heap::NewSpaceAllocationTopAddress() {
+  return new_space_.allocation_top_address();
+}
+
+Address* Heap::NewSpaceAllocationLimitAddress() {
+  return new_space_.allocation_limit_address();
+}
+
+Address* Heap::OldSpaceAllocationTopAddress() {
+  return old_space_->allocation_top_address();
+}
+
+Address* Heap::OldSpaceAllocationLimitAddress() {
+  return old_space_->allocation_limit_address();
+}
+
+bool Heap::HeapIsFullEnoughToStartIncrementalMarking(intptr_t limit) {
+  if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;
+  intptr_t adjusted_allocation_limit = limit - new_space_.Capacity();
+  if (PromotedTotalSize() >= adjusted_allocation_limit) return true;
+  if (HighMemoryPressure()) return true;
+  return false;
+}
+
+void Heap::UpdateNewSpaceAllocationCounter() {
+  new_space_allocation_counter_ = NewSpaceAllocationCounter();
+}
+
+size_t Heap::NewSpaceAllocationCounter() {
+  return new_space_allocation_counter_ + new_space()->AllocatedSinceLastGC();
+}
+
 template <>
 bool inline Heap::IsOneByte(Vector<const char> str, int chars) {
@@ -413,7 +474,10 @@ void Heap::FinalizeExternalString(String* string) {
 
 bool Heap::InNewSpace(Object* object) {
-  bool result = new_space_.Contains(object);
+  // Inlined check from NewSpace::Contains.
+  bool result =
+      object->IsHeapObject() &&
+      Page::FromAddress(HeapObject::cast(object)->address())->InNewSpace();
   DCHECK(!result ||                 // Either not in new space
          gc_state_ != NOT_IN_GC ||  // ... or in the middle of GC
          InToSpace(object));        // ... or in to-space (where we allocate).
@@ -421,12 +485,16 @@ bool Heap::InNewSpace(Object* object) {
 }
 
 bool Heap::InFromSpace(Object* object) {
-  return new_space_.FromSpaceContains(object);
+  return object->IsHeapObject() &&
+         MemoryChunk::FromAddress(HeapObject::cast(object)->address())
+             ->IsFlagSet(Page::IN_FROM_SPACE);
 }
 
 bool Heap::InToSpace(Object* object) {
-  return new_space_.ToSpaceContains(object);
+  return object->IsHeapObject() &&
+         MemoryChunk::FromAddress(HeapObject::cast(object)->address())
+             ->IsFlagSet(Page::IN_TO_SPACE);
 }
 
 bool Heap::InOldSpace(Object* object) { return old_space_->Contains(object); }
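[Editor's note: the rewritten InNewSpace/InFromSpace/InToSpace predicates above all use the same page-flag trick: round the object's address down to its page, then test space-membership flags stored in the page header. A simplified, self-contained sketch of that mechanism follows; the page size, flag bit positions, and struct layout are illustrative assumptions, not V8's actual definitions.]

    #include <cstdint>

    // Assumed constants for illustration; the real values are defined
    // alongside Page/MemoryChunk in V8's spaces headers.
    constexpr std::uintptr_t kPageSize = 512 * 1024;
    constexpr std::uintptr_t kPageAlignmentMask = kPageSize - 1;

    // Space-membership flags, modeled on Page::IN_FROM_SPACE / IN_TO_SPACE
    // (bit positions here are made up).
    enum : std::uint32_t {
      IN_FROM_SPACE = 1u << 3,
      IN_TO_SPACE = 1u << 4,
    };

    struct MemoryChunk {
      std::uint32_t flags;
      bool IsFlagSet(std::uint32_t flag) const { return (flags & flag) != 0; }
      // Pages are aligned, so masking off the low bits of any interior
      // address yields the chunk header in O(1), with no table lookup.
      static MemoryChunk* FromAddress(std::uintptr_t addr) {
        return reinterpret_cast<MemoryChunk*>(addr & ~kPageAlignmentMask);
      }
    };

    // Each inlined predicate reduces to this shape: find the chunk, test a flag.
    bool InFromSpace(std::uintptr_t object_address) {
      return MemoryChunk::FromAddress(object_address)->IsFlagSet(IN_FROM_SPACE);
    }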
@@ -423,6 +423,7 @@ void Heap::IncrementDeferredCount(v8::Isolate::UseCounterFeature feature) {
   deferred_counters_[feature]++;
 }
 
+bool Heap::UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
 
 void Heap::GarbageCollectionPrologue() {
   {
@@ -5215,6 +5216,14 @@ intptr_t Heap::CalculateOldGenerationAllocationLimit(double factor,
   return Min(limit, halfway_to_the_max);
 }
 
+intptr_t Heap::MinimumAllocationLimitGrowingStep() {
+  const double kRegularAllocationLimitGrowingStep = 8;
+  const double kLowMemoryAllocationLimitGrowingStep = 2;
+  intptr_t limit = (Page::kPageSize > MB ? Page::kPageSize : MB);
+  return limit * (ShouldOptimizeForMemoryUsage()
+                      ? kLowMemoryAllocationLimitGrowingStep
+                      : kRegularAllocationLimitGrowingStep);
+}
+
 void Heap::SetOldGenerationAllocationLimit(intptr_t old_gen_size,
                                            double gc_speed,
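[Editor's note: to make the growing-step arithmetic above concrete: when the page size is at or below 1 MB the base is clamped to 1 MB, so the old-generation allocation limit grows in 8 MB steps normally and 2 MB steps when optimizing for memory usage. A standalone sketch of the same arithmetic; the 512 KB page size is an assumption for illustration:]

    #include <cstdint>
    #include <cstdio>

    constexpr std::intptr_t MB = 1024 * 1024;
    constexpr std::intptr_t kPageSize = 512 * 1024;  // assumed < 1 MB here

    std::intptr_t MinimumAllocationLimitGrowingStep(bool optimize_for_memory) {
      const double kRegularStep = 8;    // normal growth: 8 * base
      const double kLowMemoryStep = 2;  // memory-pressure growth: 2 * base
      std::intptr_t base = (kPageSize > MB ? kPageSize : MB);  // clamp to >= 1 MB
      return static_cast<std::intptr_t>(
          base * (optimize_for_memory ? kLowMemoryStep : kRegularStep));
    }

    int main() {
      std::printf("regular: %lld\n",  // prints 8388608 (8 MB)
                  static_cast<long long>(MinimumAllocationLimitGrowingStep(false)));
      std::printf("low-mem: %lld\n",  // prints 2097152 (2 MB)
                  static_cast<long long>(MinimumAllocationLimitGrowingStep(true)));
    }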
@@ -324,10 +324,13 @@ class HeapObjectsFilter;
 class HeapStats;
 class HistogramTimer;
 class Isolate;
+class MemoryAllocator;
 class MemoryReducer;
 class ObjectStats;
+class PagedSpace;
 class Scavenger;
 class ScavengeJob;
+class Space;
 class StoreBuffer;
 class WeakObjectRetainer;
@@ -589,19 +592,10 @@ class Heap {
 
   bool always_allocate() { return always_allocate_scope_count_.Value() != 0; }
 
-  Address* NewSpaceAllocationTopAddress() {
-    return new_space_.allocation_top_address();
-  }
-  Address* NewSpaceAllocationLimitAddress() {
-    return new_space_.allocation_limit_address();
-  }
-  Address* OldSpaceAllocationTopAddress() {
-    return old_space_->allocation_top_address();
-  }
-  Address* OldSpaceAllocationLimitAddress() {
-    return old_space_->allocation_limit_address();
-  }
+  inline Address* NewSpaceAllocationTopAddress();
+  inline Address* NewSpaceAllocationLimitAddress();
+  inline Address* OldSpaceAllocationTopAddress();
+  inline Address* OldSpaceAllocationLimitAddress();
 
   bool CanExpandOldGeneration(int size) {
     if (force_oom_) return false;
@@ -730,17 +724,7 @@ class Heap {
   // Check new space expansion criteria and expand semispaces if it was hit.
   void CheckNewSpaceExpansionCriteria();
 
-  inline bool HeapIsFullEnoughToStartIncrementalMarking(intptr_t limit) {
-    if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;
-    intptr_t adjusted_allocation_limit = limit - new_space_.Capacity();
-    if (PromotedTotalSize() >= adjusted_allocation_limit) return true;
-    if (HighMemoryPressure()) return true;
-    return false;
-  }
+  inline bool HeapIsFullEnoughToStartIncrementalMarking(intptr_t limit);
 
   void VisitExternalResources(v8::ExternalResourceVisitor* visitor);
@@ -885,31 +869,8 @@ class Heap {
   MapSpace* map_space() { return map_space_; }
   LargeObjectSpace* lo_space() { return lo_space_; }
 
-  PagedSpace* paged_space(int idx) {
-    switch (idx) {
-      case OLD_SPACE:
-        return old_space();
-      case MAP_SPACE:
-        return map_space();
-      case CODE_SPACE:
-        return code_space();
-      case NEW_SPACE:
-      case LO_SPACE:
-        UNREACHABLE();
-    }
-    return NULL;
-  }
-
-  Space* space(int idx) {
-    switch (idx) {
-      case NEW_SPACE:
-        return new_space();
-      case LO_SPACE:
-        return lo_space();
-      default:
-        return paged_space(idx);
-    }
-  }
+  inline PagedSpace* paged_space(int idx);
+  inline Space* space(int idx);
 
   // Returns name of the space.
   const char* GetSpaceName(int idx);
@@ -1278,13 +1239,9 @@
     return static_cast<intptr_t>(total);
   }
 
-  void UpdateNewSpaceAllocationCounter() {
-    new_space_allocation_counter_ = NewSpaceAllocationCounter();
-  }
+  inline void UpdateNewSpaceAllocationCounter();
 
-  size_t NewSpaceAllocationCounter() {
-    return new_space_allocation_counter_ + new_space()->AllocatedSinceLastGC();
-  }
+  inline size_t NewSpaceAllocationCounter();
 
   // This should be used only for testing.
   void set_new_space_allocation_counter(size_t new_value) {
@@ -1602,7 +1559,7 @@
   void EnsureFromSpaceIsCommitted();
 
   // Uncommit unused semi space.
-  bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
+  bool UncommitFromSpace();
 
   // Fill in bogus values in from space
   void ZapFromSpace();
@@ -1763,14 +1720,7 @@
   void SetOldGenerationAllocationLimit(intptr_t old_gen_size, double gc_speed,
                                        double mutator_speed);
 
-  intptr_t MinimumAllocationLimitGrowingStep() {
-    const double kRegularAllocationLimitGrowingStep = 8;
-    const double kLowMemoryAllocationLimitGrowingStep = 2;
-    intptr_t limit = (Page::kPageSize > MB ? Page::kPageSize : MB);
-    return limit * (ShouldOptimizeForMemoryUsage()
-                        ? kLowMemoryAllocationLimitGrowingStep
-                        : kRegularAllocationLimitGrowingStep);
-  }
+  intptr_t MinimumAllocationLimitGrowingStep();
 
   // ===========================================================================
   // Idle notification. ========================================================