Commit 2e316502 authored by mlippautz's avatar mlippautz Committed by Commit bot

[heap] Removes spaces.h include from heap.h

Together with the presubmit rule that prohibits including src/heap/* headers
(except for heap.h), this now properly hides all heap internals.

R=ulan@chromium.org
BUG=

Review-Url: https://codereview.chromium.org/2314783002
Cr-Commit-Position: refs/heads/master@{#39211}
parent 7e60d08d
...@@ -172,11 +172,11 @@ Space* Heap::space(int idx) { ...@@ -172,11 +172,11 @@ Space* Heap::space(int idx) {
} }
Address* Heap::NewSpaceAllocationTopAddress() { Address* Heap::NewSpaceAllocationTopAddress() {
return new_space_.allocation_top_address(); return new_space_->allocation_top_address();
} }
Address* Heap::NewSpaceAllocationLimitAddress() { Address* Heap::NewSpaceAllocationLimitAddress() {
return new_space_.allocation_limit_address(); return new_space_->allocation_limit_address();
} }
Address* Heap::OldSpaceAllocationTopAddress() { Address* Heap::OldSpaceAllocationTopAddress() {
...@@ -190,7 +190,7 @@ Address* Heap::OldSpaceAllocationLimitAddress() { ...@@ -190,7 +190,7 @@ Address* Heap::OldSpaceAllocationLimitAddress() {
bool Heap::HeapIsFullEnoughToStartIncrementalMarking(intptr_t limit) { bool Heap::HeapIsFullEnoughToStartIncrementalMarking(intptr_t limit) {
if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true; if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;
intptr_t adjusted_allocation_limit = limit - new_space_.Capacity(); intptr_t adjusted_allocation_limit = limit - new_space_->Capacity();
if (PromotedTotalSize() >= adjusted_allocation_limit) return true; if (PromotedTotalSize() >= adjusted_allocation_limit) return true;
...@@ -333,7 +333,7 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space, ...@@ -333,7 +333,7 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
if (large_object) { if (large_object) {
space = LO_SPACE; space = LO_SPACE;
} else { } else {
allocation = new_space_.AllocateRaw(size_in_bytes, alignment); allocation = new_space_->AllocateRaw(size_in_bytes, alignment);
if (allocation.To(&object)) { if (allocation.To(&object)) {
OnAllocationEvent(object, size_in_bytes); OnAllocationEvent(object, size_in_bytes);
} }
...@@ -472,6 +472,11 @@ void Heap::FinalizeExternalString(String* string) { ...@@ -472,6 +472,11 @@ void Heap::FinalizeExternalString(String* string) {
} }
} }
// Returns the current allocation top of the new space (the address at which
// the next new-space allocation would be placed).
Address Heap::NewSpaceTop() { return new_space_->top(); }
// True when the new space has grown to its maximum capacity and
// maximum_size_scavenges_ is zero.
// NOTE(review): the exact semantics of maximum_size_scavenges_ are not
// visible here — confirm against its update sites before relying on this
// description.
bool Heap::DeoptMaybeTenuredAllocationSites() {
return new_space_->IsAtMaximumCapacity() && maximum_size_scavenges_ == 0;
}
bool Heap::InNewSpace(Object* object) { bool Heap::InNewSpace(Object* object) {
// Inlined check from NewSpace::Contains. // Inlined check from NewSpace::Contains.
...@@ -500,7 +505,7 @@ bool Heap::InToSpace(Object* object) { ...@@ -500,7 +505,7 @@ bool Heap::InToSpace(Object* object) {
bool Heap::InOldSpace(Object* object) { return old_space_->Contains(object); } bool Heap::InOldSpace(Object* object) { return old_space_->Contains(object); }
bool Heap::InNewSpaceSlow(Address address) { bool Heap::InNewSpaceSlow(Address address) {
return new_space_.ContainsSlow(address); return new_space_->ContainsSlow(address);
} }
bool Heap::InOldSpaceSlow(Address address) { bool Heap::InOldSpaceSlow(Address address) {
...@@ -517,7 +522,7 @@ bool Heap::OldGenerationAllocationLimitReached() { ...@@ -517,7 +522,7 @@ bool Heap::OldGenerationAllocationLimitReached() {
template <PromotionMode promotion_mode> template <PromotionMode promotion_mode>
bool Heap::ShouldBePromoted(Address old_address, int object_size) { bool Heap::ShouldBePromoted(Address old_address, int object_size) {
Page* page = Page::FromAddress(old_address); Page* page = Page::FromAddress(old_address);
Address age_mark = new_space_.age_mark(); Address age_mark = new_space_->age_mark();
if (promotion_mode == PROMOTE_MARKED) { if (promotion_mode == PROMOTE_MARKED) {
MarkBit mark_bit = ObjectMarking::MarkBitFrom(old_address); MarkBit mark_bit = ObjectMarking::MarkBitFrom(old_address);
......
This diff is collapsed.
...@@ -16,9 +16,8 @@ ...@@ -16,9 +16,8 @@
#include "src/base/atomic-utils.h" #include "src/base/atomic-utils.h"
#include "src/globals.h" #include "src/globals.h"
#include "src/heap-symbols.h" #include "src/heap-symbols.h"
// TODO(mstarzinger): One more include to kill!
#include "src/heap/spaces.h"
#include "src/list.h" #include "src/list.h"
#include "src/objects.h"
namespace v8 { namespace v8 {
namespace internal { namespace internal {
...@@ -326,7 +325,9 @@ class HistogramTimer; ...@@ -326,7 +325,9 @@ class HistogramTimer;
class Isolate; class Isolate;
class MemoryAllocator; class MemoryAllocator;
class MemoryReducer; class MemoryReducer;
class ObjectIterator;
class ObjectStats; class ObjectStats;
class Page;
class PagedSpace; class PagedSpace;
class Scavenger; class Scavenger;
class ScavengeJob; class ScavengeJob;
...@@ -401,6 +402,95 @@ class PromotionQueue { ...@@ -401,6 +402,95 @@ class PromotionQueue {
DISALLOW_COPY_AND_ASSIGN(PromotionQueue); DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
}; };
// The result of an allocation attempt: either a successfully allocated
// object, or a retry request. A retry is encoded as a Smi whose value is the
// AllocationSpace in which the allocation should be retried; this encoding
// works because a successful allocation never yields a Smi (see the CHECK in
// the implicit constructor).
class AllocationResult {
public:
// Implicit constructor from Object*.
AllocationResult(Object* object) // NOLINT
: object_(object) {
// AllocationResults can't return Smis, which are used to represent
// failure and the space to retry in.
CHECK(!object->IsSmi());
}
// Default result: retry in NEW_SPACE.
AllocationResult() : object_(Smi::FromInt(NEW_SPACE)) {}
// Creates a retry result for the given space.
static inline AllocationResult Retry(AllocationSpace space = NEW_SPACE) {
return AllocationResult(space);
}
// True when this result encodes a retry (i.e. object_ is a Smi).
inline bool IsRetry() { return object_->IsSmi(); }
// On success stores the object, cast to T, into *obj and returns true;
// returns false (leaving *obj untouched) for a retry result.
template <typename T>
bool To(T** obj) {
if (IsRetry()) return false;
*obj = T::cast(object_);
return true;
}
// Returns the allocated object; CHECK-fails if this is a retry result.
Object* ToObjectChecked() {
CHECK(!IsRetry());
return object_;
}
// For retry results: the space in which to retry (defined out of line).
inline AllocationSpace RetrySpace();
private:
// Retry constructor: stores the retry space as a Smi.
explicit AllocationResult(AllocationSpace space)
: object_(Smi::FromInt(static_cast<int>(space))) {}
// Either the allocated object or a Smi encoding the retry space.
Object* object_;
};
// Enforce that AllocationResult stays pointer-sized (a single tagged word).
STATIC_ASSERT(sizeof(AllocationResult) == kPointerSize);
#ifdef DEBUG
// Debug-only record aggregating statistics for one distinct code comment:
// the comment text plus a size total and an occurrence count.
struct CommentStatistic {
const char* comment; // comment text; Clear() resets it without freeing
int size;
int count;
// Resets the entry to an empty/unused state.
void Clear() {
comment = NULL;
size = 0;
count = 0;
}
// Must be small, since an iteration is used for lookup.
static const int kMaxComments = 64;
};
#endif
// Pair of counters — an object count and a byte total — used as the base
// class for histogram entries (see HistogramInfo below).
class NumberAndSizeInfo BASE_EMBEDDED {
public:
NumberAndSizeInfo() : number_(0), bytes_(0) {}
// Number of items recorded so far.
int number() const { return number_; }
void increment_number(int num) { number_ += num; }
// Total bytes recorded so far.
int bytes() const { return bytes_; }
void increment_bytes(int size) { bytes_ += size; }
// Resets both counters to zero.
void clear() {
number_ = 0;
bytes_ = 0;
}
private:
int number_;
int bytes_;
};
// HistogramInfo class for recording a single "bar" of a histogram. This
// class is used for collecting statistics to print to the log file.
class HistogramInfo : public NumberAndSizeInfo {
public:
HistogramInfo() : NumberAndSizeInfo(), name_(nullptr) {}
// Label of this histogram bar; nullptr until set_name() is called.
const char* name() { return name_; }
void set_name(const char* name) { name_ = name; }
private:
// Not owned; caller-provided string must outlive this object.
const char* name_;
};
class Heap { class Heap {
public: public:
// Declare all the root indices. This defines the root list order. // Declare all the root indices. This defines the root list order.
...@@ -785,9 +875,7 @@ class Heap { ...@@ -785,9 +875,7 @@ class Heap {
void DeoptMarkedAllocationSites(); void DeoptMarkedAllocationSites();
bool DeoptMaybeTenuredAllocationSites() { inline bool DeoptMaybeTenuredAllocationSites();
return new_space_.IsAtMaximumCapacity() && maximum_size_scavenges_ == 0;
}
void AddWeakNewSpaceObjectToCodeDependency(Handle<HeapObject> obj, void AddWeakNewSpaceObjectToCodeDependency(Handle<HeapObject> obj,
Handle<WeakCell> code); Handle<WeakCell> code);
...@@ -861,9 +949,9 @@ class Heap { ...@@ -861,9 +949,9 @@ class Heap {
// Getters for spaces. ======================================================= // Getters for spaces. =======================================================
// =========================================================================== // ===========================================================================
Address NewSpaceTop() { return new_space_.top(); } inline Address NewSpaceTop();
NewSpace* new_space() { return &new_space_; } NewSpace* new_space() { return new_space_; }
OldSpace* old_space() { return old_space_; } OldSpace* old_space() { return old_space_; }
OldSpace* code_space() { return code_space_; } OldSpace* code_space() { return code_space_; }
MapSpace* map_space() { return map_space_; } MapSpace* map_space() { return map_space_; }
...@@ -2001,7 +2089,7 @@ class Heap { ...@@ -2001,7 +2089,7 @@ class Heap {
int global_ic_age_; int global_ic_age_;
NewSpace new_space_; NewSpace* new_space_;
OldSpace* old_space_; OldSpace* old_space_;
OldSpace* code_space_; OldSpace* code_space_;
MapSpace* map_space_; MapSpace* map_space_;
......
...@@ -17,6 +17,7 @@ ...@@ -17,6 +17,7 @@
#include "src/base/platform/mutex.h" #include "src/base/platform/mutex.h"
#include "src/flags.h" #include "src/flags.h"
#include "src/globals.h" #include "src/globals.h"
#include "src/heap/heap.h"
#include "src/heap/marking.h" #include "src/heap/marking.h"
#include "src/list.h" #include "src/list.h"
#include "src/objects.h" #include "src/objects.h"
...@@ -1868,50 +1869,6 @@ class FreeList { ...@@ -1868,50 +1869,6 @@ class FreeList {
DISALLOW_IMPLICIT_CONSTRUCTORS(FreeList); DISALLOW_IMPLICIT_CONSTRUCTORS(FreeList);
}; };
// The result of an allocation attempt: either a successfully allocated
// object, or a retry request encoded as a Smi whose value is the
// AllocationSpace in which to retry (a successful allocation never yields a
// Smi — see the CHECK in the implicit constructor).
class AllocationResult {
public:
// Implicit constructor from Object*.
AllocationResult(Object* object) // NOLINT
: object_(object) {
// AllocationResults can't return Smis, which are used to represent
// failure and the space to retry in.
CHECK(!object->IsSmi());
}
// Default result: retry in NEW_SPACE.
AllocationResult() : object_(Smi::FromInt(NEW_SPACE)) {}
// Creates a retry result for the given space.
static inline AllocationResult Retry(AllocationSpace space = NEW_SPACE) {
return AllocationResult(space);
}
// True when this result encodes a retry (i.e. object_ is a Smi).
inline bool IsRetry() { return object_->IsSmi(); }
// On success stores the object, cast to T, into *obj and returns true;
// returns false (leaving *obj untouched) for a retry result.
template <typename T>
bool To(T** obj) {
if (IsRetry()) return false;
*obj = T::cast(object_);
return true;
}
// Returns the allocated object; CHECK-fails if this is a retry result.
Object* ToObjectChecked() {
CHECK(!IsRetry());
return object_;
}
// For retry results: the space in which to retry (defined out of line).
inline AllocationSpace RetrySpace();
private:
// Retry constructor: stores the retry space as a Smi.
explicit AllocationResult(AllocationSpace space)
: object_(Smi::FromInt(static_cast<int>(space))) {}
// Either the allocated object or a Smi encoding the retry space.
Object* object_;
};
// Enforce that AllocationResult stays pointer-sized (a single tagged word).
STATIC_ASSERT(sizeof(AllocationResult) == kPointerSize);
// LocalAllocationBuffer represents a linear allocation area that is created // LocalAllocationBuffer represents a linear allocation area that is created
// from a given {AllocationResult} and can be used to allocate memory without // from a given {AllocationResult} and can be used to allocate memory without
// synchronization. // synchronization.
...@@ -2247,41 +2204,6 @@ class PagedSpace : public Space { ...@@ -2247,41 +2204,6 @@ class PagedSpace : public Space {
friend class HeapTester; friend class HeapTester;
}; };
// Pair of counters — an object count and a byte total — used as the base
// class for histogram entries (see HistogramInfo below).
class NumberAndSizeInfo BASE_EMBEDDED {
public:
NumberAndSizeInfo() : number_(0), bytes_(0) {}
// Number of items recorded so far.
int number() const { return number_; }
void increment_number(int num) { number_ += num; }
// Total bytes recorded so far.
int bytes() const { return bytes_; }
void increment_bytes(int size) { bytes_ += size; }
// Resets both counters to zero.
void clear() {
number_ = 0;
bytes_ = 0;
}
private:
int number_;
int bytes_;
};
// HistogramInfo class for recording a single "bar" of a histogram. This
// class is used for collecting statistics to print to the log file.
class HistogramInfo : public NumberAndSizeInfo {
public:
// NOTE(review): this constructor leaves name_ uninitialized; callers must
// call set_name() before reading name().
HistogramInfo() : NumberAndSizeInfo() {}
const char* name() { return name_; }
void set_name(const char* name) { name_ = name; }
private:
// Not owned; caller-provided string must outlive this object.
const char* name_;
};
enum SemiSpaceId { kFromSpace = 0, kToSpace = 1 }; enum SemiSpaceId { kFromSpace = 0, kToSpace = 1 };
// ----------------------------------------------------------------------------- // -----------------------------------------------------------------------------
...@@ -3022,20 +2944,6 @@ class MemoryChunkIterator BASE_EMBEDDED { ...@@ -3022,20 +2944,6 @@ class MemoryChunkIterator BASE_EMBEDDED {
LargePageIterator lo_iterator_; LargePageIterator lo_iterator_;
}; };
#ifdef DEBUG
// Debug-only record aggregating statistics for one distinct code comment:
// the comment text plus a size total and an occurrence count.
struct CommentStatistic {
const char* comment; // comment text; Clear() resets it without freeing
int size;
int count;
// Resets the entry to an empty/unused state.
void Clear() {
comment = NULL;
size = 0;
count = 0;
}
// Must be small, since an iteration is used for lookup.
static const int kMaxComments = 64;
};
#endif
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment