Commit 6ac65cfb authored by Sigurd Schneider, committed by Commit Bot

[cctest] Add V8_EXPORT_PRIVATE for cctest (heap part)

Bug: v8:9020

Change-Id: I67e052b3a15ef88c21d056ca824d32da68cbbcfd
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1541049
Commit-Queue: Sigurd Schneider <sigurds@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#60548}
parent 5e8eb540
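
Background: cctest links against the v8 component (shared library) rather than being compiled into it, so every internal heap symbol the tests call must be exported from that library. V8_EXPORT_PRIVATE marks such symbols. As a simplified sketch only (the real macro in V8's headers covers more configurations; BUILDING_V8_SHARED and USING_V8_SHARED are the component-build defines assumed here), it expands along these lines:

    // Simplified sketch of V8_EXPORT_PRIVATE, not the verbatim definition.
    #if defined(_WIN32)
    #  if defined(BUILDING_V8_SHARED)
    #    define V8_EXPORT_PRIVATE __declspec(dllexport)
    #  elif defined(USING_V8_SHARED)
    #    define V8_EXPORT_PRIVATE __declspec(dllimport)
    #  else
    #    define V8_EXPORT_PRIVATE
    #  endif
    #else
    #  if defined(BUILDING_V8_SHARED)
    #    define V8_EXPORT_PRIVATE __attribute__((visibility("default")))
    #  else
    #    define V8_EXPORT_PRIVATE
    #  endif
    #endif

In a static (non-component) build the macro expands to nothing, so the annotations below only affect the component build that cctest uses.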
@@ -53,7 +53,7 @@ class ArrayBufferTracker : public AllStatic {
   static bool ProcessBuffers(Page* page, ProcessingMode mode);
   // Returns whether a buffer is currently tracked.
-  static bool IsTracked(JSArrayBuffer buffer);
+  V8_EXPORT_PRIVATE static bool IsTracked(JSArrayBuffer buffer);
   // Tears down the tracker and frees up all registered array buffers.
   static void TearDown(Heap* heap);
@@ -33,7 +33,7 @@ struct MemoryChunkData {
 using MemoryChunkDataMap =
     std::unordered_map<MemoryChunk*, MemoryChunkData, MemoryChunk::Hasher>;
-class ConcurrentMarking {
+class V8_EXPORT_PRIVATE ConcurrentMarking {
  public:
   // When the scope is entered, the concurrent marking tasks
   // are preempted and are not looking at the heap objects, concurrent marking
@@ -1057,8 +1057,9 @@ class NewFunctionArgs final {
   static NewFunctionArgs ForWasm(
       Handle<String> name,
       Handle<WasmExportedFunctionData> exported_function_data, Handle<Map> map);
-  static NewFunctionArgs ForBuiltin(Handle<String> name, Handle<Map> map,
-                                    int builtin_id);
+  V8_EXPORT_PRIVATE static NewFunctionArgs ForBuiltin(Handle<String> name,
+                                                      Handle<Map> map,
+                                                      int builtin_id);
   static NewFunctionArgs ForFunctionWithoutCode(Handle<String> name,
                                                 Handle<Map> map,
                                                 LanguageMode language_mode);
@@ -271,10 +271,12 @@ class Heap {
   // Calculates the maximum amount of filler that could be required by the
   // given alignment.
-  static int GetMaximumFillToAlign(AllocationAlignment alignment);
+  V8_EXPORT_PRIVATE static int GetMaximumFillToAlign(
+      AllocationAlignment alignment);
   // Calculates the actual amount of filler required for a given address at the
   // given alignment.
-  static int GetFillToAlign(Address address, AllocationAlignment alignment);
+  V8_EXPORT_PRIVATE static int GetFillToAlign(Address address,
+                                              AllocationAlignment alignment);
   void FatalProcessOutOfMemory(const char* location);
@@ -391,17 +393,19 @@ class Heap {
   // Trim the given array from the left. Note that this relocates the object
   // start and hence is only valid if there is only a single reference to it.
-  FixedArrayBase LeftTrimFixedArray(FixedArrayBase obj, int elements_to_trim);
+  V8_EXPORT_PRIVATE FixedArrayBase LeftTrimFixedArray(FixedArrayBase obj,
+                                                      int elements_to_trim);
   // Trim the given array from the right.
-  void RightTrimFixedArray(FixedArrayBase obj, int elements_to_trim);
+  V8_EXPORT_PRIVATE void RightTrimFixedArray(FixedArrayBase obj,
+                                             int elements_to_trim);
   void RightTrimWeakFixedArray(WeakFixedArray obj, int elements_to_trim);
   // Converts the given boolean condition to JavaScript boolean value.
   inline Oddball ToBoolean(bool condition);
   // Notify the heap that a context has been disposed.
-  int NotifyContextDisposed(bool dependant_context);
+  V8_EXPORT_PRIVATE int NotifyContextDisposed(bool dependant_context);
   void set_native_contexts_list(Object object) {
     native_contexts_list_ = object;
@@ -453,7 +457,7 @@ class Heap {
   }
   void UnprotectAndRegisterMemoryChunk(MemoryChunk* chunk);
-  void UnprotectAndRegisterMemoryChunk(HeapObject object);
+  V8_EXPORT_PRIVATE void UnprotectAndRegisterMemoryChunk(HeapObject object);
   void UnregisterUnprotectedMemoryChunk(MemoryChunk* chunk);
   V8_EXPORT_PRIVATE void ProtectUnprotectedMemoryChunks();
@@ -493,16 +497,18 @@ class Heap {
   bool IdleNotification(double deadline_in_seconds);
   bool IdleNotification(int idle_time_in_ms);
-  void MemoryPressureNotification(MemoryPressureLevel level,
-                                  bool is_isolate_locked);
+  V8_EXPORT_PRIVATE void MemoryPressureNotification(MemoryPressureLevel level,
+                                                    bool is_isolate_locked);
   void CheckMemoryPressure();
-  void AddNearHeapLimitCallback(v8::NearHeapLimitCallback, void* data);
-  void RemoveNearHeapLimitCallback(v8::NearHeapLimitCallback callback,
-                                   size_t heap_limit);
-  void AutomaticallyRestoreInitialHeapLimit(double threshold_percent);
+  V8_EXPORT_PRIVATE void AddNearHeapLimitCallback(v8::NearHeapLimitCallback,
+                                                  void* data);
+  V8_EXPORT_PRIVATE void RemoveNearHeapLimitCallback(
+      v8::NearHeapLimitCallback callback, size_t heap_limit);
+  V8_EXPORT_PRIVATE void AutomaticallyRestoreInitialHeapLimit(
+      double threshold_percent);
-  double MonotonicallyIncreasingTimeInMs();
+  V8_EXPORT_PRIVATE double MonotonicallyIncreasingTimeInMs();
   void RecordStats(HeapStats* stats, bool take_snapshot = false);
@@ -538,7 +544,7 @@ class Heap {
   void CompactWeakArrayLists(AllocationType allocation);
-  void AddRetainedMap(Handle<Map> map);
+  V8_EXPORT_PRIVATE void AddRetainedMap(Handle<Map> map);
   // This event is triggered after successful allocation of a new object made
   // by runtime. Allocations of target space for object evacuation do not
@@ -558,7 +564,7 @@ class Heap {
   void ActivateMemoryReducerIfNeeded();
-  bool ShouldOptimizeForMemoryUsage();
+  V8_EXPORT_PRIVATE bool ShouldOptimizeForMemoryUsage();
   bool HighMemoryPressure() {
     return memory_pressure_level_ != MemoryPressureLevel::kNone;
@@ -704,7 +710,7 @@ class Heap {
       std::function<void(HeapObject object, ObjectSlot slot, Object target)>
           gc_notify_updated_slot);
-  void AddKeepDuringJobTarget(Handle<JSReceiver> target);
+  V8_EXPORT_PRIVATE void AddKeepDuringJobTarget(Handle<JSReceiver> target);
   void ClearKeepDuringJobSet();
   // ===========================================================================
@@ -715,8 +721,8 @@ class Heap {
   bool inline_allocation_disabled() { return inline_allocation_disabled_; }
   // Switch whether inline bump-pointer allocation should be used.
-  void EnableInlineAllocation();
-  void DisableInlineAllocation();
+  V8_EXPORT_PRIVATE void EnableInlineAllocation();
+  V8_EXPORT_PRIVATE void DisableInlineAllocation();
   // ===========================================================================
   // Methods triggering GCs. ===================================================
@@ -735,12 +741,13 @@ class Heap {
       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
   // Last hope GC, should try to squeeze as much as possible.
-  void CollectAllAvailableGarbage(GarbageCollectionReason gc_reason);
+  V8_EXPORT_PRIVATE void CollectAllAvailableGarbage(
+      GarbageCollectionReason gc_reason);
   // Precise garbage collection that potentially finalizes already running
   // incremental marking before performing an atomic garbage collection.
   // Only use if absolutely necessary or in tests to avoid floating garbage!
-  void PreciseCollectAllGarbage(
+  V8_EXPORT_PRIVATE void PreciseCollectAllGarbage(
       int flags, GarbageCollectionReason gc_reason,
       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
@@ -763,7 +770,7 @@ class Heap {
   // Builtins. =================================================================
   // ===========================================================================
-  Code builtin(int index);
+  V8_EXPORT_PRIVATE Code builtin(int index);
   Address builtin_address(int index);
   void set_builtin(int index, Code builtin);
@@ -823,13 +830,13 @@ class Heap {
   // Start incremental marking and ensure that idle time handler can perform
   // incremental steps.
-  void StartIdleIncrementalMarking(
+  V8_EXPORT_PRIVATE void StartIdleIncrementalMarking(
       GarbageCollectionReason gc_reason,
       GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags);
   // Starts incremental marking assuming incremental marking is currently
   // stopped.
-  void StartIncrementalMarking(
+  V8_EXPORT_PRIVATE void StartIncrementalMarking(
      int gc_flags, GarbageCollectionReason gc_reason,
      GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags);
@@ -865,7 +872,8 @@ class Heap {
   // This function checks that either
   // - the map transition is safe,
   // - or it was communicated to GC using NotifyObjectLayoutChange.
-  void VerifyObjectLayoutChange(HeapObject object, Map new_map);
+  V8_EXPORT_PRIVATE void VerifyObjectLayoutChange(HeapObject object,
+                                                  Map new_map);
 #endif
   // ===========================================================================
@@ -910,8 +918,8 @@ class Heap {
   // Called when a string's resource is changed. The size of the payload is sent
   // as argument of the method.
-  void UpdateExternalString(String string, size_t old_payload,
-                            size_t new_payload);
+  V8_EXPORT_PRIVATE void UpdateExternalString(String string, size_t old_payload,
+                                              size_t new_payload);
   // Finalizes an external string by deleting the associated external
   // data and clearing the resource pointer.
@@ -940,11 +948,11 @@ class Heap {
   // Checks whether an address/object in the heap (including auxiliary
   // area and unused area).
-  bool Contains(HeapObject value);
+  V8_EXPORT_PRIVATE bool Contains(HeapObject value);
   // Checks whether an address/object in a space.
   // Currently used by tests, serialization and heap verification only.
-  bool InSpace(HeapObject value, AllocationSpace space);
+  V8_EXPORT_PRIVATE bool InSpace(HeapObject value, AllocationSpace space);
   // Slow methods that can be used for verification as they can also be used
   // with off-heap Addresses.
@@ -989,7 +997,7 @@ class Heap {
   // ===========================================================================
   // Returns the maximum amount of memory reserved for the heap.
-  size_t MaxReserved();
+  V8_EXPORT_PRIVATE size_t MaxReserved();
   size_t MaxSemiSpaceSize() { return max_semi_space_size_; }
   size_t InitialSemiSpaceSize() { return initial_semispace_size_; }
   size_t MaxOldGenerationSize() { return max_old_generation_size_; }
@@ -1017,7 +1025,7 @@ class Heap {
   size_t Capacity();
   // Returns the capacity of the old generation.
-  size_t OldGenerationCapacity();
+  V8_EXPORT_PRIVATE size_t OldGenerationCapacity();
   // Returns the amount of memory currently held alive by the unmapper.
   size_t CommittedMemoryOfUnmapper();
@@ -1047,7 +1055,7 @@ class Heap {
   size_t Available();
   // Returns of size of all objects residing in the heap.
-  size_t SizeOfObjects();
+  V8_EXPORT_PRIVATE size_t SizeOfObjects();
   void UpdateSurvivalStatistics(int start_new_space_size);
@@ -1131,7 +1139,7 @@ class Heap {
   // Returns the size of objects residing in non-new spaces.
   // Excludes external memory held by those objects.
-  size_t OldGenerationSizeOfObjects();
+  V8_EXPORT_PRIVATE size_t OldGenerationSizeOfObjects();
   // ===========================================================================
   // Prologue/epilogue callback methods.========================================
@@ -1155,8 +1163,8 @@ class Heap {
   // ===========================================================================
   // Creates a filler object and returns a heap object immediately after it.
-  V8_WARN_UNUSED_RESULT HeapObject PrecedeWithFiller(HeapObject object,
-                                                     int filler_size);
+  V8_EXPORT_PRIVATE V8_WARN_UNUSED_RESULT HeapObject
+  PrecedeWithFiller(HeapObject object, int filler_size);
   // Creates a filler object if needed for alignment and returns a heap object
   // immediately after it. If any space is left after the returned object,
@@ -1243,7 +1251,7 @@ class Heap {
   // =============================================================================
 #ifdef VERIFY_HEAP
   // Verify the heap is in its normal state before or after a GC.
-  void Verify();
+  V8_EXPORT_PRIVATE void Verify();
   void VerifyRememberedSetFor(HeapObject object);
 #endif
@@ -1444,13 +1452,14 @@ class Heap {
   void EnsureFromSpaceIsCommitted();
   // Uncommit unused semi space.
-  bool UncommitFromSpace();
+  V8_EXPORT_PRIVATE bool UncommitFromSpace();
   // Fill in bogus values in from space
   void ZapFromSpace();
   // Zaps the memory of a code object.
-  void ZapCodeObject(Address start_address, int size_in_bytes);
+  V8_EXPORT_PRIVATE void ZapCodeObject(Address start_address,
+                                       int size_in_bytes);
   // Deopts all code that contains allocation instruction which are tenured or
   // not tenured. Moreover it clears the pretenuring allocation site statistics.
@@ -1640,7 +1649,7 @@ class Heap {
   bool always_allocate() { return always_allocate_scope_count_ != 0; }
-  bool CanExpandOldGeneration(size_t size);
+  V8_EXPORT_PRIVATE bool CanExpandOldGeneration(size_t size);
   bool ShouldExpandOldGenerationOnSlowAllocation();
@@ -1740,7 +1749,7 @@ class Heap {
   void PrintRetainingPath(HeapObject object, RetainingPathOption option);
 #ifdef DEBUG
-  void IncrementObjectCounters();
+  V8_EXPORT_PRIVATE void IncrementObjectCounters();
 #endif // DEBUG
   // The amount of memory that has been freed concurrently.
@@ -1815,7 +1824,7 @@ class Heap {
   int gc_post_processing_depth_ = 0;
   // Returns the amount of external memory registered since last global gc.
-  uint64_t PromotedExternalMemorySize();
+  V8_EXPORT_PRIVATE uint64_t PromotedExternalMemorySize();
   // How many "runtime allocations" happened.
   uint32_t allocations_count_ = 0;
@@ -2054,7 +2063,6 @@ class Heap {
   DISALLOW_COPY_AND_ASSIGN(Heap);
 };
-
 class HeapStats {
  public:
   static const int kStartMarker = 0xDECADE00;
@@ -2224,7 +2232,7 @@ class SpaceIterator : public Malloced {
 // nodes filtering uses GC marks, it can't be used during MS/MC GC
 // phases. Also, it is forbidden to interrupt iteration in this mode,
 // as this will leave heap objects marked (and thus, unusable).
-class HeapIterator {
+class V8_EXPORT_PRIVATE HeapIterator {
  public:
   enum HeapObjectsFiltering { kNoFiltering, kFilterUnreachable };
@@ -28,7 +28,7 @@ using InvalidatedSlots = std::map<HeapObject, int, Object::Comparer>;
 // implementation with complexity O(m*log(m) + n), where
 // m is the number of invalidated objects in the memory chunk.
 // n is the number of IsValid queries.
-class InvalidatedSlotsFilter {
+class V8_EXPORT_PRIVATE InvalidatedSlotsFilter {
  public:
   explicit InvalidatedSlotsFilter(MemoryChunk* chunk);
   inline bool IsValid(Address slot);
@@ -629,7 +629,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
   // Ensures that sweeping is finished.
   //
   // Note: Can only be called safely from main thread.
-  void EnsureSweepingCompleted();
+  V8_EXPORT_PRIVATE void EnsureSweepingCompleted();
   // Checks if sweeping is in progress right now on any space.
   bool sweeping_in_progress() const { return sweeper_->sweeping_in_progress(); }
@@ -36,7 +36,7 @@ class ReadOnlyHeap final {
   void OnHeapTearDown();
   // Returns whether the object resides in the read-only space.
-  static bool Contains(HeapObject object);
+  V8_EXPORT_PRIVATE static bool Contains(HeapObject object);
   std::vector<Object>* read_only_object_cache() {
     return &read_only_object_cache_;
@@ -1327,8 +1327,8 @@ static SlotSet* AllocateAndInitializeSlotSet(size_t size, Address page_start) {
   return slot_set;
 }
-template SlotSet* MemoryChunk::AllocateSlotSet<OLD_TO_NEW>();
-template SlotSet* MemoryChunk::AllocateSlotSet<OLD_TO_OLD>();
+template V8_EXPORT_PRIVATE SlotSet* MemoryChunk::AllocateSlotSet<OLD_TO_NEW>();
+template V8_EXPORT_PRIVATE SlotSet* MemoryChunk::AllocateSlotSet<OLD_TO_OLD>();
 template <RememberedSetType type>
 SlotSet* MemoryChunk::AllocateSlotSet() {
@@ -233,7 +233,7 @@ class FreeListCategory {
   DISALLOW_IMPLICIT_CONSTRUCTORS(FreeListCategory);
 };
-class MemoryChunkLayout {
+class V8_EXPORT_PRIVATE MemoryChunkLayout {
  public:
   static size_t CodePageGuardStartOffset();
   static size_t CodePageGuardSize();
@@ -241,7 +241,7 @@ class MemoryChunkLayout {
   static intptr_t ObjectEndOffsetInCodePage();
   static size_t AllocatableMemoryInCodePage();
   static intptr_t ObjectStartOffsetInDataPage();
-  V8_EXPORT_PRIVATE static size_t AllocatableMemoryInDataPage();
+  static size_t AllocatableMemoryInDataPage();
   static size_t ObjectStartOffsetInMemoryChunk(AllocationSpace space);
   static size_t AllocatableMemoryInMemoryChunk(AllocationSpace space);
 };
@@ -504,7 +504,7 @@ class MemoryChunk {
   }
   template <RememberedSetType type>
-  SlotSet* AllocateSlotSet();
+  V8_EXPORT_PRIVATE SlotSet* AllocateSlotSet();
   // Not safe to be called concurrently.
   template <RememberedSetType type>
   void ReleaseSlotSet();
@@ -516,7 +516,8 @@
   InvalidatedSlots* AllocateInvalidatedSlots();
   void ReleaseInvalidatedSlots();
-  void RegisterObjectWithInvalidatedSlots(HeapObject object, int size);
+  V8_EXPORT_PRIVATE void RegisterObjectWithInvalidatedSlots(HeapObject object,
+                                                            int size);
   // Updates invalidated_slots after array left-trimming.
   void MoveObjectWithInvalidatedSlots(HeapObject old_start,
                                       HeapObject new_start);
@@ -536,7 +537,7 @@
   size_t area_size() { return static_cast<size_t>(area_end() - area_start()); }
   // Approximate amount of physical memory committed for this chunk.
-  size_t CommittedPhysicalMemory();
+  V8_EXPORT_PRIVATE size_t CommittedPhysicalMemory();
   Address HighWaterMark() { return address() + high_water_mark_; }
@@ -646,8 +647,7 @@
     return InYoungGeneration() && IsLargePage();
   }
   bool InOldSpace() const;
-
-  bool InLargeObjectSpace() const;
+  V8_EXPORT_PRIVATE bool InLargeObjectSpace() const;
   Space* owner() const { return owner_; }
@@ -659,9 +659,9 @@
   // MemoryChunk::synchronized_heap() to simulate the barrier.
   void InitializationMemoryFence();
-  void SetReadable();
-  void SetReadAndExecutable();
-  void SetReadAndWritable();
+  V8_EXPORT_PRIVATE void SetReadable();
+  V8_EXPORT_PRIVATE void SetReadAndExecutable();
+  V8_EXPORT_PRIVATE void SetReadAndWritable();
   void SetDefaultCodePermissions() {
     if (FLAG_jitless) {
@@ -974,7 +974,7 @@ class LargePage : public MemoryChunk {
 // ----------------------------------------------------------------------------
 // Space is the abstract superclass for all allocation spaces.
-class Space : public Malloced {
+class V8_EXPORT_PRIVATE Space : public Malloced {
  public:
   Space(Heap* heap, AllocationSpace id)
       : allocation_observers_paused_(false),
@@ -1007,17 +1007,15 @@ class Space : public Malloced {
   const char* name() { return AllocationSpaceName(id_); }
-  V8_EXPORT_PRIVATE virtual void AddAllocationObserver(
-      AllocationObserver* observer);
+  virtual void AddAllocationObserver(AllocationObserver* observer);
-  V8_EXPORT_PRIVATE virtual void RemoveAllocationObserver(
-      AllocationObserver* observer);
+  virtual void RemoveAllocationObserver(AllocationObserver* observer);
-  V8_EXPORT_PRIVATE virtual void PauseAllocationObservers();
+  virtual void PauseAllocationObservers();
-  V8_EXPORT_PRIVATE virtual void ResumeAllocationObservers();
+  virtual void ResumeAllocationObservers();
-  V8_EXPORT_PRIVATE virtual void StartNextInlineAllocationStep() {}
+  virtual void StartNextInlineAllocationStep() {}
   void AllocationStep(int bytes_since_last, Address soon_object, int size);
@@ -1075,7 +1073,7 @@ class Space : public Malloced {
     return external_backing_store_bytes_[type];
   }
-  V8_EXPORT_PRIVATE void* GetRandomMmapAddr();
+  void* GetRandomMmapAddr();
   MemoryChunk* first_page() { return memory_chunk_list_.front(); }
   MemoryChunk* last_page() { return memory_chunk_list_.back(); }
@@ -1114,7 +1112,6 @@ class Space : public Malloced {
   DISALLOW_COPY_AND_ASSIGN(Space);
 };
-
 class MemoryChunkValidator {
   // Computed offsets should match the compiler generated ones.
   STATIC_ASSERT(MemoryChunk::kSizeOffset == offsetof(MemoryChunk, size_));
@@ -1204,7 +1201,7 @@ class SkipList {
 // A space acquires chunks of memory from the operating system. The memory
 // allocator allocates and deallocates pages for the paged heap spaces and large
 // pages for large object space.
-class V8_EXPORT_PRIVATE MemoryAllocator {
+class MemoryAllocator {
  public:
   // Unmapper takes care of concurrently unmapping and uncommitting memory
   // chunks.
@@ -1250,10 +1247,10 @@ class V8_EXPORT_PRIVATE MemoryAllocator {
   V8_EXPORT_PRIVATE void FreeQueuedChunks();
   void CancelAndWaitForPendingTasks();
   void PrepareForGC();
-  void EnsureUnmappingCompleted();
+  V8_EXPORT_PRIVATE void EnsureUnmappingCompleted();
   V8_EXPORT_PRIVATE void TearDown();
   size_t NumberOfCommittedChunks();
-  int NumberOfChunks();
+  V8_EXPORT_PRIVATE int NumberOfChunks();
   size_t CommittedBufferedMemory();
  private:
@@ -1319,18 +1316,18 @@ class V8_EXPORT_PRIVATE MemoryAllocator {
     kPooledAndQueue,
   };
-  static intptr_t GetCommitPageSize();
+  V8_EXPORT_PRIVATE static intptr_t GetCommitPageSize();
   // Computes the memory area of discardable memory within a given memory area
   // [addr, addr+size) and returns the result as base::AddressRegion. If the
   // memory is not discardable base::AddressRegion is an empty region.
-  static base::AddressRegion ComputeDiscardMemoryArea(Address addr,
-                                                      size_t size);
+  V8_EXPORT_PRIVATE static base::AddressRegion ComputeDiscardMemoryArea(
+      Address addr, size_t size);
-  MemoryAllocator(Isolate* isolate, size_t max_capacity,
-                  size_t code_range_size);
+  V8_EXPORT_PRIVATE MemoryAllocator(Isolate* isolate, size_t max_capacity,
+                                    size_t code_range_size);
-  void TearDown();
+  V8_EXPORT_PRIVATE void TearDown();
   // Allocates a Page from the allocator. AllocationMode is used to indicate
   // whether pooled allocation, which only works for MemoryChunk::kPageSize,
@@ -1369,8 +1366,10 @@ class V8_EXPORT_PRIVATE MemoryAllocator {
   // Returns a MemoryChunk in which the memory region from commit_area_size to
   // reserve_area_size of the chunk area is reserved but not committed, it
   // could be committed later by calling MemoryChunk::CommitArea.
-  MemoryChunk* AllocateChunk(size_t reserve_area_size, size_t commit_area_size,
-                             Executability executable, Space* space);
+  V8_EXPORT_PRIVATE MemoryChunk* AllocateChunk(size_t reserve_area_size,
+                                               size_t commit_area_size,
+                                               Executability executable,
+                                               Space* space);
   Address AllocateAlignedMemory(size_t reserve_size, size_t commit_size,
                                 size_t alignment, Executability executable,
@@ -1552,15 +1551,28 @@ class V8_EXPORT_PRIVATE MemoryAllocator {
   DISALLOW_IMPLICIT_CONSTRUCTORS(MemoryAllocator);
 };
-extern template Page*
-MemoryAllocator::AllocatePage<MemoryAllocator::kRegular, PagedSpace>(
-    size_t size, PagedSpace* owner, Executability executable);
-extern template Page*
-MemoryAllocator::AllocatePage<MemoryAllocator::kRegular, SemiSpace>(
-    size_t size, SemiSpace* owner, Executability executable);
-extern template Page*
-MemoryAllocator::AllocatePage<MemoryAllocator::kPooled, SemiSpace>(
-    size_t size, SemiSpace* owner, Executability executable);
+extern template EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE)
+    Page* MemoryAllocator::AllocatePage<MemoryAllocator::kRegular, PagedSpace>(
+        size_t size, PagedSpace* owner, Executability executable);
+extern template EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE)
+    Page* MemoryAllocator::AllocatePage<MemoryAllocator::kRegular, SemiSpace>(
+        size_t size, SemiSpace* owner, Executability executable);
+extern template EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE)
+    Page* MemoryAllocator::AllocatePage<MemoryAllocator::kPooled, SemiSpace>(
+        size_t size, SemiSpace* owner, Executability executable);
+extern template EXPORT_TEMPLATE_DECLARE(
+    V8_EXPORT_PRIVATE) void MemoryAllocator::
+    Free<MemoryAllocator::kFull>(MemoryChunk* chunk);
+extern template EXPORT_TEMPLATE_DECLARE(
+    V8_EXPORT_PRIVATE) void MemoryAllocator::
+    Free<MemoryAllocator::kAlreadyPooled>(MemoryChunk* chunk);
+extern template EXPORT_TEMPLATE_DECLARE(
+    V8_EXPORT_PRIVATE) void MemoryAllocator::
+    Free<MemoryAllocator::kPreFreeAndQueue>(MemoryChunk* chunk);
+extern template EXPORT_TEMPLATE_DECLARE(
+    V8_EXPORT_PRIVATE) void MemoryAllocator::
+    Free<MemoryAllocator::kPooledAndQueue>(MemoryChunk* chunk);
 // -----------------------------------------------------------------------------
 // Interface for heap object iterator to be implemented by all object space
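
A note on the hunk above: the extern template declarations switch to EXPORT_TEMPLATE_DECLARE because compilers disagree about where an export annotation may sit on an explicit instantiation declaration. As a rough sketch of the mechanism (the helper macros come from a Chromium-style export-template header): on GCC/Clang the macro expands to the export attribute itself, so the first declaration becomes approximately

    extern template __attribute__((visibility("default"))) Page*
    MemoryAllocator::AllocatePage<MemoryAllocator::kRegular, PagedSpace>(
        size_t size, PagedSpace* owner, Executability executable);

while on MSVC EXPORT_TEMPLATE_DECLARE expands to nothing and the matching EXPORT_TEMPLATE_DEFINE at the instantiation site carries the __declspec(dllexport) instead.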
@@ -1802,7 +1814,7 @@ class AllocationStats {
 // words in size.
 // At least 16384 words (huge): This list is for objects of 2048 words or
 // larger. Empty pages are also added to this list.
-class V8_EXPORT_PRIVATE FreeList {
+class FreeList {
  public:
   // This method returns how much memory can be allocated after freeing
   // maximum_freed memory.
@@ -1884,7 +1896,7 @@ class V8_EXPORT_PRIVATE FreeList {
   // Used after booting the VM.
   void RepairLists(Heap* heap);
-  size_t EvictFreeListItems(Page* page);
+  V8_EXPORT_PRIVATE size_t EvictFreeListItems(Page* page);
   bool ContainsPageFreeListItems(Page* page);
   size_t wasted_bytes() { return wasted_bytes_; }
@@ -1907,7 +1919,7 @@ class V8_EXPORT_PRIVATE FreeList {
   }
   bool AddCategory(FreeListCategory* category);
-  void RemoveCategory(FreeListCategory* category);
+  V8_EXPORT_PRIVATE void RemoveCategory(FreeListCategory* category);
   void PrintCategories(FreeListCategoryType type);
   // Returns a page containing an entry for a given type, or nullptr otherwise.
@@ -2043,11 +2055,11 @@ class LocalAllocationBuffer {
   inline bool TryFreeLast(HeapObject object, int object_size);
   // Close a LAB, effectively invalidating it. Returns the unused area.
-  LinearAllocationArea Close();
+  V8_EXPORT_PRIVATE LinearAllocationArea Close();
  private:
-  LocalAllocationBuffer(Heap* heap,
-                        LinearAllocationArea allocation_info) V8_NOEXCEPT;
+  V8_EXPORT_PRIVATE LocalAllocationBuffer(
+      Heap* heap, LinearAllocationArea allocation_info) V8_NOEXCEPT;
   Heap* heap_;
   LinearAllocationArea allocation_info_;
@@ -2539,7 +2551,7 @@ class SemiSpace : public Space {
   std::unique_ptr<ObjectIterator> GetObjectIterator() override;
 #ifdef DEBUG
-  void Print() override;
+  V8_EXPORT_PRIVATE void Print() override;
   // Validate a range of of addresses in a SemiSpace.
   // The "from" address must be on a page prior to the "to" address,
   // in the linked page order, or it must be earlier on the same page.
@@ -2615,7 +2627,8 @@ class SemiSpaceIterator : public ObjectIterator {
 // The new space consists of a contiguous pair of semispaces. It simply
 // forwards most functions to the appropriate semispace.
-class NewSpace : public SpaceWithLinearArea {
+class V8_EXPORT_PRIVATE NewSpace
+    : NON_EXPORTED_BASE(public SpaceWithLinearArea) {
  public:
   using iterator = PageIterator;
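
A note on NON_EXPORTED_BASE in the hunk above: MSVC warns (C4275) when an exported class derives from a non-exported base. Chromium-style headers wrap the base specifier in this macro to suppress the warning, and on other toolchains it plausibly just passes its argument through, roughly:

    // Illustrative sketch; the actual macro lives in a compiler-support
    // header and may differ in detail.
    #if defined(_MSC_VER)
    #  define NON_EXPORTED_BASE(code) \
          __pragma(warning(suppress : 4275)) code
    #else
    #  define NON_EXPORTED_BASE(code) code
    #endif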
@@ -2855,7 +2868,7 @@ class NewSpace : public SpaceWithLinearArea {
   friend class SemiSpaceIterator;
 };
-class PauseAllocationObserversScope {
+class V8_EXPORT_PRIVATE PauseAllocationObserversScope {
  public:
   explicit PauseAllocationObserversScope(Heap* heap);
   ~PauseAllocationObserversScope();
@@ -2886,7 +2899,6 @@ class V8_EXPORT_PRIVATE CompactionSpace : public PagedSpace {
       int size_in_bytes) override;
 };
-
 // A collection of |CompactionSpace|s used by a single compaction task.
 class CompactionSpaceCollection : public Malloced {
  public:
@@ -2990,7 +3002,7 @@ class ReadOnlySpace : public PagedSpace {
   bool writable() const { return !is_marked_read_only_; }
-  void ClearStringPaddingIfNeeded();
+  V8_EXPORT_PRIVATE void ClearStringPaddingIfNeeded();
   void MarkAsReadOnly();
   // Make the heap forget the space for memory bookkeeping purposes
   // (e.g. prevent space's memory from registering as leaked).
@@ -3052,7 +3064,7 @@ class LargeObjectSpace : public Space {
   void PromoteNewLargeObject(LargePage* page);
   // Checks whether a heap object is in this space; O(1).
-  bool Contains(HeapObject obj);
+  V8_EXPORT_PRIVATE bool Contains(HeapObject obj);
   // Checks whether an address is in the object area in this space. Iterates
   // all objects in the space. May be slow.
   bool ContainsSlow(Address addr);
@@ -3100,7 +3112,8 @@ class NewLargeObjectSpace : public LargeObjectSpace {
  public:
   NewLargeObjectSpace(Heap* heap, size_t capacity);
-  V8_WARN_UNUSED_RESULT AllocationResult AllocateRaw(int object_size);
+  V8_EXPORT_PRIVATE V8_WARN_UNUSED_RESULT AllocationResult
+  AllocateRaw(int object_size);
   // Available bytes for objects in this space.
   size_t Available() override;
@@ -96,7 +96,7 @@ class Sweeper {
   // and the main thread can sweep lazily, but the background sweeper tasks
   // are not running yet.
   void StartSweeping();
-  void StartSweeperTasks();
+  V8_EXPORT_PRIVATE void StartSweeperTasks();
   void EnsureCompleted();
   bool AreSweeperTasksRunning();