Commit 541e3df5 authored by Igor Sheludko, committed by Commit Bot

[heap] Reimplement unmapper tests using tracking page allocator

in order to make the test compatible with the pointer compression friendly
heap layout.

Bug: v8:8182
Change-Id: I34a0c597b70687f7ae7dad19df60c94520fa349f
Reviewed-on: https://chromium-review.googlesource.com/c/1317818
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Cr-Commit-Position: refs/heads/master@{#57310}
parent 47ae51f7
...@@ -74,6 +74,13 @@ v8::PageAllocator* GetPlatformPageAllocator() { ...@@ -74,6 +74,13 @@ v8::PageAllocator* GetPlatformPageAllocator() {
return page_allocator.Get(); return page_allocator.Get();
} }
// Swaps in |new_page_allocator| as the process-wide platform page allocator
// and hands back the allocator that was active before the swap so tests can
// restore it later. Not thread-safe; for test setup/teardown only.
v8::PageAllocator* SetPlatformPageAllocatorForTesting(
    v8::PageAllocator* new_page_allocator) {
  // Read the current allocator first (this forces lazy initialization of the
  // slot) before overwriting it.
  v8::PageAllocator* previous_allocator = GetPlatformPageAllocator();
  *page_allocator.Pointer() = new_page_allocator;
  return previous_allocator;
}
void* Malloced::New(size_t size) { void* Malloced::New(size_t size) {
void* result = AllocWithRetry(size); void* result = AllocWithRetry(size);
if (result == nullptr) { if (result == nullptr) {
...@@ -158,7 +165,7 @@ void* AllocatePages(v8::PageAllocator* page_allocator, void* address, ...@@ -158,7 +165,7 @@ void* AllocatePages(v8::PageAllocator* page_allocator, void* address,
PageAllocator::Permission access) { PageAllocator::Permission access) {
DCHECK_NOT_NULL(page_allocator); DCHECK_NOT_NULL(page_allocator);
DCHECK_EQ(address, AlignedAddress(address, alignment)); DCHECK_EQ(address, AlignedAddress(address, alignment));
DCHECK_EQ(0UL, size & (page_allocator->AllocatePageSize() - 1)); DCHECK(IsAligned(size, page_allocator->AllocatePageSize()));
void* result = nullptr; void* result = nullptr;
for (int i = 0; i < kAllocationTries; ++i) { for (int i = 0; i < kAllocationTries; ++i) {
result = page_allocator->AllocatePages(address, size, alignment, access); result = page_allocator->AllocatePages(address, size, alignment, access);
...@@ -172,7 +179,7 @@ void* AllocatePages(v8::PageAllocator* page_allocator, void* address, ...@@ -172,7 +179,7 @@ void* AllocatePages(v8::PageAllocator* page_allocator, void* address,
bool FreePages(v8::PageAllocator* page_allocator, void* address, bool FreePages(v8::PageAllocator* page_allocator, void* address,
const size_t size) { const size_t size) {
DCHECK_NOT_NULL(page_allocator); DCHECK_NOT_NULL(page_allocator);
DCHECK_EQ(0UL, size & (page_allocator->AllocatePageSize() - 1)); DCHECK(IsAligned(size, page_allocator->AllocatePageSize()));
return page_allocator->FreePages(address, size); return page_allocator->FreePages(address, size);
} }
...@@ -180,6 +187,7 @@ bool ReleasePages(v8::PageAllocator* page_allocator, void* address, size_t size, ...@@ -180,6 +187,7 @@ bool ReleasePages(v8::PageAllocator* page_allocator, void* address, size_t size,
size_t new_size) { size_t new_size) {
DCHECK_NOT_NULL(page_allocator); DCHECK_NOT_NULL(page_allocator);
DCHECK_LT(new_size, size); DCHECK_LT(new_size, size);
DCHECK(IsAligned(new_size, page_allocator->CommitPageSize()));
return page_allocator->ReleasePages(address, size, new_size); return page_allocator->ReleasePages(address, size, new_size);
} }
......
...@@ -86,6 +86,13 @@ void AlignedFree(void *ptr); ...@@ -86,6 +86,13 @@ void AlignedFree(void *ptr);
// Returns platfrom page allocator instance. Guaranteed to be a valid pointer. // Returns platfrom page allocator instance. Guaranteed to be a valid pointer.
V8_EXPORT_PRIVATE v8::PageAllocator* GetPlatformPageAllocator(); V8_EXPORT_PRIVATE v8::PageAllocator* GetPlatformPageAllocator();
// Sets the given page allocator as the platform page allocator and returns
// the current one. This function *must* be used only for testing purposes.
// It is not thread-safe and the testing infrastructure should ensure that
// the tests do not modify the value simultaneously.
V8_EXPORT_PRIVATE v8::PageAllocator* SetPlatformPageAllocatorForTesting(
v8::PageAllocator* page_allocator);
// Gets the page granularity for AllocatePages and FreePages. Addresses returned // Gets the page granularity for AllocatePages and FreePages. Addresses returned
// by AllocatePages and AllocatePage are aligned to this size. // by AllocatePages and AllocatePage are aligned to this size.
V8_EXPORT_PRIVATE size_t AllocatePageSize(); V8_EXPORT_PRIVATE size_t AllocatePageSize();
......
...@@ -258,6 +258,16 @@ size_t RegionAllocator::CheckRegion(Address address) { ...@@ -258,6 +258,16 @@ size_t RegionAllocator::CheckRegion(Address address) {
return region->size(); return region->size();
} }
// Returns true when no pages are allocated anywhere inside the queried range
// [address, address + size).
bool RegionAllocator::IsFree(Address address, size_t size) {
  CHECK(contains(address, size));
  auto it = FindRegion(address);
  // No bookkeeping entry covering this address means nothing was ever carved
  // out here, so the range is trivially free.
  if (it == all_regions_.end()) return true;
  Region* found = *it;
  // Free only if the covering region is unused and spans the whole range.
  return !found->is_used() && found->contains(address, size);
}
void RegionAllocator::Region::Print(std::ostream& os) const { void RegionAllocator::Region::Print(std::ostream& os) const {
std::ios::fmtflags flags = os.flags(std::ios::hex | std::ios::showbase); std::ios::fmtflags flags = os.flags(std::ios::hex | std::ios::showbase);
os << "[" << begin() << ", " << end() << "), size: " << size(); os << "[" << begin() << ", " << end() << "), size: " << size();
......
...@@ -60,6 +60,9 @@ class V8_BASE_EXPORT RegionAllocator final { ...@@ -60,6 +60,9 @@ class V8_BASE_EXPORT RegionAllocator final {
// otherwise 0. // otherwise 0.
size_t CheckRegion(Address address); size_t CheckRegion(Address address);
// Returns true if there are no pages allocated in given region.
bool IsFree(Address address, size_t size);
Address begin() const { return whole_region_.begin(); } Address begin() const { return whole_region_.begin(); }
Address end() const { return whole_region_.end(); } Address end() const { return whole_region_.end(); }
size_t size() const { return whole_region_.size(); } size_t size() const { return whole_region_.size(); }
......
...@@ -374,15 +374,15 @@ inline std::ostream& operator<<(std::ostream& os, DeoptimizeKind kind) { ...@@ -374,15 +374,15 @@ inline std::ostream& operator<<(std::ostream& os, DeoptimizeKind kind) {
// Determines where an Isolate instance is placed in memory.
// NOTE(review): reconstructed from a corrupted side-by-side diff rendering;
// this is the new-side (post-change) enum with the shortened enumerator names
// kInCppHeap / kInV8Heap.
enum class IsolateAllocationMode {
  // Allocate Isolate in C++ heap using default new/delete operators.
  kInCppHeap,

  // Allocate Isolate in a committed region inside V8 heap reservation.
  kInV8Heap,

#ifdef V8_COMPRESS_POINTERS
  kDefault = kInV8Heap,
#else
  kDefault = kInCppHeap,
#endif
};
......
...@@ -1107,16 +1107,17 @@ void MemoryAllocator::Free(MemoryChunk* chunk) { ...@@ -1107,16 +1107,17 @@ void MemoryAllocator::Free(MemoryChunk* chunk) {
} }
} }
template void MemoryAllocator::Free<MemoryAllocator::kFull>(MemoryChunk* chunk); template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE) void MemoryAllocator::Free<
MemoryAllocator::kFull>(MemoryChunk* chunk);
template void MemoryAllocator::Free<MemoryAllocator::kAlreadyPooled>( template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE) void MemoryAllocator::Free<
MemoryChunk* chunk); MemoryAllocator::kAlreadyPooled>(MemoryChunk* chunk);
template void MemoryAllocator::Free<MemoryAllocator::kPreFreeAndQueue>( template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE) void MemoryAllocator::Free<
MemoryChunk* chunk); MemoryAllocator::kPreFreeAndQueue>(MemoryChunk* chunk);
template void MemoryAllocator::Free<MemoryAllocator::kPooledAndQueue>( template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE) void MemoryAllocator::Free<
MemoryChunk* chunk); MemoryAllocator::kPooledAndQueue>(MemoryChunk* chunk);
template <MemoryAllocator::AllocationMode alloc_mode, typename SpaceType> template <MemoryAllocator::AllocationMode alloc_mode, typename SpaceType>
Page* MemoryAllocator::AllocatePage(size_t size, SpaceType* owner, Page* MemoryAllocator::AllocatePage(size_t size, SpaceType* owner,
...@@ -1136,14 +1137,14 @@ Page* MemoryAllocator::AllocatePage(size_t size, SpaceType* owner, ...@@ -1136,14 +1137,14 @@ Page* MemoryAllocator::AllocatePage(size_t size, SpaceType* owner,
return owner->InitializePage(chunk, executable); return owner->InitializePage(chunk, executable);
} }
template Page* template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE)
MemoryAllocator::AllocatePage<MemoryAllocator::kRegular, PagedSpace>( Page* MemoryAllocator::AllocatePage<MemoryAllocator::kRegular, PagedSpace>(
size_t size, PagedSpace* owner, Executability executable); size_t size, PagedSpace* owner, Executability executable);
template Page* template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE)
MemoryAllocator::AllocatePage<MemoryAllocator::kRegular, SemiSpace>( Page* MemoryAllocator::AllocatePage<MemoryAllocator::kRegular, SemiSpace>(
size_t size, SemiSpace* owner, Executability executable); size_t size, SemiSpace* owner, Executability executable);
template Page* template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE)
MemoryAllocator::AllocatePage<MemoryAllocator::kPooled, SemiSpace>( Page* MemoryAllocator::AllocatePage<MemoryAllocator::kPooled, SemiSpace>(
size_t size, SemiSpace* owner, Executability executable); size_t size, SemiSpace* owner, Executability executable);
LargePage* MemoryAllocator::AllocateLargePage(size_t size, LargePage* MemoryAllocator::AllocateLargePage(size_t size,
......
...@@ -15,6 +15,7 @@ ...@@ -15,6 +15,7 @@
#include "src/allocation.h" #include "src/allocation.h"
#include "src/base/atomic-utils.h" #include "src/base/atomic-utils.h"
#include "src/base/bounded-page-allocator.h" #include "src/base/bounded-page-allocator.h"
#include "src/base/export-template.h"
#include "src/base/iterator.h" #include "src/base/iterator.h"
#include "src/base/list.h" #include "src/base/list.h"
#include "src/base/platform/mutex.h" #include "src/base/platform/mutex.h"
...@@ -1200,11 +1201,11 @@ class V8_EXPORT_PRIVATE MemoryAllocator { ...@@ -1200,11 +1201,11 @@ class V8_EXPORT_PRIVATE MemoryAllocator {
return chunk; return chunk;
} }
void FreeQueuedChunks(); V8_EXPORT_PRIVATE void FreeQueuedChunks();
void CancelAndWaitForPendingTasks(); void CancelAndWaitForPendingTasks();
void PrepareForMarkCompact(); void PrepareForMarkCompact();
void EnsureUnmappingCompleted(); void EnsureUnmappingCompleted();
void TearDown(); V8_EXPORT_PRIVATE void TearDown();
size_t NumberOfCommittedChunks(); size_t NumberOfCommittedChunks();
int NumberOfChunks(); int NumberOfChunks();
size_t CommittedBufferedMemory(); size_t CommittedBufferedMemory();
...@@ -1290,12 +1291,14 @@ class V8_EXPORT_PRIVATE MemoryAllocator { ...@@ -1290,12 +1291,14 @@ class V8_EXPORT_PRIVATE MemoryAllocator {
// should be tried first. // should be tried first.
template <MemoryAllocator::AllocationMode alloc_mode = kRegular, template <MemoryAllocator::AllocationMode alloc_mode = kRegular,
typename SpaceType> typename SpaceType>
EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE)
Page* AllocatePage(size_t size, SpaceType* owner, Executability executable); Page* AllocatePage(size_t size, SpaceType* owner, Executability executable);
LargePage* AllocateLargePage(size_t size, LargeObjectSpace* owner, LargePage* AllocateLargePage(size_t size, LargeObjectSpace* owner,
Executability executable); Executability executable);
template <MemoryAllocator::FreeMode mode = kFull> template <MemoryAllocator::FreeMode mode = kFull>
EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE)
void Free(MemoryChunk* chunk); void Free(MemoryChunk* chunk);
// Returns allocated spaces in bytes. // Returns allocated spaces in bytes.
......
...@@ -12,7 +12,7 @@ namespace internal { ...@@ -12,7 +12,7 @@ namespace internal {
IsolateAllocator::IsolateAllocator(IsolateAllocationMode mode) { IsolateAllocator::IsolateAllocator(IsolateAllocationMode mode) {
#if V8_TARGET_ARCH_64_BIT #if V8_TARGET_ARCH_64_BIT
if (mode == IsolateAllocationMode::kAllocateInV8Heap) { if (mode == IsolateAllocationMode::kInV8Heap) {
Address heap_base = InitReservation(); Address heap_base = InitReservation();
CommitPagesForIsolate(heap_base); CommitPagesForIsolate(heap_base);
return; return;
...@@ -20,7 +20,7 @@ IsolateAllocator::IsolateAllocator(IsolateAllocationMode mode) { ...@@ -20,7 +20,7 @@ IsolateAllocator::IsolateAllocator(IsolateAllocationMode mode) {
#endif // V8_TARGET_ARCH_64_BIT #endif // V8_TARGET_ARCH_64_BIT
// Allocate Isolate in C++ heap. // Allocate Isolate in C++ heap.
CHECK_EQ(mode, IsolateAllocationMode::kAllocateInCppHeap); CHECK_EQ(mode, IsolateAllocationMode::kInCppHeap);
page_allocator_ = GetPlatformPageAllocator(); page_allocator_ = GetPlatformPageAllocator();
isolate_memory_ = ::operator new(sizeof(Isolate)); isolate_memory_ = ::operator new(sizeof(Isolate));
DCHECK(!reservation_.IsReserved()); DCHECK(!reservation_.IsReserved());
......
...@@ -39,6 +39,11 @@ class V8_EXPORT_PRIVATE IsolateAllocator final { ...@@ -39,6 +39,11 @@ class V8_EXPORT_PRIVATE IsolateAllocator final {
v8::PageAllocator* page_allocator() const { return page_allocator_; } v8::PageAllocator* page_allocator() const { return page_allocator_; }
// Reports whether this allocator placed the Isolate inside the V8 heap
// reservation or in the regular C++ heap, inferred from whether a virtual
// memory reservation was taken.
IsolateAllocationMode mode() {
  if (reservation_.IsReserved()) {
    return IsolateAllocationMode::kInV8Heap;
  }
  return IsolateAllocationMode::kInCppHeap;
}
private: private:
Address InitReservation(); Address InitReservation();
void CommitPagesForIsolate(Address heap_base); void CommitPagesForIsolate(Address heap_base);
......
...@@ -12,6 +12,10 @@ ...@@ -12,6 +12,10 @@
namespace v8 { namespace v8 {
namespace internal { namespace internal {
// Returns the allocation mode this Isolate was created with, as recorded by
// its IsolateAllocator (kInV8Heap when backed by a heap reservation,
// kInCppHeap otherwise).
IsolateAllocationMode Isolate::isolate_allocation_mode() {
  return isolate_allocator_->mode();
}
bool Isolate::FromWritableHeapObject(HeapObject* obj, Isolate** isolate) { bool Isolate::FromWritableHeapObject(HeapObject* obj, Isolate** isolate) {
i::MemoryChunk* chunk = i::MemoryChunk::FromHeapObject(obj); i::MemoryChunk* chunk = i::MemoryChunk::FromHeapObject(obj);
if (chunk->owner()->identity() == i::RO_SPACE) { if (chunk->owner()->identity() == i::RO_SPACE) {
......
...@@ -217,6 +217,7 @@ void Isolate::InitializeOncePerProcess() { ...@@ -217,6 +217,7 @@ void Isolate::InitializeOncePerProcess() {
base::Relaxed_Store(&isolate_key_created_, 1); base::Relaxed_Store(&isolate_key_created_, 1);
#endif #endif
per_isolate_thread_data_key_ = base::Thread::CreateThreadLocalKey(); per_isolate_thread_data_key_ = base::Thread::CreateThreadLocalKey();
init_memcopy_functions();
} }
Address Isolate::get_address_from_id(IsolateAddressId id) { Address Isolate::get_address_from_id(IsolateAddressId id) {
...@@ -2641,7 +2642,7 @@ Isolate* Isolate::New(IsolateAllocationMode mode) { ...@@ -2641,7 +2642,7 @@ Isolate* Isolate::New(IsolateAllocationMode mode) {
// Construct Isolate object in the allocated memory. // Construct Isolate object in the allocated memory.
void* isolate_ptr = isolate_allocator->isolate_memory(); void* isolate_ptr = isolate_allocator->isolate_memory();
Isolate* isolate = new (isolate_ptr) Isolate(std::move(isolate_allocator)); Isolate* isolate = new (isolate_ptr) Isolate(std::move(isolate_allocator));
DCHECK_IMPLIES(mode == IsolateAllocationMode::kAllocateInV8Heap, DCHECK_IMPLIES(mode == IsolateAllocationMode::kInV8Heap,
IsAligned(isolate->isolate_root(), size_t{4} * GB)); IsAligned(isolate->isolate_root(), size_t{4} * GB));
#ifdef DEBUG #ifdef DEBUG
...@@ -2719,8 +2720,6 @@ Isolate::Isolate(std::unique_ptr<i::IsolateAllocator> isolate_allocator) ...@@ -2719,8 +2720,6 @@ Isolate::Isolate(std::unique_ptr<i::IsolateAllocator> isolate_allocator)
InitializeLoggingAndCounters(); InitializeLoggingAndCounters();
debug_ = new Debug(this); debug_ = new Debug(this);
init_memcopy_functions();
if (FLAG_embedded_builtins) { if (FLAG_embedded_builtins) {
#ifdef V8_MULTI_SNAPSHOTS #ifdef V8_MULTI_SNAPSHOTS
if (FLAG_untrusted_code_mitigations) { if (FLAG_untrusted_code_mitigations) {
......
...@@ -573,6 +573,9 @@ class Isolate final : private HiddenFactory { ...@@ -573,6 +573,9 @@ class Isolate final : private HiddenFactory {
// for legacy API reasons. // for legacy API reasons.
static void Delete(Isolate* isolate); static void Delete(Isolate* isolate);
// Returns allocation mode of this isolate.
V8_INLINE IsolateAllocationMode isolate_allocation_mode();
// Page allocator that must be used for allocating V8 heap pages. // Page allocator that must be used for allocating V8 heap pages.
v8::PageAllocator* page_allocator(); v8::PageAllocator* page_allocator();
......
This diff is collapsed.
...@@ -22,7 +22,7 @@ IsolateWrapper::IsolateWrapper(bool enforce_pointer_compression) ...@@ -22,7 +22,7 @@ IsolateWrapper::IsolateWrapper(bool enforce_pointer_compression)
create_params.array_buffer_allocator = array_buffer_allocator_; create_params.array_buffer_allocator = array_buffer_allocator_;
if (enforce_pointer_compression) { if (enforce_pointer_compression) {
isolate_ = reinterpret_cast<v8::Isolate*>( isolate_ = reinterpret_cast<v8::Isolate*>(
i::Isolate::New(i::IsolateAllocationMode::kAllocateInV8Heap)); i::Isolate::New(i::IsolateAllocationMode::kInV8Heap));
v8::Isolate::Initialize(isolate_, create_params); v8::Isolate::Initialize(isolate_, create_params);
} else { } else {
isolate_ = v8::Isolate::New(create_params); isolate_ = v8::Isolate::New(create_params);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment