Commit 6ca8453c authored by Leszek Swirski, committed by Commit Bot

[ptr-cmpr] Remove runtime Isolate allocation flag

Remove the runtime functionality allowing the Isolate to be allocated
4GB aligned in non-pointer-compressed builds. This was barely used in
tests, so we can remove it to give slightly stronger compile-time
guarantees about pointer-compression-only methods being used only under
pointer-compression.

Change-Id: I8eb990faa8f8499ecdcb70ca104ffad4be1437b2
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2442790
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Commit-Queue: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/master@{#70262}
parent a65c5fb7
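Note: the pattern removed by this commit is easiest to see in miniature. Before the change, IsolateAllocator branched at runtime on an IsolateAllocationMode argument; afterwards the strategy is fixed at compile time by V8_COMPRESS_POINTERS, so a non-compressed build cannot even request a heap-reserved Isolate. The following minimal, self-contained sketch shows that before/after shape — the COMPRESS_POINTERS macro and toy Isolate type are stand-ins for illustration, not V8's actual code:

// sketch.cc -- illustrative only; toy types, not V8's implementation.
#include <cassert>
#include <new>

struct Isolate {
  int payload = 0;
};

class IsolateAllocator {
 public:
  IsolateAllocator() {
#ifdef COMPRESS_POINTERS  // stand-in for V8_COMPRESS_POINTERS
    // A real implementation would reserve an aligned region and commit
    // pages inside it (cf. InitReservation / CommitPagesForIsolate).
    memory_ =
        ::operator new(sizeof(Isolate), std::align_val_t{alignof(Isolate)});
#else
    // Plain C++-heap allocation, matching the old kInCppHeap path.
    memory_ = ::operator new(sizeof(Isolate));
#endif
  }
  ~IsolateAllocator() {
#ifdef COMPRESS_POINTERS
    ::operator delete(memory_, std::align_val_t{alignof(Isolate)});
#else
    ::operator delete(memory_);
#endif
  }
  void* isolate_memory() const { return memory_; }

 private:
  void* memory_ = nullptr;
};

int main() {
  IsolateAllocator allocator;  // no runtime mode argument anymore
  Isolate* isolate = new (allocator.isolate_memory()) Isolate();
  assert(isolate->payload == 0);
  isolate->~Isolate();  // placement-new objects need explicit destruction
}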
@@ -483,20 +483,6 @@ inline std::ostream& operator<<(std::ostream& os, DeoptimizeKind kind) {
   UNREACHABLE();
 }
 
-enum class IsolateAllocationMode {
-  // Allocate Isolate in C++ heap using default new/delete operators.
-  kInCppHeap,
-
-  // Allocate Isolate in a committed region inside V8 heap reservation.
-  kInV8Heap,
-
-#ifdef V8_COMPRESS_POINTERS
-  kDefault = kInV8Heap,
-#else
-  kDefault = kInCppHeap,
-#endif
-};
-
 // Indicates whether the lookup is related to sloppy-mode block-scoped
 // function hoisting, and is a synthetic assignment for that.
 enum class LookupHoistingMode { kNormal, kLegacySloppy };
......
@@ -12,7 +12,7 @@
 namespace v8 {
 namespace internal {
 
-#if V8_TARGET_ARCH_64_BIT
+#ifdef V8_COMPRESS_POINTERS
 // Compresses full-pointer representation of a tagged value to on-heap
 // representation.
 V8_INLINE Tagged_t CompressTagged(Address tagged) {
@@ -57,15 +57,11 @@ V8_INLINE Address DecompressTaggedAny(TOnHeapAddress on_heap_addr,
   return DecompressTaggedPointer(on_heap_addr, raw_value);
 }
 
-#ifdef V8_COMPRESS_POINTERS
 STATIC_ASSERT(kPtrComprHeapReservationSize ==
               Internals::kPtrComprHeapReservationSize);
 STATIC_ASSERT(kPtrComprIsolateRootAlignment ==
               Internals::kPtrComprIsolateRootAlignment);
-#endif  // V8_COMPRESS_POINTERS
 
 #else
 
 V8_INLINE Tagged_t CompressTagged(Address tagged) { UNREACHABLE(); }
@@ -88,7 +84,7 @@ V8_INLINE Address DecompressTaggedAny(TOnHeapAddress on_heap_addr,
   UNREACHABLE();
 }
 
-#endif  // V8_TARGET_ARCH_64_BIT
+#endif  // V8_COMPRESS_POINTERS
 
 }  // namespace internal
 }  // namespace v8
......
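Note: for readers unfamiliar with the helpers being re-guarded above — under pointer compression a tagged value is stored on-heap as a 32-bit offset from a 4GB-aligned base. A rough 64-bit-only sketch of the arithmetic follows; the constant and function bodies are illustrative, not V8's exact implementation (which also handles Smi sign extension separately):

#include <cstdint>

using Address = uintptr_t;
using Tagged_t = uint32_t;

// Assumed 4GB alignment of the heap reservation's base (illustrative;
// assumes a 64-bit target so the constant fits in uintptr_t).
constexpr Address kIsolateRootAlignment = Address{4} * 1024 * 1024 * 1024;

// Compression keeps only the low 32 bits of the full tagged pointer.
Tagged_t CompressTagged(Address tagged) {
  return static_cast<Tagged_t>(tagged);
}

// Decompression recovers the base from any address inside the reservation
// (e.g. the Isolate root itself) and re-attaches it to the 32-bit offset.
Address DecompressTaggedPointer(Address on_heap_addr, Tagged_t raw_value) {
  Address base = on_heap_addr & ~(kIsolateRootAlignment - 1);
  return base + raw_value;
}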
@@ -7,7 +7,7 @@
 
 #include "src/common/globals.h"
 
-#if V8_TARGET_ARCH_64_BIT
+#ifdef V8_COMPRESS_POINTERS
 
 namespace v8 {
 namespace internal {
@@ -19,6 +19,6 @@ constexpr size_t kPtrComprIsolateRootAlignment = size_t{4} * GB;
 }  // namespace internal
 }  // namespace v8
 
-#endif  // V8_TARGET_ARCH_64_BIT
+#endif  // V8_COMPRESS_POINTERS
 
 #endif  // V8_COMMON_PTR_COMPR_H_
@@ -17,10 +17,6 @@
 namespace v8 {
 namespace internal {
 
-IsolateAllocationMode Isolate::isolate_allocation_mode() {
-  return isolate_allocator_->mode();
-}
-
 void Isolate::set_context(Context context) {
   DCHECK(context.is_null() || context.IsContext());
   thread_local_top()->context_ = context;
......
@@ -18,6 +18,7 @@
 #include "src/ast/ast-value-factory.h"
 #include "src/ast/scopes.h"
 #include "src/base/hashmap.h"
+#include "src/base/logging.h"
 #include "src/base/platform/platform.h"
 #include "src/base/sys-info.h"
 #include "src/base/utils/random-number-generator.h"
@@ -2857,18 +2858,16 @@ std::atomic<size_t> Isolate::non_disposed_isolates_;
 #endif  // DEBUG
 
 // static
-Isolate* Isolate::New(IsolateAllocationMode mode) {
+Isolate* Isolate::New() {
   // IsolateAllocator allocates the memory for the Isolate object according to
   // the given allocation mode.
   std::unique_ptr<IsolateAllocator> isolate_allocator =
-      std::make_unique<IsolateAllocator>(mode);
+      std::make_unique<IsolateAllocator>();
 
   // Construct Isolate object in the allocated memory.
   void* isolate_ptr = isolate_allocator->isolate_memory();
   Isolate* isolate = new (isolate_ptr) Isolate(std::move(isolate_allocator));
-#if V8_TARGET_ARCH_64_BIT
-  DCHECK_IMPLIES(
-      mode == IsolateAllocationMode::kInV8Heap,
-      IsAligned(isolate->isolate_root(), kPtrComprIsolateRootAlignment));
+#ifdef V8_COMPRESS_POINTERS
+  DCHECK(IsAligned(isolate->isolate_root(), kPtrComprIsolateRootAlignment));
 #endif
 
 #ifdef DEBUG
......
@@ -525,8 +525,7 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
   // Creates Isolate object. Must be used instead of constructing Isolate with
   // new operator.
-  static Isolate* New(
-      IsolateAllocationMode mode = IsolateAllocationMode::kDefault);
+  static Isolate* New();
 
   // Deletes Isolate object. Must be used instead of delete operator.
   // Destroys the non-default isolates.
@@ -538,9 +537,6 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
                   ReadOnlyHeap* ro_heap);
   void set_read_only_heap(ReadOnlyHeap* ro_heap) { read_only_heap_ = ro_heap; }
 
-  // Returns allocation mode of this isolate.
-  V8_INLINE IsolateAllocationMode isolate_allocation_mode();
-
   // Page allocator that must be used for allocating V8 heap pages.
   v8::PageAllocator* page_allocator();
......
@@ -12,20 +12,16 @@
 namespace v8 {
 namespace internal {
 
-IsolateAllocator::IsolateAllocator(IsolateAllocationMode mode) {
-#if V8_TARGET_ARCH_64_BIT
-  if (mode == IsolateAllocationMode::kInV8Heap) {
-    Address heap_reservation_address = InitReservation();
-    CommitPagesForIsolate(heap_reservation_address);
-    return;
-  }
-#endif  // V8_TARGET_ARCH_64_BIT
-
+IsolateAllocator::IsolateAllocator() {
+#ifdef V8_COMPRESS_POINTERS
+  Address heap_reservation_address = InitReservation();
+  CommitPagesForIsolate(heap_reservation_address);
+#else
   // Allocate Isolate in C++ heap.
-  CHECK_EQ(mode, IsolateAllocationMode::kInCppHeap);
   page_allocator_ = GetPlatformPageAllocator();
   isolate_memory_ = ::operator new(sizeof(Isolate));
   DCHECK(!reservation_.IsReserved());
+#endif  // V8_COMPRESS_POINTERS
 }
 
 IsolateAllocator::~IsolateAllocator() {
@@ -38,7 +34,7 @@ IsolateAllocator::~IsolateAllocator() {
   ::operator delete(isolate_memory_);
 }
 
-#if V8_TARGET_ARCH_64_BIT
+#ifdef V8_COMPRESS_POINTERS
 
 namespace {
@@ -192,7 +188,7 @@ void IsolateAllocator::CommitPagesForIsolate(Address heap_reservation_address) {
   }
   isolate_memory_ = reinterpret_cast<void*>(isolate_address);
 }
-#endif  // V8_TARGET_ARCH_64_BIT
+#endif  // V8_COMPRESS_POINTERS
 
 }  // namespace internal
 }  // namespace v8
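Note: the constructor above now always takes the InitReservation() path under pointer compression. The usual way to obtain an address-space reservation aligned to such a large boundary is to over-reserve and trim. Below is a hedged, POSIX-only sketch of that idea; V8's real InitReservation is more elaborate (it retries on failure and wires the region into a BoundedPageAllocator), and the names here are illustrative:

#include <cstddef>
#include <cstdint>
#include <sys/mman.h>

constexpr uintptr_t kAlignment = uintptr_t{4} << 30;  // 4GB boundary
constexpr size_t kSize = size_t{4} << 30;             // 4GB reservation

void* ReserveAligned() {
  // Over-reserve so an aligned sub-range of kSize is guaranteed to exist.
  size_t padded = kSize + kAlignment;
  void* raw = mmap(nullptr, padded, PROT_NONE,
                   MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, -1, 0);
  if (raw == MAP_FAILED) return nullptr;
  uintptr_t start = reinterpret_cast<uintptr_t>(raw);
  uintptr_t aligned = (start + kAlignment - 1) & ~(kAlignment - 1);
  // Trim the unaligned head and tail so only [aligned, aligned+kSize) stays.
  if (aligned > start) munmap(raw, aligned - start);
  uintptr_t end = start + padded;
  if (end > aligned + kSize)
    munmap(reinterpret_cast<void*>(aligned + kSize), end - (aligned + kSize));
  return reinterpret_cast<void*>(aligned);
}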
@@ -22,7 +22,8 @@ class BoundedPageAllocator;
 namespace internal {
 
 // IsolateAllocator object is responsible for allocating memory for one (!)
-// Isolate object. Depending on the allocation mode the memory can be allocated
+// Isolate object. Depending on whether pointer compression is enabled,
+// the memory can be allocated
 // 1) in the C++ heap (when pointer compression is disabled)
 // 2) in a proper part of a properly aligned region of a reserved address space
 //    (when pointer compression is enabled).
@@ -34,18 +35,13 @@ namespace internal {
 // Isolate::Delete() takes care of the proper order of the objects destruction.
 class V8_EXPORT_PRIVATE IsolateAllocator final {
  public:
-  explicit IsolateAllocator(IsolateAllocationMode mode);
+  IsolateAllocator();
   ~IsolateAllocator();
 
   void* isolate_memory() const { return isolate_memory_; }
 
   v8::PageAllocator* page_allocator() const { return page_allocator_; }
 
-  IsolateAllocationMode mode() {
-    return reservation_.IsReserved() ? IsolateAllocationMode::kInV8Heap
-                                     : IsolateAllocationMode::kInCppHeap;
-  }
-
  private:
   Address InitReservation();
   void CommitPagesForIsolate(Address heap_reservation_address);
......
@@ -19,8 +19,7 @@
 namespace v8 {
 namespace internal {
 
-using HeapTest = TestWithIsolate;
-using HeapWithPointerCompressionTest = TestWithIsolateAndPointerCompression;
+using HeapTest = TestWithContext;
 
 TEST(Heap, YoungGenerationSizeFromOldGenerationSize) {
   const size_t MB = static_cast<size_t>(i::MB);
@@ -136,8 +135,8 @@ TEST_F(HeapTest, ExternalLimitStaysAboveDefaultForExplicitHandling) {
   EXPECT_GE(heap->external_memory_limit(), kExternalAllocationSoftLimit);
 }
 
-#if V8_TARGET_ARCH_64_BIT
-TEST_F(HeapWithPointerCompressionTest, HeapLayout) {
+#ifdef V8_COMPRESS_POINTERS
+TEST_F(HeapTest, HeapLayout) {
   // Produce some garbage.
   RunJS(
       "let ar = [];"
@@ -163,7 +162,7 @@ TEST_F(HeapWithPointerCompressionTest, HeapLayout) {
     EXPECT_TRUE(heap_reservation.contains(address, size));
   }
 }
-#endif  // V8_TARGET_ARCH_64_BIT
+#endif  // V8_COMPRESS_POINTERS
 
 }  // namespace internal
 }  // namespace v8
@@ -289,18 +289,15 @@ TEST_F(SequentialUnmapperTest, UnmapOnTeardownAfterAlreadyFreeingPooled) {
   tracking_page_allocator()->CheckPagePermissions(page->address(), page_size,
                                                   PageAllocator::kNoAccess);
   unmapper()->TearDown();
-  if (i_isolate()->isolate_allocation_mode() ==
-      IsolateAllocationMode::kInV8Heap) {
-    // In this mode Isolate uses bounded page allocator which allocates pages
-    // inside prereserved region. Thus these pages are kept reserved until
-    // the Isolate dies.
-    tracking_page_allocator()->CheckPagePermissions(page->address(), page_size,
-                                                    PageAllocator::kNoAccess);
-  } else {
-    CHECK_EQ(IsolateAllocationMode::kInCppHeap,
-             i_isolate()->isolate_allocation_mode());
-    tracking_page_allocator()->CheckIsFree(page->address(), page_size);
-  }
+#ifdef V8_COMPRESS_POINTERS
+  // In this mode Isolate uses bounded page allocator which allocates pages
+  // inside prereserved region. Thus these pages are kept reserved until
+  // the Isolate dies.
+  tracking_page_allocator()->CheckPagePermissions(page->address(), page_size,
+                                                  PageAllocator::kNoAccess);
+#else
  tracking_page_allocator()->CheckIsFree(page->address(), page_size);
+#endif  // V8_COMPRESS_POINTERS
 }
 
 // See v8:5945.
@@ -318,18 +315,15 @@ TEST_F(SequentialUnmapperTest, UnmapOnTeardown) {
   tracking_page_allocator()->CheckPagePermissions(page->address(), page_size,
                                                   PageAllocator::kReadWrite);
   unmapper()->TearDown();
-  if (i_isolate()->isolate_allocation_mode() ==
-      IsolateAllocationMode::kInV8Heap) {
-    // In this mode Isolate uses bounded page allocator which allocates pages
-    // inside prereserved region. Thus these pages are kept reserved until
-    // the Isolate dies.
-    tracking_page_allocator()->CheckPagePermissions(page->address(), page_size,
-                                                    PageAllocator::kNoAccess);
-  } else {
-    CHECK_EQ(IsolateAllocationMode::kInCppHeap,
-             i_isolate()->isolate_allocation_mode());
-    tracking_page_allocator()->CheckIsFree(page->address(), page_size);
-  }
+#ifdef V8_COMPRESS_POINTERS
+  // In this mode Isolate uses bounded page allocator which allocates pages
+  // inside prereserved region. Thus these pages are kept reserved until
+  // the Isolate dies.
+  tracking_page_allocator()->CheckPagePermissions(page->address(), page_size,
+                                                  PageAllocator::kNoAccess);
+#else
+  tracking_page_allocator()->CheckIsFree(page->address(), page_size);
+#endif  // V8_COMPRESS_POINTERS
 }
 
 }  // namespace internal
......
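Note: the rewritten expectations above encode a real behavioral difference. With pointer compression, the Isolate's pages come from a BoundedPageAllocator that carves them out of the pre-reserved region, so "freeing" a page recycles it inside the reservation rather than returning it to the OS. A toy model of that distinction (hypothetical class, not V8's BoundedPageAllocator):

#include <cassert>
#include <cstdint>
#include <vector>

// Toy stand-in for a bounded page allocator: every page lives inside one
// fixed [begin, end) reservation, so releasing a page can only recycle it.
class ToyBoundedAllocator {
 public:
  ToyBoundedAllocator(uintptr_t begin, uintptr_t size)
      : next_(begin), end_(begin + size) {}

  uintptr_t AllocatePage(uintptr_t page_size) {
    if (!free_.empty()) {  // reuse a previously "freed" page first
      uintptr_t page = free_.back();
      free_.pop_back();
      return page;
    }
    assert(next_ + page_size <= end_);
    uintptr_t page = next_;
    next_ += page_size;
    return page;
  }

  // The OS-level reservation is untouched here -- which is exactly why the
  // tests above expect kNoAccess (still reserved) rather than "free".
  void FreePage(uintptr_t page) { free_.push_back(page); }

 private:
  uintptr_t next_, end_;
  std::vector<uintptr_t> free_;
};

int main() {
  ToyBoundedAllocator alloc(0x10000000, uintptr_t{1} << 20);
  uintptr_t p = alloc.AllocatePage(4096);
  alloc.FreePage(p);                      // recycled, not returned to OS
  assert(alloc.AllocatePage(4096) == p);  // the same page comes back
}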
@@ -22,8 +22,7 @@ namespace {
 CounterMap* kCurrentCounterMap = nullptr;
 }  // namespace
 
-IsolateWrapper::IsolateWrapper(CountersMode counters_mode,
-                               PointerCompressionMode pointer_compression_mode)
+IsolateWrapper::IsolateWrapper(CountersMode counters_mode)
     : array_buffer_allocator_(
           v8::ArrayBuffer::Allocator::NewDefaultAllocator()) {
   CHECK_NULL(kCurrentCounterMap);
@@ -47,13 +46,7 @@ IsolateWrapper::IsolateWrapper(CountersMode counters_mode,
     };
   }
 
-  if (pointer_compression_mode == kEnforcePointerCompression) {
-    isolate_ = reinterpret_cast<v8::Isolate*>(
-        i::Isolate::New(i::IsolateAllocationMode::kInV8Heap));
-    v8::Isolate::Initialize(isolate(), create_params);
-  } else {
-    isolate_ = v8::Isolate::New(create_params);
-  }
+  isolate_ = v8::Isolate::New(create_params);
   CHECK_NOT_NULL(isolate());
 }
......
@@ -27,20 +27,10 @@ using CounterMap = std::map<std::string, int>;
 enum CountersMode { kNoCounters, kEnableCounters };
 
-// When PointerCompressionMode is kEnforcePointerCompression, the Isolate is
-// created with pointer compression force enabled. When it's
-// kDefaultPointerCompression then the Isolate is created with the default
-// pointer compression state for the current build.
-enum PointerCompressionMode {
-  kDefaultPointerCompression,
-  kEnforcePointerCompression
-};
-
 // RAII-like Isolate instance wrapper.
 class IsolateWrapper final {
  public:
-  explicit IsolateWrapper(CountersMode counters_mode,
-                          PointerCompressionMode pointer_compression_mode);
+  explicit IsolateWrapper(CountersMode counters_mode);
   ~IsolateWrapper();
 
   v8::Isolate* isolate() const { return isolate_; }
@@ -56,13 +46,10 @@ class IsolateWrapper final {
 //
 // A set of mixins from which the test fixtures will be constructed.
 //
-template <typename TMixin, CountersMode kCountersMode = kNoCounters,
-          PointerCompressionMode kPointerCompressionMode =
-              kDefaultPointerCompression>
+template <typename TMixin, CountersMode kCountersMode = kNoCounters>
 class WithIsolateMixin : public TMixin {
  public:
-  WithIsolateMixin()
-      : isolate_wrapper_(kCountersMode, kPointerCompressionMode) {}
+  WithIsolateMixin() : isolate_wrapper_(kCountersMode) {}
 
   v8::Isolate* v8_isolate() const { return isolate_wrapper_.isolate(); }
@@ -70,10 +57,6 @@ class WithIsolateMixin : public TMixin {
   v8::IsolateWrapper isolate_wrapper_;
 };
 
-template <typename TMixin, CountersMode kCountersMode = kNoCounters>
-using WithPointerCompressionIsolateMixin =
-    WithIsolateMixin<TMixin, kCountersMode, kEnforcePointerCompression>;
-
 template <typename TMixin>
 class WithIsolateScopeMixin : public TMixin {
  public:
@@ -152,12 +135,6 @@ using TestWithContext =  //
     WithIsolateMixin<        //
         ::testing::Test>>>;
 
-using TestWithIsolateAndPointerCompression =  //
-    WithContextMixin<                         //
-        WithIsolateScopeMixin<                //
-            WithPointerCompressionIsolateMixin<  //
-                ::testing::Test>>>;
-
 namespace internal {
 
 // Forward declarations.
......
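Note: with the forced-compression mixins gone, a test that needs the compressed heap layout simply uses the standard fixtures and guards itself with the build-time macro. A hypothetical example of the resulting pattern (the MyPtrComprTest alias and test body are invented for illustration):

#include "test/unittests/test-utils.h"

namespace v8 {

#ifdef V8_COMPRESS_POINTERS
// Plain fixture; in a pointer-compression build the Isolate is always
// allocated inside the 4GB-aligned reservation, so no special mixin or
// runtime mode check is needed.
using MyPtrComprTest = TestWithContext;

TEST_F(MyPtrComprTest, CompressedHeapIsAvailable) {
  EXPECT_NE(isolate(), nullptr);
}
#endif  // V8_COMPRESS_POINTERS

}  // namespace v8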