Commit 6ca8453c authored by Leszek Swirski, committed by Commit Bot

[ptr-cmpr] Remove runtime Isolate allocation flag

Remove the runtime functionality allowing the Isolate to be allocated
4GB aligned in non-pointer-compressed builds. This was barely used in
tests, so we can remove it to give slightly stronger compile-time
guarantees about pointer-compression-only methods being used only under
pointer compression.
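
As an illustration of that compile-time guarantee (hypothetical names, not
part of this change): a compression-only helper simply does not exist in
non-compressed builds, so a stray caller fails to compile instead of
tripping a runtime mode check.

#include <cstdint>

#ifdef V8_COMPRESS_POINTERS
// Only defined when the build compresses pointers.
inline uint32_t CompressForCage(uintptr_t tagged) {
  return static_cast<uint32_t>(tagged);  // keep the low 32 bits
}
#endif

void Example(uintptr_t tagged) {
#ifdef V8_COMPRESS_POINTERS
  (void)CompressForCage(tagged);
#else
  (void)tagged;
  // Calling CompressForCage() here would fail to compile; that is the
  // stronger guarantee referred to above.
#endif
}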

Change-Id: I8eb990faa8f8499ecdcb70ca104ffad4be1437b2
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2442790
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Commit-Queue: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/master@{#70262}
parent a65c5fb7
@@ -483,20 +483,6 @@ inline std::ostream& operator<<(std::ostream& os, DeoptimizeKind kind) {
UNREACHABLE();
}
enum class IsolateAllocationMode {
// Allocate Isolate in C++ heap using default new/delete operators.
kInCppHeap,
// Allocate Isolate in a committed region inside V8 heap reservation.
kInV8Heap,
#ifdef V8_COMPRESS_POINTERS
kDefault = kInV8Heap,
#else
kDefault = kInCppHeap,
#endif
};
// Indicates whether the lookup is related to sloppy-mode block-scoped
// function hoisting, and is a synthetic assignment for that.
enum class LookupHoistingMode { kNormal, kLegacySloppy };
@@ -12,7 +12,7 @@
namespace v8 {
namespace internal {
#if V8_TARGET_ARCH_64_BIT
#ifdef V8_COMPRESS_POINTERS
// Compresses full-pointer representation of a tagged value to on-heap
// representation.
V8_INLINE Tagged_t CompressTagged(Address tagged) {
@@ -57,15 +57,11 @@ V8_INLINE Address DecompressTaggedAny(TOnHeapAddress on_heap_addr,
return DecompressTaggedPointer(on_heap_addr, raw_value);
}
#ifdef V8_COMPRESS_POINTERS
STATIC_ASSERT(kPtrComprHeapReservationSize ==
Internals::kPtrComprHeapReservationSize);
STATIC_ASSERT(kPtrComprIsolateRootAlignment ==
Internals::kPtrComprIsolateRootAlignment);
#endif // V8_COMPRESS_POINTERS
#else
V8_INLINE Tagged_t CompressTagged(Address tagged) { UNREACHABLE(); }
@@ -88,7 +84,7 @@ V8_INLINE Address DecompressTaggedAny(TOnHeapAddress on_heap_addr,
UNREACHABLE();
}
#endif // V8_TARGET_ARCH_64_BIT
#endif // V8_COMPRESS_POINTERS
} // namespace internal
} // namespace v8
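
For context, the arithmetic behind CompressTagged and the DecompressTagged*
helpers can be modeled in a few self-contained lines. This is a simplified
sketch, not the real implementation (it ignores Smi sign extension and the
root-derivation details):

#include <cstdint>

using Address  = uintptr_t;
using Tagged_t = uint32_t;

// The isolate root is 4GB-aligned, so its low 32 bits are all zero.
constexpr Address kRootAlignment = Address{4} * 1024 * 1024 * 1024;

// Compression keeps only the low 32 bits of the full tagged pointer.
Tagged_t Compress(Address tagged) {
  return static_cast<Tagged_t>(tagged);
}

// Decompression re-attaches the high half. isolate_root is assumed to be
// kRootAlignment-aligned, so the addition cannot disturb the low 32 bits.
Address Decompress(Address isolate_root, Tagged_t raw_value) {
  return isolate_root + static_cast<Address>(raw_value);
}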
@@ -7,7 +7,7 @@
#include "src/common/globals.h"
#if V8_TARGET_ARCH_64_BIT
#ifdef V8_COMPRESS_POINTERS
namespace v8 {
namespace internal {
@@ -19,6 +19,6 @@ constexpr size_t kPtrComprIsolateRootAlignment = size_t{4} * GB;
} // namespace internal
} // namespace v8
#endif // V8_TARGET_ARCH_64_BIT
#endif // V8_COMPRESS_POINTERS
#endif // V8_COMMON_PTR_COMPR_H_
@@ -17,10 +17,6 @@
namespace v8 {
namespace internal {
IsolateAllocationMode Isolate::isolate_allocation_mode() {
return isolate_allocator_->mode();
}
void Isolate::set_context(Context context) {
DCHECK(context.is_null() || context.IsContext());
thread_local_top()->context_ = context;
@@ -18,6 +18,7 @@
#include "src/ast/ast-value-factory.h"
#include "src/ast/scopes.h"
#include "src/base/hashmap.h"
#include "src/base/logging.h"
#include "src/base/platform/platform.h"
#include "src/base/sys-info.h"
#include "src/base/utils/random-number-generator.h"
@@ -2857,18 +2858,16 @@ std::atomic<size_t> Isolate::non_disposed_isolates_;
#endif // DEBUG
// static
Isolate* Isolate::New(IsolateAllocationMode mode) {
Isolate* Isolate::New() {
// IsolateAllocator allocates the memory for the Isolate object according to
// the given allocation mode.
std::unique_ptr<IsolateAllocator> isolate_allocator =
std::make_unique<IsolateAllocator>(mode);
std::make_unique<IsolateAllocator>();
// Construct Isolate object in the allocated memory.
void* isolate_ptr = isolate_allocator->isolate_memory();
Isolate* isolate = new (isolate_ptr) Isolate(std::move(isolate_allocator));
#if V8_TARGET_ARCH_64_BIT
DCHECK_IMPLIES(
mode == IsolateAllocationMode::kInV8Heap,
IsAligned(isolate->isolate_root(), kPtrComprIsolateRootAlignment));
#ifdef V8_COMPRESS_POINTERS
DCHECK(IsAligned(isolate->isolate_root(), kPtrComprIsolateRootAlignment));
#endif
#ifdef DEBUG
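
The construction pattern above is worth spelling out: the allocator owns the
raw memory, and the Isolate built into that memory takes ownership of the
allocator, tying the two lifetimes together. A minimal sketch of the same
shape, with hypothetical names (not V8 code):

#include <memory>
#include <new>

struct Allocator {
  void* memory() { return storage_; }
  alignas(64) unsigned char storage_[256];  // stands in for reserved pages
};

struct Engine {
  explicit Engine(std::unique_ptr<Allocator> a) : allocator_(std::move(a)) {}
  std::unique_ptr<Allocator> allocator_;  // keeps its backing memory alive
};

Engine* NewEngine() {
  auto allocator = std::make_unique<Allocator>();
  void* mem = allocator->memory();
  // Placement-new the Engine into allocator-owned memory. Deletion must
  // move the allocator back out before destroying the Engine, which is the
  // ordering that Isolate::Delete takes care of.
  return new (mem) Engine(std::move(allocator));
}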
@@ -525,8 +525,7 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
// Creates Isolate object. Must be used instead of constructing Isolate with
// new operator.
static Isolate* New(
IsolateAllocationMode mode = IsolateAllocationMode::kDefault);
static Isolate* New();
// Deletes Isolate object. Must be used instead of delete operator.
// Destroys the non-default isolates.
@@ -538,9 +537,6 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
ReadOnlyHeap* ro_heap);
void set_read_only_heap(ReadOnlyHeap* ro_heap) { read_only_heap_ = ro_heap; }
// Returns allocation mode of this isolate.
V8_INLINE IsolateAllocationMode isolate_allocation_mode();
// Page allocator that must be used for allocating V8 heap pages.
v8::PageAllocator* page_allocator();
@@ -12,20 +12,16 @@
namespace v8 {
namespace internal {
IsolateAllocator::IsolateAllocator(IsolateAllocationMode mode) {
#if V8_TARGET_ARCH_64_BIT
if (mode == IsolateAllocationMode::kInV8Heap) {
Address heap_reservation_address = InitReservation();
CommitPagesForIsolate(heap_reservation_address);
return;
}
#endif // V8_TARGET_ARCH_64_BIT
IsolateAllocator::IsolateAllocator() {
#ifdef V8_COMPRESS_POINTERS
Address heap_reservation_address = InitReservation();
CommitPagesForIsolate(heap_reservation_address);
#else
// Allocate Isolate in C++ heap.
CHECK_EQ(mode, IsolateAllocationMode::kInCppHeap);
page_allocator_ = GetPlatformPageAllocator();
isolate_memory_ = ::operator new(sizeof(Isolate));
DCHECK(!reservation_.IsReserved());
#endif // V8_COMPRESS_POINTERS
}
IsolateAllocator::~IsolateAllocator() {
@@ -38,7 +34,7 @@ IsolateAllocator::~IsolateAllocator() {
::operator delete(isolate_memory_);
}
#if V8_TARGET_ARCH_64_BIT
#ifdef V8_COMPRESS_POINTERS
namespace {
@@ -192,7 +188,7 @@ void IsolateAllocator::CommitPagesForIsolate(Address heap_reservation_address) {
}
isolate_memory_ = reinterpret_cast<void*>(isolate_address);
}
#endif // V8_TARGET_ARCH_64_BIT
#endif // V8_COMPRESS_POINTERS
} // namespace internal
} // namespace v8
@@ -22,7 +22,8 @@ class BoundedPageAllocator;
namespace internal {
// IsolateAllocator object is responsible for allocating memory for one (!)
// Isolate object. Depending on the allocation mode the memory can be allocated
// Isolate object. Depending on whether pointer compression is enabled,
// the memory can be allocated
// 1) in the C++ heap (when pointer compression is disabled)
// 2) in a proper part of a properly aligned region of a reserved address space
// (when pointer compression is enabled).
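
The "properly aligned region" in case 2) is typically obtained by
over-reserving: a plain reservation is only page-aligned, so one reserves
size + alignment bytes and takes the first aligned address inside it. A
hedged, Linux/POSIX-only sketch of the idea (the real InitReservation is
more involved, retrying and releasing the excess):

#include <sys/mman.h>
#include <cstddef>
#include <cstdint>

// Reserve address space whose base is alignment-aligned. Error handling
// and trimming of the unused head/tail are elided.
void* ReserveAligned(size_t size, size_t alignment) {
  size_t padded = size + alignment;
  void* raw = mmap(nullptr, padded, PROT_NONE,
                   MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, -1, 0);
  if (raw == MAP_FAILED) return nullptr;
  uintptr_t base = reinterpret_cast<uintptr_t>(raw);
  uintptr_t aligned = (base + alignment - 1) & ~(alignment - 1);
  return reinterpret_cast<void*>(aligned);
}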
@@ -34,18 +35,13 @@ namespace internal {
// Isolate::Delete() takes care of the proper order of the objects destruction.
class V8_EXPORT_PRIVATE IsolateAllocator final {
public:
explicit IsolateAllocator(IsolateAllocationMode mode);
IsolateAllocator();
~IsolateAllocator();
void* isolate_memory() const { return isolate_memory_; }
v8::PageAllocator* page_allocator() const { return page_allocator_; }
IsolateAllocationMode mode() {
return reservation_.IsReserved() ? IsolateAllocationMode::kInV8Heap
: IsolateAllocationMode::kInCppHeap;
}
private:
Address InitReservation();
void CommitPagesForIsolate(Address heap_reservation_address);
@@ -19,8 +19,7 @@
namespace v8 {
namespace internal {
using HeapTest = TestWithIsolate;
using HeapWithPointerCompressionTest = TestWithIsolateAndPointerCompression;
using HeapTest = TestWithContext;
TEST(Heap, YoungGenerationSizeFromOldGenerationSize) {
const size_t MB = static_cast<size_t>(i::MB);
@@ -136,8 +135,8 @@ TEST_F(HeapTest, ExternalLimitStaysAboveDefaultForExplicitHandling) {
EXPECT_GE(heap->external_memory_limit(), kExternalAllocationSoftLimit);
}
#if V8_TARGET_ARCH_64_BIT
TEST_F(HeapWithPointerCompressionTest, HeapLayout) {
#ifdef V8_COMPRESS_POINTERS
TEST_F(HeapTest, HeapLayout) {
// Produce some garbage.
RunJS(
"let ar = [];"
@@ -163,7 +162,7 @@ TEST_F(HeapWithPointerCompressionTest, HeapLayout) {
EXPECT_TRUE(heap_reservation.contains(address, size));
}
}
#endif // V8_TARGET_ARCH_64_BIT
#endif // V8_COMPRESS_POINTERS
} // namespace internal
} // namespace v8
@@ -289,18 +289,15 @@ TEST_F(SequentialUnmapperTest, UnmapOnTeardownAfterAlreadyFreeingPooled) {
tracking_page_allocator()->CheckPagePermissions(page->address(), page_size,
PageAllocator::kNoAccess);
unmapper()->TearDown();
if (i_isolate()->isolate_allocation_mode() ==
IsolateAllocationMode::kInV8Heap) {
// In this mode Isolate uses bounded page allocator which allocates pages
// inside prereserved region. Thus these pages are kept reserved until
// the Isolate dies.
tracking_page_allocator()->CheckPagePermissions(page->address(), page_size,
PageAllocator::kNoAccess);
} else {
CHECK_EQ(IsolateAllocationMode::kInCppHeap,
i_isolate()->isolate_allocation_mode());
tracking_page_allocator()->CheckIsFree(page->address(), page_size);
}
#ifdef V8_COMPRESS_POINTERS
// In this mode Isolate uses bounded page allocator which allocates pages
// inside prereserved region. Thus these pages are kept reserved until
// the Isolate dies.
tracking_page_allocator()->CheckPagePermissions(page->address(), page_size,
PageAllocator::kNoAccess);
#else
tracking_page_allocator()->CheckIsFree(page->address(), page_size);
#endif // V8_COMPRESS_POINTERS
}
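
The two branches encode how a bounded page allocator differs from the
platform one: pages are carved out of a single pre-reserved region, so
freeing a page only changes its state while the region itself stays mapped
until the Isolate dies. A toy model of that behaviour (hypothetical, not
V8's BoundedPageAllocator API):

#include <cstddef>
#include <vector>

class BoundedRegion {
 public:
  explicit BoundedRegion(size_t num_pages) : in_use_(num_pages, false) {}

  void AllocatePage(size_t index) { in_use_[index] = true; }

  // "Freeing" only marks the page unused (kNoAccess in the real allocator).
  void FreePage(size_t index) { in_use_[index] = false; }

  // The address range stays reserved regardless of page state, which is why
  // the test above still expects kNoAccess rather than free after TearDown.
  bool IsReserved() const { return true; }

 private:
  std::vector<bool> in_use_;
};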
// See v8:5945.
@@ -318,18 +315,15 @@ TEST_F(SequentialUnmapperTest, UnmapOnTeardown) {
tracking_page_allocator()->CheckPagePermissions(page->address(), page_size,
PageAllocator::kReadWrite);
unmapper()->TearDown();
if (i_isolate()->isolate_allocation_mode() ==
IsolateAllocationMode::kInV8Heap) {
// In this mode Isolate uses bounded page allocator which allocates pages
// inside prereserved region. Thus these pages are kept reserved until
// the Isolate dies.
tracking_page_allocator()->CheckPagePermissions(page->address(), page_size,
PageAllocator::kNoAccess);
} else {
CHECK_EQ(IsolateAllocationMode::kInCppHeap,
i_isolate()->isolate_allocation_mode());
tracking_page_allocator()->CheckIsFree(page->address(), page_size);
}
#ifdef V8_COMPRESS_POINTERS
// In this mode Isolate uses bounded page allocator which allocates pages
// inside prereserved region. Thus these pages are kept reserved until
// the Isolate dies.
tracking_page_allocator()->CheckPagePermissions(page->address(), page_size,
PageAllocator::kNoAccess);
#else
tracking_page_allocator()->CheckIsFree(page->address(), page_size);
#endif // V8_COMPRESS_POINTERS
}
} // namespace internal
@@ -22,8 +22,7 @@ namespace {
CounterMap* kCurrentCounterMap = nullptr;
} // namespace
IsolateWrapper::IsolateWrapper(CountersMode counters_mode,
PointerCompressionMode pointer_compression_mode)
IsolateWrapper::IsolateWrapper(CountersMode counters_mode)
: array_buffer_allocator_(
v8::ArrayBuffer::Allocator::NewDefaultAllocator()) {
CHECK_NULL(kCurrentCounterMap);
@@ -47,13 +46,7 @@ IsolateWrapper::IsolateWrapper(CountersMode counters_mode,
};
}
if (pointer_compression_mode == kEnforcePointerCompression) {
isolate_ = reinterpret_cast<v8::Isolate*>(
i::Isolate::New(i::IsolateAllocationMode::kInV8Heap));
v8::Isolate::Initialize(isolate(), create_params);
} else {
isolate_ = v8::Isolate::New(create_params);
}
isolate_ = v8::Isolate::New(create_params);
CHECK_NOT_NULL(isolate());
}
@@ -27,20 +27,10 @@ using CounterMap = std::map<std::string, int>;
enum CountersMode { kNoCounters, kEnableCounters };
// When PointerCompressionMode is kEnforcePointerCompression, the Isolate is
// created with pointer compression force enabled. When it's
// kDefaultPointerCompression then the Isolate is created with the default
// pointer compression state for the current build.
enum PointerCompressionMode {
kDefaultPointerCompression,
kEnforcePointerCompression
};
// RAII-like Isolate instance wrapper.
class IsolateWrapper final {
public:
explicit IsolateWrapper(CountersMode counters_mode,
PointerCompressionMode pointer_compression_mode);
explicit IsolateWrapper(CountersMode counters_mode);
~IsolateWrapper();
v8::Isolate* isolate() const { return isolate_; }
@@ -56,13 +46,10 @@ class IsolateWrapper final {
//
// A set of mixins from which the test fixtures will be constructed.
//
template <typename TMixin, CountersMode kCountersMode = kNoCounters,
PointerCompressionMode kPointerCompressionMode =
kDefaultPointerCompression>
template <typename TMixin, CountersMode kCountersMode = kNoCounters>
class WithIsolateMixin : public TMixin {
public:
WithIsolateMixin()
: isolate_wrapper_(kCountersMode, kPointerCompressionMode) {}
WithIsolateMixin() : isolate_wrapper_(kCountersMode) {}
v8::Isolate* v8_isolate() const { return isolate_wrapper_.isolate(); }
@@ -70,10 +57,6 @@ class WithIsolateMixin : public TMixin {
v8::IsolateWrapper isolate_wrapper_;
};
template <typename TMixin, CountersMode kCountersMode = kNoCounters>
using WithPointerCompressionIsolateMixin =
WithIsolateMixin<TMixin, kCountersMode, kEnforcePointerCompression>;
template <typename TMixin>
class WithIsolateScopeMixin : public TMixin {
public:
@@ -152,12 +135,6 @@ using TestWithContext = //
WithIsolateMixin< //
::testing::Test>>>;
using TestWithIsolateAndPointerCompression = //
WithContextMixin< //
WithIsolateScopeMixin< //
WithPointerCompressionIsolateMixin< //
::testing::Test>>>;
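
The fixture aliases above are assembled from CRTP-style mixins: each mixin
derives from its TMixin parameter, so features compose by nesting and the
alias reads inside-out. A self-contained miniature of the pattern
(hypothetical names, no gtest dependency):

struct Base {};

// Each mixin layers one feature onto whatever it wraps.
template <typename TMixin>
struct WithCounters : TMixin {
  bool counters_enabled = true;
};

template <typename TMixin>
struct WithContext : TMixin {
  const char* context_name = "default";
};

// Like TestWithContext above: Base, then counters, then a context.
using MiniFixture = WithContext<WithCounters<Base>>;

int main() {
  MiniFixture f;
  return (f.counters_enabled && f.context_name) ? 0 : 1;
}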
namespace internal {
// Forward declarations.