Commit 3421ad20 authored by Igor Sheludko, committed by Commit Bot

[ptr-compr] Move IsolateData from Heap to Isolate

and also move embedder fields from Isolate to IsolateData.

The external memory counter fields are temporarily moved to IsolateData in
order to avoid unexpected Node JS bot failures which happen if the fields
are left in the Heap class.

Bug: v8:8182
Cq-Include-Trybots: luci.chromium.try:linux_chromium_rel_ng
Change-Id: I9d58f235c0ce40e110f595addd03b80b3617aa77
Reviewed-on: https://chromium-review.googlesource.com/c/1278793
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#57037}
parent 01d5da4b
...@@ -144,15 +144,18 @@ class Internals { ...@@ -144,15 +144,18 @@ class Internals {
static const int kExternalTwoByteRepresentationTag = 0x02; static const int kExternalTwoByteRepresentationTag = 0x02;
static const int kExternalOneByteRepresentationTag = 0x0a; static const int kExternalOneByteRepresentationTag = 0x0a;
static const uint32_t kNumIsolateDataSlots = 4;
static const int kIsolateEmbedderDataOffset = 0 * kApiPointerSize; static const int kIsolateEmbedderDataOffset = 0 * kApiPointerSize;
static const int kExternalMemoryOffset = 4 * kApiPointerSize; static const int kExternalMemoryOffset =
kNumIsolateDataSlots * kApiPointerSize;
static const int kExternalMemoryLimitOffset = static const int kExternalMemoryLimitOffset =
kExternalMemoryOffset + kApiInt64Size; kExternalMemoryOffset + kApiInt64Size;
static const int kExternalMemoryAtLastMarkCompactOffset = static const int kExternalMemoryAtLastMarkCompactOffset =
kExternalMemoryLimitOffset + kApiInt64Size; kExternalMemoryLimitOffset + kApiInt64Size;
static const int kIsolateRootsOffset = kExternalMemoryLimitOffset + static const int kIsolateRootsOffset =
kApiInt64Size + kApiInt64Size + kExternalMemoryAtLastMarkCompactOffset + kApiInt64Size;
kApiPointerSize + kApiPointerSize;
static const int kUndefinedValueRootIndex = 4; static const int kUndefinedValueRootIndex = 4;
static const int kTheHoleValueRootIndex = 5; static const int kTheHoleValueRootIndex = 5;
static const int kNullValueRootIndex = 6; static const int kNullValueRootIndex = 6;
...@@ -179,8 +182,6 @@ class Internals { ...@@ -179,8 +182,6 @@ class Internals {
static const int kUndefinedOddballKind = 5; static const int kUndefinedOddballKind = 5;
static const int kNullOddballKind = 3; static const int kNullOddballKind = 3;
static const uint32_t kNumIsolateDataSlots = 4;
// Soft limit for AdjustAmountOfExternalAllocatedMemory. Trigger an // Soft limit for AdjustAmountOfExternalAllocatedMemory. Trigger an
// incremental GC once the external memory reaches this limit. // incremental GC once the external memory reaches this limit.
static constexpr int kExternalAllocationSoftLimit = 64 * 1024 * 1024; static constexpr int kExternalAllocationSoftLimit = 64 * 1024 * 1024;
......
...@@ -38,6 +38,7 @@ ...@@ -38,6 +38,7 @@
#include <forward_list> #include <forward_list>
#include "src/deoptimize-reason.h" #include "src/deoptimize-reason.h"
#include "src/external-reference.h"
#include "src/flags.h" #include "src/flags.h"
#include "src/globals.h" #include "src/globals.h"
#include "src/handles.h" #include "src/handles.h"
......
...@@ -21,6 +21,7 @@ ...@@ -21,6 +21,7 @@
// TODO(mstarzinger): There is one more include to remove in order to no longer // TODO(mstarzinger): There is one more include to remove in order to no longer
// leak heap internals to users of this interface! // leak heap internals to users of this interface!
#include "src/heap/spaces-inl.h" #include "src/heap/spaces-inl.h"
#include "src/isolate-data.h"
#include "src/isolate.h" #include "src/isolate.h"
#include "src/log.h" #include "src/log.h"
#include "src/msan.h" #include "src/msan.h"
...@@ -55,6 +56,32 @@ HeapObject* AllocationResult::ToObjectChecked() { ...@@ -55,6 +56,32 @@ HeapObject* AllocationResult::ToObjectChecked() {
return HeapObject::cast(object_); return HeapObject::cast(object_);
} }
Isolate* Heap::isolate() {
return reinterpret_cast<Isolate*>(
reinterpret_cast<intptr_t>(this) -
reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(16)->heap()) + 16);
}
int64_t Heap::external_memory() {
return isolate()->isolate_data()->external_memory_;
}
void Heap::update_external_memory(int64_t delta) {
isolate()->isolate_data()->external_memory_ += delta;
}
void Heap::update_external_memory_concurrently_freed(intptr_t freed) {
external_memory_concurrently_freed_ += freed;
}
void Heap::account_external_memory_concurrently_freed() {
isolate()->isolate_data()->external_memory_ -=
external_memory_concurrently_freed_;
external_memory_concurrently_freed_ = 0;
}
RootsTable& Heap::roots_table() { return isolate()->roots_table(); }
// TODO(jkummerow): Drop std::remove_pointer after the migration to ObjectPtr. // TODO(jkummerow): Drop std::remove_pointer after the migration to ObjectPtr.
#define ROOT_ACCESSOR(Type, name, CamelName) \ #define ROOT_ACCESSOR(Type, name, CamelName) \
Type Heap::name() { \ Type Heap::name() { \
...@@ -564,12 +591,6 @@ void Heap::UpdateAllocationSite(Map* map, HeapObject* object, ...@@ -564,12 +591,6 @@ void Heap::UpdateAllocationSite(Map* map, HeapObject* object,
(*pretenuring_feedback)[reinterpret_cast<AllocationSite*>(key)]++; (*pretenuring_feedback)[reinterpret_cast<AllocationSite*>(key)]++;
} }
Isolate* Heap::isolate() {
return reinterpret_cast<Isolate*>(
reinterpret_cast<intptr_t>(this) -
reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(16)->heap()) + 16);
}
void Heap::ExternalStringTable::AddString(String* string) { void Heap::ExternalStringTable::AddString(String* string) {
DCHECK(string->IsExternalString()); DCHECK(string->IsExternalString());
DCHECK(!Contains(string)); DCHECK(!Contains(string));
......
...@@ -377,7 +377,7 @@ void Heap::PrintShortHeapStatistics() { ...@@ -377,7 +377,7 @@ void Heap::PrintShortHeapStatistics() {
memory_allocator()->unmapper()->NumberOfChunks(), memory_allocator()->unmapper()->NumberOfChunks(),
CommittedMemoryOfHeapAndUnmapper() / KB); CommittedMemoryOfHeapAndUnmapper() / KB);
PrintIsolate(isolate_, "External memory reported: %6" PRId64 " KB\n", PrintIsolate(isolate_, "External memory reported: %6" PRId64 " KB\n",
external_memory_ / KB); isolate()->isolate_data()->external_memory_ / KB);
PrintIsolate(isolate_, "Backing store memory: %6" PRIuS " KB\n", PrintIsolate(isolate_, "Backing store memory: %6" PRIuS " KB\n",
backing_store_bytes_ / KB); backing_store_bytes_ / KB);
PrintIsolate(isolate_, "External memory global %zu KB\n", PrintIsolate(isolate_, "External memory global %zu KB\n",
...@@ -1186,8 +1186,9 @@ void Heap::ReportExternalMemoryPressure() { ...@@ -1186,8 +1186,9 @@ void Heap::ReportExternalMemoryPressure() {
static_cast<GCCallbackFlags>( static_cast<GCCallbackFlags>(
kGCCallbackFlagSynchronousPhantomCallbackProcessing | kGCCallbackFlagSynchronousPhantomCallbackProcessing |
kGCCallbackFlagCollectAllExternalMemory); kGCCallbackFlagCollectAllExternalMemory);
if (external_memory_ > if (isolate()->isolate_data()->external_memory_ >
(external_memory_at_last_mark_compact_ + external_memory_hard_limit())) { (isolate()->isolate_data()->external_memory_at_last_mark_compact_ +
external_memory_hard_limit())) {
CollectAllGarbage( CollectAllGarbage(
kReduceMemoryFootprintMask, kReduceMemoryFootprintMask,
GarbageCollectionReason::kExternalMemoryPressure, GarbageCollectionReason::kExternalMemoryPressure,
...@@ -1209,10 +1210,12 @@ void Heap::ReportExternalMemoryPressure() { ...@@ -1209,10 +1210,12 @@ void Heap::ReportExternalMemoryPressure() {
// Incremental marking is turned on and has already been started. // Incremental marking is turned on and has already been started.
const double kMinStepSize = 5; const double kMinStepSize = 5;
const double kMaxStepSize = 10; const double kMaxStepSize = 10;
const double ms_step = const double ms_step = Min(
Min(kMaxStepSize, kMaxStepSize,
Max(kMinStepSize, static_cast<double>(external_memory_) / Max(kMinStepSize,
external_memory_limit_ * kMinStepSize)); static_cast<double>(isolate()->isolate_data()->external_memory_) /
isolate()->isolate_data()->external_memory_limit_ *
kMinStepSize));
const double deadline = MonotonicallyIncreasingTimeInMs() + ms_step; const double deadline = MonotonicallyIncreasingTimeInMs() + ms_step;
// Extend the gc callback flags with external memory flags. // Extend the gc callback flags with external memory flags.
current_gc_callback_flags_ = static_cast<GCCallbackFlags>( current_gc_callback_flags_ = static_cast<GCCallbackFlags>(
...@@ -1704,8 +1707,11 @@ bool Heap::PerformGarbageCollection( ...@@ -1704,8 +1707,11 @@ bool Heap::PerformGarbageCollection(
size_t old_gen_size = OldGenerationSizeOfObjects(); size_t old_gen_size = OldGenerationSizeOfObjects();
if (collector == MARK_COMPACTOR) { if (collector == MARK_COMPACTOR) {
// Register the amount of external allocated memory. // Register the amount of external allocated memory.
external_memory_at_last_mark_compact_ = external_memory_; isolate()->isolate_data()->external_memory_at_last_mark_compact_ =
external_memory_limit_ = external_memory_ + kExternalAllocationSoftLimit; isolate()->isolate_data()->external_memory_;
isolate()->isolate_data()->external_memory_limit_ =
isolate()->isolate_data()->external_memory_ +
kExternalAllocationSoftLimit;
double max_factor = double max_factor =
heap_controller()->MaxGrowingFactor(max_old_generation_size_); heap_controller()->MaxGrowingFactor(max_old_generation_size_);
...@@ -3189,8 +3195,8 @@ void Heap::CollectGarbageOnMemoryPressure() { ...@@ -3189,8 +3195,8 @@ void Heap::CollectGarbageOnMemoryPressure() {
double end = MonotonicallyIncreasingTimeInMs(); double end = MonotonicallyIncreasingTimeInMs();
// Estimate how much memory we can free. // Estimate how much memory we can free.
int64_t potential_garbage = int64_t potential_garbage = (CommittedMemory() - SizeOfObjects()) +
(CommittedMemory() - SizeOfObjects()) + external_memory_; isolate()->isolate_data()->external_memory_;
// If we can potentially free large amount of memory, then start GC right // If we can potentially free large amount of memory, then start GC right
// away instead of waiting for memory reducer. // away instead of waiting for memory reducer.
if (potential_garbage >= kGarbageThresholdInBytes && if (potential_garbage >= kGarbageThresholdInBytes &&
...@@ -3655,12 +3661,12 @@ Code* Heap::builtin(int index) { ...@@ -3655,12 +3661,12 @@ Code* Heap::builtin(int index) {
DCHECK(Builtins::IsBuiltinId(index)); DCHECK(Builtins::IsBuiltinId(index));
// Code::cast cannot be used here since we access builtins // Code::cast cannot be used here since we access builtins
// during the marking phase of mark sweep. See IC::Clear. // during the marking phase of mark sweep. See IC::Clear.
return reinterpret_cast<Code*>(builtins_table()[index]); return reinterpret_cast<Code*>(isolate()->builtins_table()[index]);
} }
Address Heap::builtin_address(int index) { Address Heap::builtin_address(int index) {
DCHECK(Builtins::IsBuiltinId(index) || index == Builtins::builtin_count); DCHECK(Builtins::IsBuiltinId(index) || index == Builtins::builtin_count);
return reinterpret_cast<Address>(&builtins_table()[index]); return reinterpret_cast<Address>(&isolate()->builtins_table()[index]);
} }
void Heap::set_builtin(int index, HeapObject* builtin) { void Heap::set_builtin(int index, HeapObject* builtin) {
...@@ -3668,7 +3674,7 @@ void Heap::set_builtin(int index, HeapObject* builtin) { ...@@ -3668,7 +3674,7 @@ void Heap::set_builtin(int index, HeapObject* builtin) {
DCHECK(Internals::HasHeapObjectTag(reinterpret_cast<Address>(builtin))); DCHECK(Internals::HasHeapObjectTag(reinterpret_cast<Address>(builtin)));
// The given builtin may be completely uninitialized thus we cannot check its // The given builtin may be completely uninitialized thus we cannot check its
// type here. // type here.
builtins_table()[index] = builtin; isolate()->builtins_table()[index] = builtin;
} }
void Heap::IterateRoots(RootVisitor* v, VisitMode mode) { void Heap::IterateRoots(RootVisitor* v, VisitMode mode) {
...@@ -4031,9 +4037,14 @@ size_t Heap::OldGenerationSizeOfObjects() { ...@@ -4031,9 +4037,14 @@ size_t Heap::OldGenerationSizeOfObjects() {
} }
uint64_t Heap::PromotedExternalMemorySize() { uint64_t Heap::PromotedExternalMemorySize() {
if (external_memory_ <= external_memory_at_last_mark_compact_) return 0; IsolateData* isolate_data = isolate()->isolate_data();
return static_cast<uint64_t>(external_memory_ - if (isolate_data->external_memory_ <=
external_memory_at_last_mark_compact_); isolate_data->external_memory_at_last_mark_compact_) {
return 0;
}
return static_cast<uint64_t>(
isolate_data->external_memory_ -
isolate_data->external_memory_at_last_mark_compact_);
} }
bool Heap::ShouldOptimizeForLoadTime() { bool Heap::ShouldOptimizeForLoadTime() {
...@@ -4114,7 +4125,8 @@ Heap::IncrementalMarkingLimit Heap::IncrementalMarkingLimitReached() { ...@@ -4114,7 +4125,8 @@ Heap::IncrementalMarkingLimit Heap::IncrementalMarkingLimitReached() {
if (FLAG_stress_marking > 0) { if (FLAG_stress_marking > 0) {
double gained_since_last_gc = double gained_since_last_gc =
PromotedSinceLastGC() + PromotedSinceLastGC() +
(external_memory_ - external_memory_at_last_mark_compact_); (isolate()->isolate_data()->external_memory_ -
isolate()->isolate_data()->external_memory_at_last_mark_compact_);
double size_before_gc = double size_before_gc =
OldGenerationObjectsAndPromotedExternalMemorySize() - OldGenerationObjectsAndPromotedExternalMemorySize() -
gained_since_last_gc; gained_since_last_gc;
...@@ -4399,8 +4411,6 @@ void Heap::SetUp() { ...@@ -4399,8 +4411,6 @@ void Heap::SetUp() {
} }
write_protect_code_memory_ = FLAG_write_protect_code_memory; write_protect_code_memory_ = FLAG_write_protect_code_memory;
isolate_data_.external_reference_table()->Init(isolate_);
} }
void Heap::InitializeHashSeed() { void Heap::InitializeHashSeed() {
......
...@@ -21,7 +21,6 @@ ...@@ -21,7 +21,6 @@
#include "src/base/atomic-utils.h" #include "src/base/atomic-utils.h"
#include "src/globals.h" #include "src/globals.h"
#include "src/heap-symbols.h" #include "src/heap-symbols.h"
#include "src/isolate-data.h"
#include "src/objects.h" #include "src/objects.h"
#include "src/objects/fixed-array.h" #include "src/objects/fixed-array.h"
#include "src/objects/string-table.h" #include "src/objects/string-table.h"
...@@ -508,17 +507,10 @@ class Heap { ...@@ -508,17 +507,10 @@ class Heap {
int64_t external_memory_hard_limit() { return MaxOldGenerationSize() / 2; } int64_t external_memory_hard_limit() { return MaxOldGenerationSize() / 2; }
int64_t external_memory() { return external_memory_; } V8_INLINE int64_t external_memory();
void update_external_memory(int64_t delta) { external_memory_ += delta; } V8_INLINE void update_external_memory(int64_t delta);
V8_INLINE void update_external_memory_concurrently_freed(intptr_t freed);
void update_external_memory_concurrently_freed(intptr_t freed) { V8_INLINE void account_external_memory_concurrently_freed();
external_memory_concurrently_freed_ += freed;
}
void account_external_memory_concurrently_freed() {
external_memory_ -= external_memory_concurrently_freed_;
external_memory_concurrently_freed_ = 0;
}
size_t backing_store_bytes() const { return backing_store_bytes_; } size_t backing_store_bytes() const { return backing_store_bytes_; }
...@@ -636,18 +628,12 @@ class Heap { ...@@ -636,18 +628,12 @@ class Heap {
return array_buffer_collector_; return array_buffer_collector_;
} }
const IsolateData* isolate_data() const { return &isolate_data_; }
IsolateData* isolate_data() { return &isolate_data_; }
// =========================================================================== // ===========================================================================
// Root set access. ========================================================== // Root set access. ==========================================================
// =========================================================================== // ===========================================================================
// Shortcut to the roots table stored in |isolate_data_|. // Shortcut to the roots table stored in the Isolate.
V8_INLINE const RootsTable& roots_table() const { V8_INLINE RootsTable& roots_table();
return isolate_data_.roots();
}
V8_INLINE RootsTable& roots_table() { return isolate_data_.roots(); }
// Heap root getters. // Heap root getters.
#define ROOT_ACCESSOR(type, name, CamelName) inline type name(); #define ROOT_ACCESSOR(type, name, CamelName) inline type name();
...@@ -732,9 +718,6 @@ class Heap { ...@@ -732,9 +718,6 @@ class Heap {
// Builtins. ================================================================= // Builtins. =================================================================
// =========================================================================== // ===========================================================================
// Shortcut to the builtins table stored in |isolate_data_|.
V8_INLINE Object** builtins_table() { return isolate_data_.builtins(); }
Code* builtin(int index); Code* builtin(int index);
Address builtin_address(int index); Address builtin_address(int index);
void set_builtin(int index, HeapObject* builtin); void set_builtin(int index, HeapObject* builtin);
...@@ -1746,15 +1729,6 @@ class Heap { ...@@ -1746,15 +1729,6 @@ class Heap {
bool IsRetainingPathTarget(HeapObject* object, RetainingPathOption* option); bool IsRetainingPathTarget(HeapObject* object, RetainingPathOption* option);
void PrintRetainingPath(HeapObject* object, RetainingPathOption option); void PrintRetainingPath(HeapObject* object, RetainingPathOption option);
// The amount of external memory registered through the API.
int64_t external_memory_ = 0;
// The limit when to trigger memory pressure from the API.
int64_t external_memory_limit_ = kExternalAllocationSoftLimit;
// Caches the amount of external memory registered at the last MC.
int64_t external_memory_at_last_mark_compact_ = 0;
// The amount of memory that has been freed concurrently. // The amount of memory that has been freed concurrently.
std::atomic<intptr_t> external_memory_concurrently_freed_{0}; std::atomic<intptr_t> external_memory_concurrently_freed_{0};
...@@ -1762,8 +1736,6 @@ class Heap { ...@@ -1762,8 +1736,6 @@ class Heap {
// more expedient to get at the isolate directly from within Heap methods. // more expedient to get at the isolate directly from within Heap methods.
Isolate* isolate_ = nullptr; Isolate* isolate_ = nullptr;
IsolateData isolate_data_;
size_t code_range_size_ = 0; size_t code_range_size_ = 0;
size_t max_semi_space_size_ = 8 * (kPointerSize / 4) * MB; size_t max_semi_space_size_ = 8 * (kPointerSize / 4) * MB;
size_t initial_semispace_size_ = kMinSemiSpaceSizeInKB * KB; size_t initial_semispace_size_ = kMinSemiSpaceSizeInKB * KB;
......
...@@ -101,22 +101,46 @@ class IsolateData final { ...@@ -101,22 +101,46 @@ class IsolateData final {
// Static layout definition. // Static layout definition.
#define FIELDS(V) \ #define FIELDS(V) \
V(kEmbedderDataOffset, Internals::kNumIsolateDataSlots* kPointerSize) \
V(kExternalMemoryOffset, kInt64Size) \
V(kExternalMemoryLlimitOffset, kInt64Size) \
V(kExternalMemoryAtLastMarkCompactOffset, kInt64Size) \
V(kRootsTableOffset, RootsTable::kEntriesCount* kPointerSize) \ V(kRootsTableOffset, RootsTable::kEntriesCount* kPointerSize) \
V(kExternalReferenceTableOffset, ExternalReferenceTable::SizeInBytes()) \ V(kExternalReferenceTableOffset, ExternalReferenceTable::SizeInBytes()) \
V(kBuiltinsTableOffset, Builtins::builtin_count* kPointerSize) \ V(kBuiltinsTableOffset, Builtins::builtin_count* kPointerSize) \
V(kMagicNumberOffset, kIntptrSize) \ V(kMagicNumberOffset, kIntptrSize) \
V(kVirtualCallTargetRegisterOffset, kPointerSize) \ V(kVirtualCallTargetRegisterOffset, kPointerSize) \
/* This padding aligns IsolateData size by 8 bytes. */ \
V(kPaddingOffset, \
8 + RoundUp<8>(static_cast<int>(kPaddingOffset)) - kPaddingOffset) \
/* Total size. */ \ /* Total size. */ \
V(kSize, 0) V(kSize, 0)
DEFINE_FIELD_OFFSET_CONSTANTS(0, FIELDS) DEFINE_FIELD_OFFSET_CONSTANTS(0, FIELDS)
#undef FIELDS #undef FIELDS
// These fields are accessed through the API, offsets must be kept in sync
// with v8::internal::Internals (in include/v8-internal.h) constants.
// The layout consistency is verified in Isolate::CheckIsolateLayout() using
// runtime checks.
void* embedder_data_[Internals::kNumIsolateDataSlots] = {};
// TODO(ishell): Move these external memory counters back to Heap once the
// Node JS bot issue is solved.
// The amount of external memory registered through the API.
int64_t external_memory_ = 0;
// The limit when to trigger memory pressure from the API.
int64_t external_memory_limit_ = kExternalAllocationSoftLimit;
// Caches the amount of external memory registered at the last MC.
int64_t external_memory_at_last_mark_compact_ = 0;
RootsTable roots_; RootsTable roots_;
ExternalReferenceTable external_reference_table_; ExternalReferenceTable external_reference_table_;
Object* builtins_[Builtins::builtin_count]; Object* builtins_[Builtins::builtin_count] = {};
// For root register verification. // For root register verification.
// TODO(v8:6666): Remove once the root register is fully supported on ia32. // TODO(v8:6666): Remove once the root register is fully supported on ia32.
...@@ -127,9 +151,21 @@ class IsolateData final { ...@@ -127,9 +151,21 @@ class IsolateData final {
// ia32 (otherwise the arguments adaptor call runs out of registers). // ia32 (otherwise the arguments adaptor call runs out of registers).
void* virtual_call_target_register_ = nullptr; void* virtual_call_target_register_ = nullptr;
// Ensure the size is 8-byte aligned in order to make alignment of the field
// following the IsolateData field predictable. This solves the issue with
// C++ compilers for 32-bit platforms which are not consistent at aligning
// int64_t fields.
// In order to avoid dealing with zero-size arrays the padding size is always
// in the range [8, 15).
STATIC_ASSERT(kPaddingOffsetEnd + 1 - kPaddingOffset >= 8);
char padding_[kPaddingOffsetEnd + 1 - kPaddingOffset];
V8_INLINE static void AssertPredictableLayout(); V8_INLINE static void AssertPredictableLayout();
friend class Isolate; friend class Isolate;
friend class Heap;
FRIEND_TEST(HeapTest, ExternalLimitDefault);
FRIEND_TEST(HeapTest, ExternalLimitStaysAboveDefaultForExplicitHandling);
DISALLOW_COPY_AND_ASSIGN(IsolateData); DISALLOW_COPY_AND_ASSIGN(IsolateData);
}; };
...@@ -139,12 +175,22 @@ class IsolateData final { ...@@ -139,12 +175,22 @@ class IsolateData final {
// issues because of different compilers used for snapshot generator and // issues because of different compilers used for snapshot generator and
// actual V8 code. // actual V8 code.
void IsolateData::AssertPredictableLayout() { void IsolateData::AssertPredictableLayout() {
STATIC_ASSERT(offsetof(IsolateData, roots_) == STATIC_ASSERT(std::is_standard_layout<RootsTable>::value);
IsolateData::kRootsTableOffset); STATIC_ASSERT(std::is_standard_layout<ExternalReferenceTable>::value);
STATIC_ASSERT(std::is_standard_layout<IsolateData>::value);
STATIC_ASSERT(offsetof(IsolateData, roots_) == kRootsTableOffset);
STATIC_ASSERT(offsetof(IsolateData, external_reference_table_) == STATIC_ASSERT(offsetof(IsolateData, external_reference_table_) ==
IsolateData::kExternalReferenceTableOffset); kExternalReferenceTableOffset);
STATIC_ASSERT(offsetof(IsolateData, builtins_) == STATIC_ASSERT(offsetof(IsolateData, builtins_) == kBuiltinsTableOffset);
IsolateData::kBuiltinsTableOffset); STATIC_ASSERT(offsetof(IsolateData, magic_number_) == kMagicNumberOffset);
STATIC_ASSERT(offsetof(IsolateData, virtual_call_target_register_) ==
kVirtualCallTargetRegisterOffset);
STATIC_ASSERT(offsetof(IsolateData, external_memory_) ==
kExternalMemoryOffset);
STATIC_ASSERT(offsetof(IsolateData, external_memory_limit_) ==
kExternalMemoryLlimitOffset);
STATIC_ASSERT(offsetof(IsolateData, external_memory_at_last_mark_compact_) ==
kExternalMemoryAtLastMarkCompactOffset);
STATIC_ASSERT(sizeof(IsolateData) == IsolateData::kSize); STATIC_ASSERT(sizeof(IsolateData) == IsolateData::kSize);
} }
......
...@@ -12,15 +12,6 @@ ...@@ -12,15 +12,6 @@
namespace v8 { namespace v8 {
namespace internal { namespace internal {
base::AddressRegion Isolate::root_register_addressable_region() {
// TODO(ishell): limit this region to the IsolateData object once all the
// data is moved there.
Address start = reinterpret_cast<Address>(this);
Address end =
reinterpret_cast<Address>(heap_.isolate_data()) + sizeof(IsolateData);
return base::AddressRegion(start, end - start);
}
bool Isolate::FromWritableHeapObject(HeapObject* obj, Isolate** isolate) { bool Isolate::FromWritableHeapObject(HeapObject* obj, Isolate** isolate) {
i::MemoryChunk* chunk = i::MemoryChunk::FromHeapObject(obj); i::MemoryChunk* chunk = i::MemoryChunk::FromHeapObject(obj);
if (chunk->owner()->identity() == i::RO_SPACE) { if (chunk->owner()->identity() == i::RO_SPACE) {
......
...@@ -2646,8 +2646,7 @@ std::atomic<size_t> Isolate::non_disposed_isolates_; ...@@ -2646,8 +2646,7 @@ std::atomic<size_t> Isolate::non_disposed_isolates_;
#endif // DEBUG #endif // DEBUG
Isolate::Isolate() Isolate::Isolate()
: embedder_data_(), : entry_stack_(nullptr),
entry_stack_(nullptr),
stack_trace_nesting_level_(0), stack_trace_nesting_level_(0),
incomplete_message_(nullptr), incomplete_message_(nullptr),
bootstrapper_(nullptr), bootstrapper_(nullptr),
...@@ -2718,6 +2717,7 @@ Isolate::Isolate() ...@@ -2718,6 +2717,7 @@ Isolate::Isolate()
cancelable_task_manager_(new CancelableTaskManager()), cancelable_task_manager_(new CancelableTaskManager()),
abort_on_uncaught_exception_callback_(nullptr), abort_on_uncaught_exception_callback_(nullptr),
total_regexp_code_generated_(0) { total_regexp_code_generated_(0) {
CheckIsolateLayout();
id_ = base::Relaxed_AtomicIncrement(&isolate_counter_, 1); id_ = base::Relaxed_AtomicIncrement(&isolate_counter_, 1);
TRACE_ISOLATE(constructor); TRACE_ISOLATE(constructor);
...@@ -2766,6 +2766,24 @@ Isolate::Isolate() ...@@ -2766,6 +2766,24 @@ Isolate::Isolate()
} }
} }
void Isolate::CheckIsolateLayout() {
CHECK_EQ(OFFSET_OF(Isolate, isolate_data_), 0);
CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.embedder_data_)),
Internals::kIsolateEmbedderDataOffset);
CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.roots_)),
Internals::kIsolateRootsOffset);
CHECK_EQ(Internals::kExternalMemoryOffset % 8, 0);
CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.external_memory_)),
Internals::kExternalMemoryOffset);
CHECK_EQ(Internals::kExternalMemoryLimitOffset % 8, 0);
CHECK_EQ(static_cast<int>(
OFFSET_OF(Isolate, isolate_data_.external_memory_limit_)),
Internals::kExternalMemoryLimitOffset);
CHECK_EQ(Internals::kExternalMemoryAtLastMarkCompactOffset % 8, 0);
CHECK_EQ(static_cast<int>(OFFSET_OF(
Isolate, isolate_data_.external_memory_at_last_mark_compact_)),
Internals::kExternalMemoryAtLastMarkCompactOffset);
}
void Isolate::TearDown() { void Isolate::TearDown() {
TRACE_ISOLATE(tear_down); TRACE_ISOLATE(tear_down);
...@@ -3186,6 +3204,8 @@ bool Isolate::Init(StartupDeserializer* des) { ...@@ -3186,6 +3204,8 @@ bool Isolate::Init(StartupDeserializer* des) {
DCHECK(!heap_.HasBeenSetUp()); DCHECK(!heap_.HasBeenSetUp());
heap_.SetUp(); heap_.SetUp();
isolate_data_.external_reference_table()->Init(this);
// Setup the wasm engine. // Setup the wasm engine.
if (wasm_engine_ == nullptr) { if (wasm_engine_ == nullptr) {
SetWasmEngine(wasm::WasmEngine::GetWasmEngine()); SetWasmEngine(wasm::WasmEngine::GetWasmEngine());
...@@ -3282,24 +3302,6 @@ bool Isolate::Init(StartupDeserializer* des) { ...@@ -3282,24 +3302,6 @@ bool Isolate::Init(StartupDeserializer* des) {
std::ofstream(GetTurboCfgFileName(this).c_str(), std::ios_base::trunc); std::ofstream(GetTurboCfgFileName(this).c_str(), std::ios_base::trunc);
} }
CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, embedder_data_)),
Internals::kIsolateEmbedderDataOffset);
CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, heap_.isolate_data_.roots_)),
Internals::kIsolateRootsOffset);
CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, heap_.external_memory_)),
Internals::kExternalMemoryOffset);
CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, heap_.external_memory_limit_)),
Internals::kExternalMemoryLimitOffset);
CHECK_EQ(static_cast<int>(
OFFSET_OF(Isolate, heap_.external_memory_at_last_mark_compact_)),
Internals::kExternalMemoryAtLastMarkCompactOffset);
CHECK_EQ(static_cast<int>(OFFSET_OF(
Isolate, heap_.isolate_data_.external_reference_table_)),
Internals::kIsolateRootsOffset +
IsolateData::kExternalReferenceTableOffset);
CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, heap_.isolate_data_.builtins_)),
Internals::kIsolateRootsOffset + IsolateData::kBuiltinsTableOffset);
{ {
HandleScope scope(this); HandleScope scope(this);
ast_string_constants_ = new AstStringConstants(this, heap()->HashSeed()); ast_string_constants_ = new AstStringConstants(this, heap()->HashSeed());
......
...@@ -27,6 +27,7 @@ ...@@ -27,6 +27,7 @@
#include "src/handles.h" #include "src/handles.h"
#include "src/heap/factory.h" #include "src/heap/factory.h"
#include "src/heap/heap.h" #include "src/heap/heap.h"
#include "src/isolate-data.h"
#include "src/messages.h" #include "src/messages.h"
#include "src/objects/code.h" #include "src/objects/code.h"
#include "src/objects/debug-objects.h" #include "src/objects/debug-objects.h"
...@@ -999,8 +1000,8 @@ class Isolate : private HiddenFactory { ...@@ -999,8 +1000,8 @@ class Isolate : private HiddenFactory {
StackGuard* stack_guard() { return &stack_guard_; } StackGuard* stack_guard() { return &stack_guard_; }
Heap* heap() { return &heap_; } Heap* heap() { return &heap_; }
const IsolateData* isolate_data() const { return heap_.isolate_data(); } const IsolateData* isolate_data() const { return &isolate_data_; }
IsolateData* isolate_data() { return heap_.isolate_data(); } IsolateData* isolate_data() { return &isolate_data_; }
// Generated code can embed this address to get access to the isolate-specific // Generated code can embed this address to get access to the isolate-specific
// data (for example, roots, external references, builtins, etc.). // data (for example, roots, external references, builtins, etc.).
...@@ -1009,10 +1010,17 @@ class Isolate : private HiddenFactory { ...@@ -1009,10 +1010,17 @@ class Isolate : private HiddenFactory {
RootsTable& roots_table() { return isolate_data()->roots(); } RootsTable& roots_table() { return isolate_data()->roots(); }
// kRootRegister may be used to address any location that falls into this // A sub-region of the Isolate object that has "predictable" layout which
// region. Fields outside this region are not guaranteed to live at a static // depends only on the pointer size and therefore it's guaranteed that there
// offset from kRootRegister. // will be no compatibility issues because of different compilers used for
inline base::AddressRegion root_register_addressable_region(); // snapshot generator and actual V8 code.
// Thus, kRootRegister may be used to address any location that falls into
// this region.
// See IsolateData::AssertPredictableLayout() for details.
base::AddressRegion root_register_addressable_region() const {
return base::AddressRegion(reinterpret_cast<Address>(&isolate_data_),
sizeof(IsolateData));
}
Object* root(RootIndex index) { return roots_table()[index]; } Object* root(RootIndex index) { return roots_table()[index]; }
...@@ -1025,6 +1033,8 @@ class Isolate : private HiddenFactory { ...@@ -1025,6 +1033,8 @@ class Isolate : private HiddenFactory {
return isolate_data()->external_reference_table(); return isolate_data()->external_reference_table();
} }
V8_INLINE Object** builtins_table() { return isolate_data_.builtins(); }
StubCache* load_stub_cache() { return load_stub_cache_; } StubCache* load_stub_cache() { return load_stub_cache_; }
StubCache* store_stub_cache() { return store_stub_cache_; } StubCache* store_stub_cache() { return store_stub_cache_; }
DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; } DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; }
...@@ -1124,11 +1134,11 @@ class Isolate : private HiddenFactory { ...@@ -1124,11 +1134,11 @@ class Isolate : private HiddenFactory {
void SetData(uint32_t slot, void* data) { void SetData(uint32_t slot, void* data) {
DCHECK_LT(slot, Internals::kNumIsolateDataSlots); DCHECK_LT(slot, Internals::kNumIsolateDataSlots);
embedder_data_[slot] = data; isolate_data_.embedder_data_[slot] = data;
} }
void* GetData(uint32_t slot) { void* GetData(uint32_t slot) {
DCHECK_LT(slot, Internals::kNumIsolateDataSlots); DCHECK_LT(slot, Internals::kNumIsolateDataSlots);
return embedder_data_[slot]; return isolate_data_.embedder_data_[slot];
} }
bool serializer_enabled() const { return serializer_enabled_; } bool serializer_enabled() const { return serializer_enabled_; }
...@@ -1617,18 +1627,14 @@ class Isolate : private HiddenFactory { ...@@ -1617,18 +1627,14 @@ class Isolate : private HiddenFactory {
protected: protected:
Isolate(); Isolate();
void CheckIsolateLayout();
bool IsArrayOrObjectOrStringPrototype(Object* object); bool IsArrayOrObjectOrStringPrototype(Object* object);
private: private:
friend struct GlobalState; friend struct GlobalState;
friend struct InitializeGlobalState; friend struct InitializeGlobalState;
// These fields are accessed through the API, offsets must be kept in sync
// with v8::internal::Internals (in include/v8.h) constants. This is also
// verified in Isolate::Init() using runtime checks.
void* embedder_data_[Internals::kNumIsolateDataSlots];
Heap heap_;
class ThreadDataTable { class ThreadDataTable {
public: public:
ThreadDataTable() = default; ThreadDataTable() = default;
...@@ -1737,6 +1743,12 @@ class Isolate : private HiddenFactory { ...@@ -1737,6 +1743,12 @@ class Isolate : private HiddenFactory {
return ""; return "";
} }
// This class contains a collection of data accessible from both C++ runtime
// and compiled code (including assembly stubs, builtins, interpreter bytecode
// handlers and optimized code).
IsolateData isolate_data_;
Heap heap_;
base::Atomic32 id_; base::Atomic32 id_;
EntryStackItem* entry_stack_; EntryStackItem* entry_stack_;
int stack_trace_nesting_level_; int stack_trace_nesting_level_;
......
...@@ -7,7 +7,7 @@ ...@@ -7,7 +7,7 @@
#include "src/assembler-arch.h" #include "src/assembler-arch.h"
#include "src/base/template-utils.h" #include "src/base/template-utils.h"
#include "src/heap/heap.h" #include "src/builtins/builtins.h"
namespace v8 { namespace v8 {
namespace internal { namespace internal {
......
...@@ -61,14 +61,16 @@ TEST_F(HeapTest, ASLR) { ...@@ -61,14 +61,16 @@ TEST_F(HeapTest, ASLR) {
TEST_F(HeapTest, ExternalLimitDefault) { TEST_F(HeapTest, ExternalLimitDefault) {
Heap* heap = i_isolate()->heap(); Heap* heap = i_isolate()->heap();
EXPECT_EQ(kExternalAllocationSoftLimit, heap->external_memory_limit_); EXPECT_EQ(kExternalAllocationSoftLimit,
heap->isolate()->isolate_data()->external_memory_limit_);
} }
TEST_F(HeapTest, ExternalLimitStaysAboveDefaultForExplicitHandling) { TEST_F(HeapTest, ExternalLimitStaysAboveDefaultForExplicitHandling) {
v8_isolate()->AdjustAmountOfExternalAllocatedMemory(+10 * MB); v8_isolate()->AdjustAmountOfExternalAllocatedMemory(+10 * MB);
v8_isolate()->AdjustAmountOfExternalAllocatedMemory(-10 * MB); v8_isolate()->AdjustAmountOfExternalAllocatedMemory(-10 * MB);
Heap* heap = i_isolate()->heap(); Heap* heap = i_isolate()->heap();
EXPECT_GE(heap->external_memory_limit_, kExternalAllocationSoftLimit); EXPECT_GE(heap->isolate()->isolate_data()->external_memory_limit_,
kExternalAllocationSoftLimit);
} }
} // namespace internal } // namespace internal
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment