Commit d09fc540 authored by Jakob Gruber, committed by V8 LUCI CQ

[isolate-data] Consistent field names

This is a refactor-only change in preparation for the upcoming
builtins table split.

- Define fields through a macro list to avoid some manual boilerplate
  code.
- Consistent names for builtin_entry_table_ and builtin_table_, and
  update names of related methods as well.
- Add Builtins::ToInt to replace manual static_casts.
- Move around IsolateData methods s.t. they're in the same order as
  the underlying fields.

Bug: v8:12203
Change-Id: I68cd036b8de1dd2708e2d4579d76bb3baaea5e1c
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3162128
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Cr-Commit-Position: refs/heads/main@{#76874}
parent b01d2f45
......@@ -314,7 +314,7 @@ void Builtins::EmitCodeCreateEvents(Isolate* isolate) {
return; // No need to iterate the entire table in this case.
}
Address* builtins = isolate->builtins_table();
Address* builtins = isolate->builtin_table();
int i = 0;
HandleScope scope(isolate);
for (; i < static_cast<int>(Builtin::kFirstBytecodeHandler); i++) {
......
......@@ -101,6 +101,10 @@ class Builtins {
DCHECK(IsBuiltinId(id));
return static_cast<Builtin>(id);
}
static constexpr int ToInt(Builtin id) {
DCHECK(IsBuiltinId(id));
return static_cast<int>(id);
}
// The different builtin kinds are documented in builtins-definitions.h.
enum Kind { CPP, TFJ, TFC, TFS, TFH, BCH, ASM };
......
......@@ -128,8 +128,8 @@ const char* V8NameConverter::RootRelativeName(int offset) const {
const unsigned kRootsTableSize = sizeof(RootsTable);
const int kExtRefsTableStart = IsolateData::external_reference_table_offset();
const unsigned kExtRefsTableSize = ExternalReferenceTable::kSizeInBytes;
const int kBuiltinsTableStart = IsolateData::builtins_table_offset();
const unsigned kBuiltinsTableSize =
const int kBuiltinTableStart = IsolateData::builtin_table_offset();
const unsigned kBuiltinTableSize =
Builtins::kBuiltinCount * kSystemPointerSize;
if (static_cast<unsigned>(offset - kRootsTableStart) < kRootsTableSize) {
......@@ -163,9 +163,9 @@ const char* V8NameConverter::RootRelativeName(int offset) const {
offset_in_extref_table));
return v8_buffer_.begin();
} else if (static_cast<unsigned>(offset - kBuiltinsTableStart) <
kBuiltinsTableSize) {
uint32_t offset_in_builtins_table = (offset - kBuiltinsTableStart);
} else if (static_cast<unsigned>(offset - kBuiltinTableStart) <
kBuiltinTableSize) {
uint32_t offset_in_builtins_table = (offset - kBuiltinTableStart);
Builtin builtin =
Builtins::FromInt(offset_in_builtins_table / kSystemPointerSize);
......
......@@ -346,7 +346,8 @@ SafeStackFrameIterator::SafeStackFrameIterator(Isolate* isolate, Address pc,
top_frame_type_ = type;
state.fp = fast_c_fp;
state.sp = sp;
state.pc_address = isolate->isolate_data()->fast_c_call_caller_pc_address();
state.pc_address = reinterpret_cast<Address*>(
isolate->isolate_data()->fast_c_call_caller_pc_address());
advance_frame = false;
} else if (IsValidTop(top)) {
type = ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
......
......@@ -20,13 +20,39 @@ namespace internal {
class Isolate;
// IsolateData fields, defined as: V(Offset, Size, Name)
#define ISOLATE_DATA_FIELDS(V) \
V(kEmbedderDataOffset, Internals::kNumIsolateDataSlots* kSystemPointerSize, \
embedder_data) \
V(kFastCCallCallerFPOffset, kSystemPointerSize, fast_c_call_caller_fp) \
V(kFastCCallCallerPCOffset, kSystemPointerSize, fast_c_call_caller_pc) \
V(kFastApiCallTargetOffset, kSystemPointerSize, fast_api_call_target) \
V(kCageBaseOffset, kSystemPointerSize, cage_base) \
V(kLongTaskStatsCounterOffset, kSizetSize, long_task_stats_counter) \
V(kStackGuardOffset, StackGuard::kSizeInBytes, stack_guard) \
V(kRootsTableOffset, RootsTable::kEntriesCount* kSystemPointerSize, \
roots_table) \
V(kExternalReferenceTableOffset, ExternalReferenceTable::kSizeInBytes, \
external_reference_table) \
V(kThreadLocalTopOffset, ThreadLocalTop::kSizeInBytes, thread_local_top) \
V(kBuiltinEntryTableOffset, Builtins::kBuiltinCount* kSystemPointerSize, \
builtin_entry_table) \
V(kBuiltinTableOffset, Builtins::kBuiltinCount* kSystemPointerSize, \
builtin_table) \
ISOLATE_DATA_FIELDS_HEAP_SANDBOX(V) \
V(kStackIsIterableOffset, kUInt8Size, stack_is_iterable)
#ifdef V8_HEAP_SANDBOX
#define ISOLATE_DATA_FIELDS_HEAP_SANDBOX(V) \
V(kExternalPointerTableOffset, kSystemPointerSize * 3, external_pointer_table)
#else
#define ISOLATE_DATA_FIELDS_HEAP_SANDBOX(V)
#endif // V8_HEAP_SANDBOX
// This class contains a collection of data accessible from both C++ runtime
// and compiled code (including assembly stubs, builtins, interpreter bytecode
// handlers and optimized code).
// In particular, it contains pointer to the V8 heap roots table, external
// reference table and builtins array.
// The compiled code accesses the isolate data fields indirectly via the root
// register.
// and compiled code (including builtins, interpreter bytecode handlers and
// optimized code). The compiled code accesses the isolate data fields
// indirectly via the root register.
class IsolateData final {
public:
IsolateData(Isolate* isolate, Address cage_base)
......@@ -37,158 +63,90 @@ class IsolateData final {
static constexpr intptr_t kIsolateRootBias = kRootRegisterBias;
// The value of kPointerCageBaseRegister
Address cage_base() const {
return COMPRESS_POINTERS_BOOL ? cage_base_ : kNullAddress;
}
// The value of the kRootRegister.
Address isolate_root() const {
return reinterpret_cast<Address>(this) + kIsolateRootBias;
}
// Root-register-relative offset of the roots table.
static constexpr int roots_table_offset() {
return kRootsTableOffset - kIsolateRootBias;
}
// Root-register-relative offsets.
#define V(Offset, Size, Name) \
static constexpr int Name##_offset() { return Offset - kIsolateRootBias; }
ISOLATE_DATA_FIELDS(V)
#undef V
// Root-register-relative offset of the given root table entry.
static constexpr int root_slot_offset(RootIndex root_index) {
return roots_table_offset() + RootsTable::offset_of(root_index);
}
// Root-register-relative offset of the external reference table.
static constexpr int external_reference_table_offset() {
return kExternalReferenceTableOffset - kIsolateRootBias;
}
// Root-register-relative offset of the builtin entry table.
static constexpr int builtin_entry_table_offset() {
return kBuiltinEntryTableOffset - kIsolateRootBias;
}
static constexpr int builtin_entry_slot_offset(Builtin builtin) {
DCHECK(Builtins::IsBuiltinId(builtin));
static int builtin_entry_slot_offset(Builtin id) {
return builtin_entry_table_offset() +
static_cast<int>(builtin) * kSystemPointerSize;
}
// Root-register-relative offset of the builtins table.
static constexpr int builtins_table_offset() {
return kBuiltinsTableOffset - kIsolateRootBias;
}
// Root-register-relative offset of the external pointer table.
#ifdef V8_HEAP_SANDBOX
static constexpr int external_pointer_table_offset() {
return kExternalPointerTableOffset - kIsolateRootBias;
}
#endif
static constexpr int fast_c_call_caller_fp_offset() {
return kFastCCallCallerFPOffset - kIsolateRootBias;
}
static constexpr int fast_c_call_caller_pc_offset() {
return kFastCCallCallerPCOffset - kIsolateRootBias;
Builtins::ToInt(id) * kSystemPointerSize;
}
static constexpr int fast_api_call_target_offset() {
return kFastApiCallTargetOffset - kIsolateRootBias;
}
static constexpr int cage_base_offset() {
return kCageBaseOffset - kIsolateRootBias;
}
// Root-register-relative offset of the given builtin table entry.
// TODO(ishell): remove in favour of typified id version.
static int builtin_slot_offset(int builtin_index) {
DCHECK(Builtins::IsBuiltinId(builtin_index));
return builtins_table_offset() + builtin_index * kSystemPointerSize;
return builtin_table_offset() + builtin_index * kSystemPointerSize;
}
// Root-register-relative offset of the builtin table entry.
static int builtin_slot_offset(Builtin id) {
return builtins_table_offset() + static_cast<int>(id) * kSystemPointerSize;
return builtin_table_offset() + Builtins::ToInt(id) * kSystemPointerSize;
}
// The FP and PC that are saved right before TurboAssembler::CallCFunction.
Address* fast_c_call_caller_fp_address() { return &fast_c_call_caller_fp_; }
Address* fast_c_call_caller_pc_address() { return &fast_c_call_caller_pc_; }
// The address of the fast API callback right before it's executed from
// generated code.
Address* fast_api_call_target_address() { return &fast_api_call_target_; }
#define V(Offset, Size, Name) \
Address Name##_address() { return reinterpret_cast<Address>(&Name##_); }
ISOLATE_DATA_FIELDS(V)
#undef V
Address fast_c_call_caller_fp() const { return fast_c_call_caller_fp_; }
Address fast_c_call_caller_pc() const { return fast_c_call_caller_pc_; }
Address fast_api_call_target() const { return fast_api_call_target_; }
// The value of kPointerCageBaseRegister.
Address cage_base() const { return cage_base_; }
StackGuard* stack_guard() { return &stack_guard_; }
uint8_t* stack_is_iterable_address() { return &stack_is_iterable_; }
Address fast_c_call_caller_fp() { return fast_c_call_caller_fp_; }
Address fast_c_call_caller_pc() { return fast_c_call_caller_pc_; }
Address fast_api_call_target() { return fast_api_call_target_; }
uint8_t stack_is_iterable() { return stack_is_iterable_; }
// Returns true if this address points to data stored in this instance.
// If it's the case then the value can be accessed indirectly through the
// root register.
bool contains(Address address) const {
STATIC_ASSERT(std::is_unsigned<Address>::value);
Address start = reinterpret_cast<Address>(this);
return (address - start) < sizeof(*this);
RootsTable& roots() { return roots_table_; }
const RootsTable& roots() const { return roots_table_; }
ExternalReferenceTable* external_reference_table() {
return &external_reference_table_;
}
ThreadLocalTop& thread_local_top() { return thread_local_top_; }
ThreadLocalTop const& thread_local_top() const { return thread_local_top_; }
Address* builtin_entry_table() { return builtin_entry_table_; }
Address* builtin_table() { return builtin_table_; }
uint8_t stack_is_iterable() const { return stack_is_iterable_; }
RootsTable& roots() { return roots_; }
const RootsTable& roots() const { return roots_; }
ExternalReferenceTable* external_reference_table() {
return &external_reference_table_;
// Returns true if this address points to data stored in this instance. If
// it's the case then the value can be accessed indirectly through the root
// register.
bool contains(Address address) const {
STATIC_ASSERT(std::is_unsigned<Address>::value);
Address start = reinterpret_cast<Address>(this);
return (address - start) < sizeof(*this);
}
Address* builtin_entry_table() { return builtin_entry_table_; }
Address* builtins() { return builtins_; }
private:
// Static layout definition.
//
// Note: The location of fields within IsolateData is significant. The
// closer they are to the value of kRootRegister (i.e.: isolate_root()), the
// cheaper it is to access them. See also: https://crbug.com/993264.
// The recommend guideline is to put frequently-accessed fields close to the
// beginning of IsolateData.
#define FIELDS(V) \
V(kEmbedderDataOffset, Internals::kNumIsolateDataSlots* kSystemPointerSize) \
V(kFastCCallCallerFPOffset, kSystemPointerSize) \
V(kFastCCallCallerPCOffset, kSystemPointerSize) \
V(kFastApiCallTargetOffset, kSystemPointerSize) \
V(kCageBaseOffset, kSystemPointerSize) \
V(kLongTaskStatsCounterOffset, kSizetSize) \
V(kStackGuardOffset, StackGuard::kSizeInBytes) \
V(kRootsTableOffset, RootsTable::kEntriesCount* kSystemPointerSize) \
V(kExternalReferenceTableOffset, ExternalReferenceTable::kSizeInBytes) \
V(kThreadLocalTopOffset, ThreadLocalTop::kSizeInBytes) \
V(kBuiltinEntryTableOffset, Builtins::kBuiltinCount* kSystemPointerSize) \
V(kBuiltinsTableOffset, Builtins::kBuiltinCount* kSystemPointerSize) \
FIELDS_HEAP_SANDBOX(V) \
V(kStackIsIterableOffset, kUInt8Size) \
/* This padding aligns IsolateData size by 8 bytes. */ \
V(kPaddingOffset, \
8 + RoundUp<8>(static_cast<int>(kPaddingOffset)) - kPaddingOffset) \
/* Total size. */ \
// The recommended guideline is to put frequently-accessed fields close to
// the beginning of IsolateData.
#define FIELDS(V) \
ISOLATE_DATA_FIELDS(V) \
/* This padding aligns IsolateData size by 8 bytes. */ \
V(kPaddingOffset, \
8 + RoundUp<8>(static_cast<int>(kPaddingOffset)) - kPaddingOffset) \
/* Total size. */ \
V(kSize, 0)
#ifdef V8_HEAP_SANDBOX
#define FIELDS_HEAP_SANDBOX(V) \
V(kExternalPointerTableOffset, kSystemPointerSize * 3)
#else
#define FIELDS_HEAP_SANDBOX(V)
#endif // V8_HEAP_SANDBOX
DEFINE_FIELD_OFFSET_CONSTANTS(0, FIELDS)
#undef FIELDS
// These fields are accessed through the API, offsets must be kept in sync
// with v8::internal::Internals (in include/v8-internal.h) constants.
// The layout consitency is verified in Isolate::CheckIsolateLayout() using
// with v8::internal::Internals (in include/v8-internal.h) constants. The
// layout consistency is verified in Isolate::CheckIsolateLayout() using
// runtime checks.
void* embedder_data_[Internals::kNumIsolateDataSlots] = {};
......@@ -196,11 +154,15 @@ class IsolateData final {
// the sampling CPU profiler can iterate the stack during such calls. These
// are stored on IsolateData so that they can be stored to with only one move
// instruction in compiled code.
//
// The FP and PC that are saved right before TurboAssembler::CallCFunction.
Address fast_c_call_caller_fp_ = kNullAddress;
Address fast_c_call_caller_pc_ = kNullAddress;
// The address of the fast API callback right before it's executed from
// generated code.
Address fast_api_call_target_ = kNullAddress;
Address cage_base_ = kNullAddress;
const Address cage_base_;
// Used for implementation of LongTaskStats. Counts the number of potential
// long tasks.
......@@ -210,8 +172,7 @@ class IsolateData final {
// the stack limit used by stack checks in generated code.
StackGuard stack_guard_;
RootsTable roots_;
RootsTable roots_table_;
ExternalReferenceTable external_reference_table_;
ThreadLocalTop thread_local_top_;
......@@ -222,7 +183,7 @@ class IsolateData final {
Address builtin_entry_table_[Builtins::kBuiltinCount] = {};
// The entries in this array are tagged pointers to Code objects.
Address builtins_[Builtins::kBuiltinCount] = {};
Address builtin_table_[Builtins::kBuiltinCount] = {};
// Table containing pointers to external objects.
#ifdef V8_HEAP_SANDBOX
......@@ -259,31 +220,16 @@ void IsolateData::AssertPredictableLayout() {
STATIC_ASSERT(std::is_standard_layout<ThreadLocalTop>::value);
STATIC_ASSERT(std::is_standard_layout<ExternalReferenceTable>::value);
STATIC_ASSERT(std::is_standard_layout<IsolateData>::value);
STATIC_ASSERT(offsetof(IsolateData, roots_) == kRootsTableOffset);
STATIC_ASSERT(offsetof(IsolateData, external_reference_table_) ==
kExternalReferenceTableOffset);
STATIC_ASSERT(offsetof(IsolateData, thread_local_top_) ==
kThreadLocalTopOffset);
STATIC_ASSERT(offsetof(IsolateData, builtins_) == kBuiltinsTableOffset);
STATIC_ASSERT(offsetof(IsolateData, fast_c_call_caller_fp_) ==
kFastCCallCallerFPOffset);
STATIC_ASSERT(offsetof(IsolateData, fast_c_call_caller_pc_) ==
kFastCCallCallerPCOffset);
STATIC_ASSERT(offsetof(IsolateData, fast_api_call_target_) ==
kFastApiCallTargetOffset);
STATIC_ASSERT(offsetof(IsolateData, cage_base_) == kCageBaseOffset);
STATIC_ASSERT(offsetof(IsolateData, long_task_stats_counter_) ==
kLongTaskStatsCounterOffset);
STATIC_ASSERT(offsetof(IsolateData, stack_guard_) == kStackGuardOffset);
#ifdef V8_HEAP_SANDBOX
STATIC_ASSERT(offsetof(IsolateData, external_pointer_table_) ==
kExternalPointerTableOffset);
#endif
STATIC_ASSERT(offsetof(IsolateData, stack_is_iterable_) ==
kStackIsIterableOffset);
#define V(Offset, Size, Name) \
STATIC_ASSERT(offsetof(IsolateData, Name##_) == Offset);
ISOLATE_DATA_FIELDS(V)
#undef V
STATIC_ASSERT(sizeof(IsolateData) == IsolateData::kSize);
}
#undef ISOLATE_DATA_FIELDS_HEAP_SANDBOX
#undef ISOLATE_DATA_FIELDS
} // namespace internal
} // namespace v8
......
......@@ -2675,10 +2675,10 @@ void Isolate::ReleaseSharedPtrs() {
}
}
bool Isolate::IsBuiltinsTableHandleLocation(Address* handle_location) {
bool Isolate::IsBuiltinTableHandleLocation(Address* handle_location) {
FullObjectSlot location(handle_location);
FullObjectSlot first_root(builtins_table());
FullObjectSlot last_root(builtins_table() + Builtins::kBuiltinCount);
FullObjectSlot first_root(builtin_table());
FullObjectSlot last_root(builtin_table() + Builtins::kBuiltinCount);
if (location >= last_root) return false;
if (location < first_root) return false;
return true;
......@@ -3049,7 +3049,7 @@ void Isolate::CheckIsolateLayout() {
Internals::kIsolateLongTaskStatsCounterOffset);
CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.stack_guard_)),
Internals::kIsolateStackGuardOffset);
CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.roots_)),
CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.roots_table_)),
Internals::kIsolateRootsOffset);
#ifdef V8_HEAP_SANDBOX
......
......@@ -1123,9 +1123,9 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
}
Address* builtin_entry_table() { return isolate_data_.builtin_entry_table(); }
V8_INLINE Address* builtins_table() { return isolate_data_.builtins(); }
V8_INLINE Address* builtin_table() { return isolate_data_.builtin_table(); }
bool IsBuiltinsTableHandleLocation(Address* handle_location);
bool IsBuiltinTableHandleLocation(Address* handle_location);
StubCache* load_stub_cache() const { return load_stub_cache_; }
StubCache* store_stub_cache() const { return store_stub_cache_; }
......
......@@ -43,7 +43,7 @@ bool HandleBase::IsDereferenceAllowed() const {
RootsTable::IsImmortalImmovable(root_index)) {
return true;
}
if (isolate->IsBuiltinsTableHandleLocation(location_)) return true;
if (isolate->IsBuiltinTableHandleLocation(location_)) return true;
if (!AllowHandleDereference::IsAllowed()) return false;
LocalHeap* local_heap = isolate->CurrentLocalHeap();
......
......@@ -4653,21 +4653,20 @@ void Heap::ZapCodeObject(Address start_address, int size_in_bytes) {
Code Heap::builtin(Builtin builtin) {
DCHECK(Builtins::IsBuiltinId(builtin));
return Code::cast(
Object(isolate()->builtins_table()[static_cast<int>(builtin)]));
Object(isolate()->builtin_table()[static_cast<int>(builtin)]));
}
Address Heap::builtin_address(Builtin builtin) {
const int index = static_cast<int>(builtin);
const int index = Builtins::ToInt(builtin);
DCHECK(Builtins::IsBuiltinId(builtin) || index == Builtins::kBuiltinCount);
return reinterpret_cast<Address>(&isolate()->builtins_table()[index]);
return reinterpret_cast<Address>(&isolate()->builtin_table()[index]);
}
void Heap::set_builtin(Builtin builtin, Code code) {
DCHECK(Builtins::IsBuiltinId(builtin));
DCHECK(Internals::HasHeapObjectTag(code.ptr()));
// The given builtin may be completely uninitialized thus we cannot check its
// type here.
isolate()->builtins_table()[static_cast<int>(builtin)] = code.ptr();
// The given builtin may be uninitialized thus we cannot check its type here.
isolate()->builtin_table()[Builtins::ToInt(builtin)] = code.ptr();
}
void Heap::IterateWeakRoots(RootVisitor* v, base::EnumSet<SkipRoot> options) {
......
......@@ -217,7 +217,8 @@ inline T RoundingAverageUnsigned(T a, T b) {
//
// DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, MAP_FIELDS)
//
#define DEFINE_ONE_FIELD_OFFSET(Name, Size) Name, Name##End = Name + (Size)-1,
#define DEFINE_ONE_FIELD_OFFSET(Name, Size, ...) \
Name, Name##End = Name + (Size)-1,
#define DEFINE_FIELD_OFFSET_CONSTANTS(StartOffset, LIST_MACRO) \
enum { \
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment