Commit 06af754c authored by Jakob Gruber, committed by V8 LUCI CQ

[isolate-data] Split builtin tables into tiers

.. for more efficient access to builtins from generated code.

Root-relative accesses tend to be faster and produce more compact
code when the root-relative offset is small. IsolateData contains
a few large tables (roots, external references, builtins), resulting
in very large offsets in general.

This CL starts by splitting the builtin table into tiers: tier 0
is a minimal set of perf-critical builtins that should be cheap to
access. The offset to tier 0 builtins is guaranteed to be small.

The full builtin table also remains in IsolateData for occasions in
which we need to lookup builtins by index.

In future work, we can also split external references and roots into
tiers.

On x64, this reduces deopt exit sizes from 7 to 4 bytes and from 12
to 9 bytes (dynamic map checks / EagerWithResume deopts).

Bug: v8:12203,v8:8661
Change-Id: I5a9ed22b0e00682aca1abcf15892ae1458dbdd70
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3162142
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/main@{#76947}
parent 165e725d
......@@ -224,23 +224,30 @@ class Internals {
static const int kExternalOneByteRepresentationTag = 0x0a;
static const uint32_t kNumIsolateDataSlots = 4;
static const int kStackGuardSize = 7 * kApiSystemPointerSize;
static const int kBuiltinTier0EntryTableSize = 13 * kApiSystemPointerSize;
static const int kBuiltinTier0TableSize = 13 * kApiSystemPointerSize;
// IsolateData layout guarantees.
static const int kIsolateEmbedderDataOffset = 0;
static const int kIsolateCageBaseOffset = 0;
static const int kIsolateStackGuardOffset =
kIsolateCageBaseOffset + kApiSystemPointerSize;
static const int kBuiltinTier0EntryTableOffset =
kIsolateStackGuardOffset + kStackGuardSize;
static const int kBuiltinTier0TableOffset =
kBuiltinTier0EntryTableOffset + kBuiltinTier0EntryTableSize;
static const int kIsolateEmbedderDataOffset =
kBuiltinTier0TableOffset + kBuiltinTier0TableSize;
static const int kIsolateFastCCallCallerFpOffset =
kNumIsolateDataSlots * kApiSystemPointerSize;
kIsolateEmbedderDataOffset + kNumIsolateDataSlots * kApiSystemPointerSize;
static const int kIsolateFastCCallCallerPcOffset =
kIsolateFastCCallCallerFpOffset + kApiSystemPointerSize;
static const int kIsolateFastApiCallTargetOffset =
kIsolateFastCCallCallerPcOffset + kApiSystemPointerSize;
static const int kIsolateCageBaseOffset =
kIsolateFastApiCallTargetOffset + kApiSystemPointerSize;
static const int kIsolateLongTaskStatsCounterOffset =
kIsolateCageBaseOffset + kApiSystemPointerSize;
static const int kIsolateStackGuardOffset =
kIsolateLongTaskStatsCounterOffset + kApiSizetSize;
kIsolateFastApiCallTargetOffset + kApiSystemPointerSize;
static const int kIsolateRootsOffset =
kIsolateStackGuardOffset + 7 * kApiSystemPointerSize;
kIsolateLongTaskStatsCounterOffset + kApiSizetSize;
static const int kExternalPointerTableBufferOffset = 0;
static const int kExternalPointerTableLengthOffset =
......
......@@ -31,16 +31,40 @@ namespace internal {
// TODO(jgruber): Remove DummyDescriptor once all ASM builtins have been
// properly associated with their descriptor.
#define BUILTIN_LIST_BASE(CPP, TFJ, TFC, TFS, TFH, ASM) \
/* GC write barrirer */ \
TFC(RecordWriteEmitRememberedSetSaveFP, WriteBarrier) \
TFC(RecordWriteOmitRememberedSetSaveFP, WriteBarrier) \
TFC(RecordWriteEmitRememberedSetIgnoreFP, WriteBarrier) \
TFC(RecordWriteOmitRememberedSetIgnoreFP, WriteBarrier) \
TFC(EphemeronKeyBarrierSaveFP, WriteBarrier) \
TFC(EphemeronKeyBarrierIgnoreFP, WriteBarrier) \
\
/* TSAN support for stores in generated code.*/ \
// Builtins are additionally split into tiers, where the tier determines the
// distance of the builtins table from the root register within IsolateData.
//
// - Tier 0 (T0) are guaranteed to be close to the root register and can thus
// be accessed efficiently root-relative calls (so not, e.g., calls from
// generated code when short-builtin-calls is on).
// - T1 builtins have no distance guarantees.
//
// Note, this mechanism works only if the set of T0 builtins is kept as small
// as possible. Please, resist the temptation to add your builtin here unless
// there's a very good reason.
#define BUILTIN_LIST_BASE_TIER0(CPP, TFJ, TFC, TFS, TFH, ASM) \
/* Deoptimization entries. */ \
ASM(DeoptimizationEntry_Eager, DeoptimizationEntry) \
ASM(DeoptimizationEntry_Soft, DeoptimizationEntry) \
ASM(DeoptimizationEntry_Bailout, DeoptimizationEntry) \
ASM(DeoptimizationEntry_Lazy, DeoptimizationEntry) \
ASM(DynamicCheckMapsTrampoline, DynamicCheckMaps) \
ASM(DynamicCheckMapsWithFeedbackVectorTrampoline, \
DynamicCheckMapsWithFeedbackVector) \
\
/* GC write barrier. */ \
TFC(RecordWriteEmitRememberedSetSaveFP, WriteBarrier) \
TFC(RecordWriteOmitRememberedSetSaveFP, WriteBarrier) \
TFC(RecordWriteEmitRememberedSetIgnoreFP, WriteBarrier) \
TFC(RecordWriteOmitRememberedSetIgnoreFP, WriteBarrier) \
TFC(EphemeronKeyBarrierSaveFP, WriteBarrier) \
TFC(EphemeronKeyBarrierIgnoreFP, WriteBarrier) \
\
/* Adaptor for CPP builtins. */ \
TFC(AdaptorWithBuiltinExitFrame, CppBuiltinAdaptor)
#define BUILTIN_LIST_BASE_TIER1(CPP, TFJ, TFC, TFS, TFH, ASM) \
/* TSAN support for stores in generated code. */ \
IF_TSAN(TFC, TSANRelaxedStore8IgnoreFP, TSANStore) \
IF_TSAN(TFC, TSANRelaxedStore8SaveFP, TSANStore) \
IF_TSAN(TFC, TSANRelaxedStore16IgnoreFP, TSANStore) \
......@@ -58,15 +82,12 @@ namespace internal {
IF_TSAN(TFC, TSANSeqCstStore64IgnoreFP, TSANStore) \
IF_TSAN(TFC, TSANSeqCstStore64SaveFP, TSANStore) \
\
/* TSAN support for loads in generated code.*/ \
/* TSAN support for loads in generated code. */ \
IF_TSAN(TFC, TSANRelaxedLoad32IgnoreFP, TSANLoad) \
IF_TSAN(TFC, TSANRelaxedLoad32SaveFP, TSANLoad) \
IF_TSAN(TFC, TSANRelaxedLoad64IgnoreFP, TSANLoad) \
IF_TSAN(TFC, TSANRelaxedLoad64SaveFP, TSANLoad) \
\
/* Adaptor for CPP builtin */ \
TFC(AdaptorWithBuiltinExitFrame, CppBuiltinAdaptor) \
\
/* Calls */ \
/* ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList) */ \
ASM(CallFunction_ReceiverIsNullOrUndefined, CallTrampoline) \
......@@ -187,10 +208,6 @@ namespace internal {
TFC(CompileLazyDeoptimizedCode, JSTrampoline) \
TFC(InstantiateAsmJs, JSTrampoline) \
ASM(NotifyDeoptimized, Dummy) \
ASM(DeoptimizationEntry_Eager, DeoptimizationEntry) \
ASM(DeoptimizationEntry_Soft, DeoptimizationEntry) \
ASM(DeoptimizationEntry_Bailout, DeoptimizationEntry) \
ASM(DeoptimizationEntry_Lazy, DeoptimizationEntry) \
\
/* Trampolines called when returning from a deoptimization that expects */ \
/* to continue in a JavaScript builtin to finish the functionality of a */ \
......@@ -282,10 +299,7 @@ namespace internal {
TFH(HasIndexedInterceptorIC, LoadWithVector) \
\
/* Dynamic check maps */ \
ASM(DynamicCheckMapsTrampoline, DynamicCheckMaps) \
TFC(DynamicCheckMaps, DynamicCheckMaps) \
ASM(DynamicCheckMapsWithFeedbackVectorTrampoline, \
DynamicCheckMapsWithFeedbackVector) \
TFC(DynamicCheckMapsWithFeedbackVector, DynamicCheckMapsWithFeedbackVector) \
\
/* Microtask helpers */ \
......@@ -1032,6 +1046,10 @@ namespace internal {
CPP(CallAsyncModuleFulfilled) \
CPP(CallAsyncModuleRejected)
#define BUILTIN_LIST_BASE(CPP, TFJ, TFC, TFS, TFH, ASM) \
BUILTIN_LIST_BASE_TIER0(CPP, TFJ, TFC, TFS, TFH, ASM) \
BUILTIN_LIST_BASE_TIER1(CPP, TFJ, TFC, TFS, TFH, ASM)
#ifdef V8_INTL_SUPPORT
#define BUILTIN_LIST_INTL(CPP, TFJ, TFS) \
/* ecma402 #sec-intl.collator */ \
......@@ -1218,6 +1236,17 @@ namespace internal {
BUILTIN_LIST_INTL(CPP, TFJ, TFS) \
BUILTIN_LIST_BYTECODE_HANDLERS(BCH)
// See the comment on top of BUILTIN_LIST_BASE_TIER0 for an explanation of
// tiers.
#define BUILTIN_LIST_TIER0(CPP, TFJ, TFC, TFS, TFH, BCH, ASM) \
BUILTIN_LIST_BASE_TIER0(CPP, TFJ, TFC, TFS, TFH, ASM)
#define BUILTIN_LIST_TIER1(CPP, TFJ, TFC, TFS, TFH, BCH, ASM) \
BUILTIN_LIST_BASE_TIER1(CPP, TFJ, TFC, TFS, TFH, ASM) \
BUILTIN_LIST_FROM_TORQUE(CPP, TFJ, TFC, TFS, TFH, ASM) \
BUILTIN_LIST_INTL(CPP, TFJ, TFS) \
BUILTIN_LIST_BYTECODE_HANDLERS(BCH)
// The exception thrown in the following builtins are caught
// internally and result in a promise rejection.
#define BUILTIN_PROMISE_REJECTION_PREDICTION_LIST(V) \
......
......@@ -92,7 +92,7 @@ BytecodeOffset Builtins::GetContinuationBytecodeOffset(Builtin builtin) {
DCHECK(Builtins::KindOf(builtin) == TFJ || Builtins::KindOf(builtin) == TFC ||
Builtins::KindOf(builtin) == TFS);
return BytecodeOffset(BytecodeOffset::kFirstBuiltinContinuationId +
static_cast<int>(builtin));
ToInt(builtin));
}
Builtin Builtins::GetBuiltinFromBytecodeOffset(BytecodeOffset id) {
......@@ -182,7 +182,7 @@ Handle<Code> Builtins::code_handle(Builtin builtin) {
// static
int Builtins::GetStackParameterCount(Builtin builtin) {
DCHECK(Builtins::KindOf(builtin) == TFJ);
return builtin_metadata[static_cast<int>(builtin)].data.parameter_count;
return builtin_metadata[ToInt(builtin)].data.parameter_count;
}
// static
......@@ -224,7 +224,7 @@ bool Builtins::HasJSLinkage(Builtin builtin) {
// static
const char* Builtins::name(Builtin builtin) {
int index = static_cast<int>(builtin);
int index = ToInt(builtin);
DCHECK(IsBuiltinId(index));
return builtin_metadata[index].name;
}
......@@ -262,7 +262,7 @@ void Builtins::PrintBuiltinSize() {
// static
Address Builtins::CppEntryOf(Builtin builtin) {
DCHECK(Builtins::IsCpp(builtin));
return builtin_metadata[static_cast<int>(builtin)].data.cpp_entry;
return builtin_metadata[ToInt(builtin)].data.cpp_entry;
}
// static
......@@ -292,18 +292,24 @@ bool Builtins::IsIsolateIndependentBuiltin(const Code code) {
}
// static
void Builtins::InitializeBuiltinEntryTable(Isolate* isolate) {
EmbeddedData d = EmbeddedData::FromBlob(isolate);
Address* builtin_entry_table = isolate->builtin_entry_table();
for (Builtin builtin = Builtins::kFirst; builtin <= Builtins::kLast;
++builtin) {
// TODO(jgruber,chromium:1020986): Remove the CHECK once the linked issue is
// resolved.
CHECK(
Builtins::IsBuiltinId(isolate->heap()->builtin(builtin).builtin_id()));
DCHECK(isolate->heap()->builtin(builtin).is_off_heap_trampoline());
builtin_entry_table[static_cast<int>(builtin)] =
d.InstructionStartOfBuiltin(builtin);
// Populates the builtin tables held in IsolateData: the full entry table
// (one entry point per builtin, read from the embedded blob) and the small
// tier-0 mirror tables used for cheap root-relative access.
void Builtins::InitializeIsolateDataTables(Isolate* isolate) {
EmbeddedData embedded_data = EmbeddedData::FromBlob(isolate);
IsolateData* isolate_data = isolate->isolate_data();
// The entry table: Code::InstructionStart for every builtin, taken from the
// embedded (off-heap) blob.
for (Builtin i = Builtins::kFirst; i <= Builtins::kLast; ++i) {
DCHECK(Builtins::IsBuiltinId(isolate->heap()->builtin(i).builtin_id()));
DCHECK(isolate->heap()->builtin(i).is_off_heap_trampoline());
isolate_data->builtin_entry_table()[ToInt(i)] =
embedded_data.InstructionStartOfBuiltin(i);
}
// T0 tables: copies of the corresponding full-table slots. This relies on
// tier-0 builtins occupying the id range [kFirst, kLastTier0], i.e. being a
// prefix of the full builtin list, so the same index works in both tables.
for (Builtin i = Builtins::kFirst; i <= Builtins::kLastTier0; ++i) {
const int ii = ToInt(i);
isolate_data->builtin_tier0_entry_table()[ii] =
isolate_data->builtin_entry_table()[ii];
isolate_data->builtin_tier0_table()[ii] = isolate_data->builtin_table()[ii];
}
}
......@@ -317,7 +323,7 @@ void Builtins::EmitCodeCreateEvents(Isolate* isolate) {
Address* builtins = isolate->builtin_table();
int i = 0;
HandleScope scope(isolate);
for (; i < static_cast<int>(Builtin::kFirstBytecodeHandler); i++) {
for (; i < ToInt(Builtin::kFirstBytecodeHandler); i++) {
Handle<AbstractCode> code(AbstractCode::cast(Object(builtins[i])), isolate);
PROFILE(isolate, CodeCreateEvent(CodeEventListener::BUILTIN_TAG, code,
Builtins::name(FromInt(i))));
......@@ -420,7 +426,7 @@ Handle<ByteArray> Builtins::GenerateOffHeapTrampolineRelocInfo(
Builtins::Kind Builtins::KindOf(Builtin builtin) {
DCHECK(IsBuiltinId(builtin));
return builtin_metadata[static_cast<int>(builtin)].kind;
return builtin_metadata[ToInt(builtin)].kind;
}
// static
......
......@@ -74,10 +74,14 @@ class Builtins {
#define ADD_ONE(Name, ...) +1
static constexpr int kBuiltinCount = 0 BUILTIN_LIST(
ADD_ONE, ADD_ONE, ADD_ONE, ADD_ONE, ADD_ONE, ADD_ONE, ADD_ONE);
static constexpr int kBuiltinTier0Count = 0 BUILTIN_LIST_TIER0(
ADD_ONE, ADD_ONE, ADD_ONE, ADD_ONE, ADD_ONE, ADD_ONE, ADD_ONE);
#undef ADD_ONE
static constexpr Builtin kFirst = static_cast<Builtin>(0);
static constexpr Builtin kLast = static_cast<Builtin>(kBuiltinCount - 1);
static constexpr Builtin kLastTier0 =
static_cast<Builtin>(kBuiltinTier0Count - 1);
static constexpr int kFirstWideBytecodeHandler =
static_cast<int>(Builtin::kFirstBytecodeHandler) +
......@@ -96,6 +100,9 @@ class Builtins {
return static_cast<uint32_t>(maybe_id) <
static_cast<uint32_t>(kBuiltinCount);
}
// Returns true iff |builtin| is a valid builtin id belonging to tier 0,
// i.e. the set of builtins whose IsolateData table slots are guaranteed to
// be close to the root register (see BUILTIN_LIST_BASE_TIER0). Relies on
// tier-0 ids forming the prefix [0, kLastTier0] of the builtin id range.
static constexpr bool IsTier0(Builtin builtin) {
return builtin <= kLastTier0 && IsBuiltinId(builtin);
}
static constexpr Builtin FromInt(int id) {
DCHECK(IsBuiltinId(id));
......@@ -199,9 +206,7 @@ class Builtins {
return kAllBuiltinsAreIsolateIndependent;
}
// Initializes the table of builtin entry points based on the current contents
// of the builtins table.
static void InitializeBuiltinEntryTable(Isolate* isolate);
static void InitializeIsolateDataTables(Isolate* isolate);
// Emits a CodeCreateEvent for every builtin.
static void EmitCodeCreateEvents(Isolate* isolate);
......
......@@ -1067,7 +1067,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
~BlockConstPoolScope() { assem_->EndBlockConstPool(); }
private:
Assembler* assem_;
Assembler* const assem_;
DISALLOW_IMPLICIT_CONSTRUCTORS(BlockConstPoolScope);
};
......@@ -1250,6 +1250,12 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
(pc_offset() < no_const_pool_before_);
}
// Returns true iff 32-bit constants are queued for emission into the
// constant pool. Used (e.g. by CallForDeoptimization) to assert that no
// constant pool can be emitted in the middle of a fixed-size code sequence.
bool has_pending_constants() const {
bool result = !pending_32_bit_constants_.empty();
// first_const_pool_32_use_ records the first use of a pending constant
// (-1 when none); it must agree with the pending list being non-empty.
DCHECK_EQ(result, first_const_pool_32_use_ != -1);
return result;
}
bool VfpRegisterIsAvailable(DwVfpRegister reg) {
DCHECK(reg.is_valid());
return IsEnabled(VFP32DREGS) ||
......
......@@ -182,7 +182,7 @@ void TurboAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
// size s.t. pc-relative calls may be used.
UseScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
int offset = IsolateData::builtin_entry_slot_offset(code->builtin_id());
int offset = IsolateData::BuiltinEntrySlotOffset(code->builtin_id());
ldr(scratch, MemOperand(kRootRegister, offset));
Jump(scratch, cond);
return;
......@@ -269,7 +269,7 @@ void TurboAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
// This branch is taken only for specific cctests, where we force isolate
// creation at runtime. At this point, Code space isn't restricted to a
// size s.t. pc-relative calls may be used.
int offset = IsolateData::builtin_entry_slot_offset(code->builtin_id());
int offset = IsolateData::BuiltinEntrySlotOffset(code->builtin_id());
ldr(ip, MemOperand(kRootRegister, offset));
Call(ip, cond);
return;
......@@ -315,7 +315,7 @@ MemOperand TurboAssembler::EntryFromBuiltinAsOperand(Builtin builtin) {
ASM_CODE_COMMENT(this);
DCHECK(root_array_available());
return MemOperand(kRootRegister,
IsolateData::builtin_entry_slot_offset(builtin));
IsolateData::BuiltinEntrySlotOffset(builtin));
}
void TurboAssembler::CallBuiltin(Builtin builtin, Condition cond) {
......@@ -2671,9 +2671,15 @@ void TurboAssembler::CallForDeoptimization(Builtin target, int, Label* exit,
DeoptimizeKind kind, Label* ret,
Label*) {
ASM_CODE_COMMENT(this);
// All constants should have been emitted prior to deoptimization exit
// emission. See PrepareForDeoptimizationExits.
DCHECK(!has_pending_constants());
BlockConstPoolScope block_const_pool(this);
ldr(ip, MemOperand(kRootRegister,
IsolateData::builtin_entry_slot_offset(target)));
CHECK_LE(target, Builtins::kLastTier0);
ldr(ip,
MemOperand(kRootRegister, IsolateData::BuiltinEntrySlotOffset(target)));
Call(ip);
DCHECK_EQ(SizeOfCodeGeneratedSince(exit),
(kind == DeoptimizeKind::kLazy)
......@@ -2685,6 +2691,9 @@ void TurboAssembler::CallForDeoptimization(Builtin target, int, Label* exit,
DCHECK_EQ(SizeOfCodeGeneratedSince(exit),
Deoptimizer::kEagerWithResumeBeforeArgsSize);
}
// The above code must not emit constants either.
DCHECK(!has_pending_constants());
}
void TurboAssembler::Trap() { stop(); }
......
......@@ -1968,7 +1968,7 @@ MemOperand TurboAssembler::EntryFromBuiltinAsOperand(Builtin builtin) {
ASM_CODE_COMMENT(this);
DCHECK(root_array_available());
return MemOperand(kRootRegister,
IsolateData::builtin_entry_slot_offset(builtin));
IsolateData::BuiltinEntrySlotOffset(builtin));
}
void TurboAssembler::CallBuiltinByIndex(Register builtin_index) {
......
......@@ -1895,8 +1895,7 @@ void TurboAssembler::CallBuiltin(Builtin builtin) {
Operand TurboAssembler::EntryFromBuiltinAsOperand(Builtin builtin) {
ASM_CODE_COMMENT(this);
return Operand(kRootRegister,
IsolateData::builtin_entry_slot_offset(builtin));
return Operand(kRootRegister, IsolateData::BuiltinEntrySlotOffset(builtin));
}
void TurboAssembler::LoadCodeObjectEntry(Register destination,
......
......@@ -97,7 +97,7 @@ int32_t TurboAssemblerBase::RootRegisterOffsetForRootIndex(
// static
int32_t TurboAssemblerBase::RootRegisterOffsetForBuiltin(Builtin builtin) {
return IsolateData::builtin_slot_offset(builtin);
return IsolateData::BuiltinSlotOffset(builtin);
}
// static
......
......@@ -1901,8 +1901,7 @@ void TurboAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
Operand TurboAssembler::EntryFromBuiltinAsOperand(Builtin builtin) {
DCHECK(root_array_available());
return Operand(kRootRegister,
IsolateData::builtin_entry_slot_offset(builtin));
return Operand(kRootRegister, IsolateData::BuiltinEntrySlotOffset(builtin));
}
Operand TurboAssembler::EntryFromBuiltinIndexAsOperand(Register builtin_index) {
......
......@@ -3589,7 +3589,7 @@ Node* WasmGraphBuilder::BuildCallToRuntimeWithContext(Runtime::FunctionId f,
Builtin::kCEntry_Return1_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit;
Node* centry_stub =
gasm_->LoadFromObject(MachineType::Pointer(), isolate_root,
IsolateData::builtin_slot_offset(centry_id));
IsolateData::BuiltinSlotOffset(centry_id));
// TODO(titzer): allow arbitrary number of runtime arguments
// At the moment we only allow 5 parameters. If more parameters are needed,
// increase this constant accordingly.
......
......@@ -3,10 +3,23 @@
// found in the LICENSE file.
#include "src/deoptimizer/deoptimizer.h"
#include "src/execution/isolate-data.h"
namespace v8 {
namespace internal {
// The deopt exit sizes below depend on the following IsolateData layout
// guarantees:
#define ASSERT_OFFSET(BuiltinName) \
STATIC_ASSERT(IsolateData::builtin_tier0_entry_table_offset() + \
Builtins::ToInt(BuiltinName) * kSystemPointerSize <= \
0x1000)
ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Eager);
ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Lazy);
ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Soft);
ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Bailout);
#undef ASSERT_OFFSET
const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
const int Deoptimizer::kNonLazyDeoptExitSize = 2 * kInstrSize;
const int Deoptimizer::kLazyDeoptExitSize = 2 * kInstrSize;
......
......@@ -5,14 +5,27 @@
#if V8_TARGET_ARCH_X64
#include "src/deoptimizer/deoptimizer.h"
#include "src/execution/isolate-data.h"
namespace v8 {
namespace internal {
// The deopt exit sizes below depend on the following IsolateData layout
// guarantees:
#define ASSERT_OFFSET(BuiltinName) \
STATIC_ASSERT(IsolateData::builtin_tier0_entry_table_offset() + \
Builtins::ToInt(BuiltinName) * kSystemPointerSize <= \
0x7F)
ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Eager);
ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Lazy);
ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Soft);
ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Bailout);
#undef ASSERT_OFFSET
const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
const int Deoptimizer::kNonLazyDeoptExitSize = 7;
const int Deoptimizer::kLazyDeoptExitSize = 7;
const int Deoptimizer::kEagerWithResumeBeforeArgsSize = 12;
const int Deoptimizer::kNonLazyDeoptExitSize = 4;
const int Deoptimizer::kLazyDeoptExitSize = 4;
const int Deoptimizer::kEagerWithResumeBeforeArgsSize = 9;
const int Deoptimizer::kEagerWithResumeDeoptExitSize =
kEagerWithResumeBeforeArgsSize + 2 * kSystemPointerSize;
const int Deoptimizer::kEagerWithResumeImmedArgs1PcOffset = 5;
......
......@@ -128,6 +128,9 @@ const char* V8NameConverter::RootRelativeName(int offset) const {
const unsigned kRootsTableSize = sizeof(RootsTable);
const int kExtRefsTableStart = IsolateData::external_reference_table_offset();
const unsigned kExtRefsTableSize = ExternalReferenceTable::kSizeInBytes;
const int kBuiltinTier0TableStart = IsolateData::builtin_tier0_table_offset();
const unsigned kBuiltinTier0TableSize =
Builtins::kBuiltinTier0Count * kSystemPointerSize;
const int kBuiltinTableStart = IsolateData::builtin_table_offset();
const unsigned kBuiltinTableSize =
Builtins::kBuiltinCount * kSystemPointerSize;
......@@ -143,7 +146,6 @@ const char* V8NameConverter::RootRelativeName(int offset) const {
SNPrintF(v8_buffer_, "root (%s)", RootsTable::name(root_index));
return v8_buffer_.begin();
} else if (static_cast<unsigned>(offset - kExtRefsTableStart) <
kExtRefsTableSize) {
uint32_t offset_in_extref_table = offset - kExtRefsTableStart;
......@@ -162,7 +164,15 @@ const char* V8NameConverter::RootRelativeName(int offset) const {
isolate_->external_reference_table()->NameFromOffset(
offset_in_extref_table));
return v8_buffer_.begin();
} else if (static_cast<unsigned>(offset - kBuiltinTier0TableStart) <
kBuiltinTier0TableSize) {
uint32_t offset_in_builtins_table = (offset - kBuiltinTier0TableStart);
Builtin builtin =
Builtins::FromInt(offset_in_builtins_table / kSystemPointerSize);
const char* name = Builtins::name(builtin);
SNPrintF(v8_buffer_, "builtin (%s)", name);
return v8_buffer_.begin();
} else if (static_cast<unsigned>(offset - kBuiltinTableStart) <
kBuiltinTableSize) {
uint32_t offset_in_builtins_table = (offset - kBuiltinTableStart);
......@@ -172,7 +182,6 @@ const char* V8NameConverter::RootRelativeName(int offset) const {
const char* name = Builtins::name(builtin);
SNPrintF(v8_buffer_, "builtin (%s)", name);
return v8_buffer_.begin();
} else {
// It must be a direct access to one of the external values.
if (directly_accessed_external_refs_.empty()) {
......
......@@ -22,14 +22,23 @@ class Isolate;
// IsolateData fields, defined as: V(Offset, Size, Name)
#define ISOLATE_DATA_FIELDS(V) \
/* Misc. fields. */ \
V(kCageBaseOffset, kSystemPointerSize, cage_base) \
V(kStackGuardOffset, StackGuard::kSizeInBytes, stack_guard) \
/* Tier 0 tables (small but fast access). */ \
V(kBuiltinTier0EntryTableOffset, \
Builtins::kBuiltinTier0Count* kSystemPointerSize, \
builtin_tier0_entry_table) \
V(kBuiltinsTier0TableOffset, \
Builtins::kBuiltinTier0Count* kSystemPointerSize, builtin_tier0_table) \
/* Misc. fields. */ \
V(kEmbedderDataOffset, Internals::kNumIsolateDataSlots* kSystemPointerSize, \
embedder_data) \
V(kFastCCallCallerFPOffset, kSystemPointerSize, fast_c_call_caller_fp) \
V(kFastCCallCallerPCOffset, kSystemPointerSize, fast_c_call_caller_pc) \
V(kFastApiCallTargetOffset, kSystemPointerSize, fast_api_call_target) \
V(kCageBaseOffset, kSystemPointerSize, cage_base) \
V(kLongTaskStatsCounterOffset, kSizetSize, long_task_stats_counter) \
V(kStackGuardOffset, StackGuard::kSizeInBytes, stack_guard) \
/* Full tables (arbitrary size, potentially slower access). */ \
V(kRootsTableOffset, RootsTable::kEntriesCount* kSystemPointerSize, \
roots_table) \
V(kExternalReferenceTableOffset, ExternalReferenceTable::kSizeInBytes, \
......@@ -79,19 +88,20 @@ class IsolateData final {
return roots_table_offset() + RootsTable::offset_of(root_index);
}
static int builtin_entry_slot_offset(Builtin id) {
return builtin_entry_table_offset() +
static constexpr int BuiltinEntrySlotOffset(Builtin id) {
DCHECK(Builtins::IsBuiltinId(id));
return (Builtins::IsTier0(id) ? builtin_tier0_entry_table_offset()
: builtin_entry_table_offset()) +
Builtins::ToInt(id) * kSystemPointerSize;
}
// TODO(ishell): remove in favour of typified id version.
static int builtin_slot_offset(int builtin_index) {
DCHECK(Builtins::IsBuiltinId(builtin_index));
return builtin_table_offset() + builtin_index * kSystemPointerSize;
static constexpr int builtin_slot_offset(int builtin_index) {
return BuiltinSlotOffset(Builtins::FromInt(builtin_index));
}
static int builtin_slot_offset(Builtin id) {
return builtin_table_offset() + Builtins::ToInt(id) * kSystemPointerSize;
static constexpr int BuiltinSlotOffset(Builtin id) {
return (Builtins::IsTier0(id) ? builtin_tier0_table_offset()
: builtin_table_offset()) +
Builtins::ToInt(id) * kSystemPointerSize;
}
#define V(Offset, Size, Name) \
......@@ -105,6 +115,8 @@ class IsolateData final {
// The value of kPointerCageBaseRegister.
Address cage_base() const { return cage_base_; }
StackGuard* stack_guard() { return &stack_guard_; }
Address* builtin_tier0_entry_table() { return builtin_tier0_entry_table_; }
Address* builtin_tier0_table() { return builtin_tier0_table_; }
RootsTable& roots() { return roots_table_; }
const RootsTable& roots() const { return roots_table_; }
ExternalReferenceTable* external_reference_table() {
......@@ -144,6 +156,16 @@ class IsolateData final {
DEFINE_FIELD_OFFSET_CONSTANTS(0, FIELDS)
#undef FIELDS
const Address cage_base_;
// Fields related to the system and JS stack. In particular, this contains
// the stack limit used by stack checks in generated code.
StackGuard stack_guard_;
// Tier 0 tables. See also builtin_entry_table_ and builtin_table_.
Address builtin_tier0_entry_table_[Builtins::kBuiltinTier0Count] = {};
Address builtin_tier0_table_[Builtins::kBuiltinTier0Count] = {};
// These fields are accessed through the API, offsets must be kept in sync
// with v8::internal::Internals (in include/v8-internal.h) constants. The
// layout consistency is verified in Isolate::CheckIsolateLayout() using
......@@ -162,22 +184,16 @@ class IsolateData final {
// generated code.
Address fast_api_call_target_ = kNullAddress;
const Address cage_base_;
// Used for implementation of LongTaskStats. Counts the number of potential
// long tasks.
size_t long_task_stats_counter_ = 0;
// Fields related to the system and JS stack. In particular, this contains
// the stack limit used by stack checks in generated code.
StackGuard stack_guard_;
RootsTable roots_table_;
ExternalReferenceTable external_reference_table_;
ThreadLocalTop thread_local_top_;
// The entry points for all builtins. This corresponds to
// The entry points for builtins. This corresponds to
// Code::InstructionStart() for each Code object in the builtins table below.
// The entry table is in IsolateData for easy access through kRootRegister.
Address builtin_entry_table_[Builtins::kBuiltinCount] = {};
......
......@@ -3054,6 +3054,12 @@ void Isolate::CheckIsolateLayout() {
CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.roots_table_)),
Internals::kIsolateRootsOffset);
STATIC_ASSERT(Internals::kStackGuardSize == sizeof(StackGuard));
STATIC_ASSERT(Internals::kBuiltinTier0TableSize ==
Builtins::kBuiltinTier0Count * kSystemPointerSize);
STATIC_ASSERT(Internals::kBuiltinTier0EntryTableSize ==
Builtins::kBuiltinTier0Count * kSystemPointerSize);
#ifdef V8_HEAP_SANDBOX
CHECK_EQ(static_cast<int>(OFFSET_OF(ExternalPointerTable, buffer_)),
Internals::kExternalPointerTableBufferOffset);
......@@ -3754,7 +3760,7 @@ bool Isolate::Init(SnapshotData* startup_snapshot_data,
delete setup_delegate_;
setup_delegate_ = nullptr;
Builtins::InitializeBuiltinEntryTable(this);
Builtins::InitializeIsolateDataTables(this);
Builtins::EmitCodeCreateEvents(this);
#ifdef DEBUG
......
......@@ -4659,9 +4659,18 @@ Code Heap::builtin(Builtin builtin) {
// Returns the address of |builtin|'s slot in the full builtin table.
// The one-past-the-end index (kBuiltinCount) is permitted so callers can
// form end iterators over the table.
Address Heap::builtin_address(Builtin builtin) {
const int index = Builtins::ToInt(builtin);
DCHECK(Builtins::IsBuiltinId(builtin) || index == Builtins::kBuiltinCount);
// Note: Must return an address within the full builtin_table for
// IterateBuiltins to work.
return reinterpret_cast<Address>(&isolate()->builtin_table()[index]);
}
// Returns the address of |builtin|'s slot in the tier-0 builtin table held
// in IsolateData. Callers are expected to pass a tier-0 builtin id; the
// DCHECK mirrors builtin_address and accepts any valid id or the
// one-past-the-end sentinel (kBuiltinCount) — NOTE(review): it does not
// bound-check against kBuiltinTier0Count, so the tier-0 range is the
// caller's responsibility (see the kLastTier0 loop in IterateBuiltins).
Address Heap::builtin_tier0_address(Builtin builtin) {
const int index = static_cast<int>(builtin);
DCHECK(Builtins::IsBuiltinId(builtin) || index == Builtins::kBuiltinCount);
return reinterpret_cast<Address>(
&isolate()->isolate_data()->builtin_tier0_table()[index]);
}
void Heap::set_builtin(Builtin builtin, Code code) {
DCHECK(Builtins::IsBuiltinId(builtin));
DCHECK(Internals::HasHeapObjectTag(code.ptr()));
......@@ -4897,6 +4906,12 @@ void Heap::IterateBuiltins(RootVisitor* v) {
FullObjectSlot(builtin_address(builtin)));
}
for (Builtin builtin = Builtins::kFirst; builtin <= Builtins::kLastTier0;
++builtin) {
v->VisitRootPointer(Root::kBuiltins, Builtins::name(builtin),
FullObjectSlot(builtin_tier0_address(builtin)));
}
// The entry table doesn't need to be updated since all builtins are embedded.
STATIC_ASSERT(Builtins::AllBuiltinsAreIsolateIndependent());
}
......
......@@ -1053,6 +1053,7 @@ class Heap {
V8_EXPORT_PRIVATE Code builtin(Builtin builtin);
Address builtin_address(Builtin builtin);
Address builtin_tier0_address(Builtin builtin);
void set_builtin(Builtin builtin, Code code);
// ===========================================================================
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment