Commit 06af754c authored by Jakob Gruber, committed by V8 LUCI CQ

[isolate-data] Split builtin tables into tiers

.. for more efficient access to builtins from generated code.

Root-relative accesses tend to be faster and produce more compact
code when the root-relative offset is small. IsolateData contains
a few large tables (roots, external references, builtins), resulting
in very large offsets in general.
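
For illustration, a minimal standalone sketch (not V8 code; the layout
constants are assumptions mirroring this CL) of why offset magnitude
matters: on x64, a root-register-relative displacement in [-128, 127]
encodes in one byte rather than four.

```cpp
#include <cstdio>

constexpr int kSystemPointerSize = 8;
// Assumed layout, mirroring this CL: cage base, then a 7-slot stack guard,
// then the tier-0 entry table.
constexpr int kCageBaseSize = kSystemPointerSize;
constexpr int kStackGuardSize = 7 * kSystemPointerSize;
constexpr int kTier0EntryTableOffset = kCageBaseSize + kStackGuardSize;  // 64

constexpr bool FitsInDisp8(int disp) { return disp >= -128 && disp <= 127; }

int main() {
  // The DeoptimizationEntry_* builtins occupy the first tier-0 slots, so
  // their root-relative displacements stay within disp8 range.
  for (int slot = 0; slot < 4; slot++) {
    int disp = kTier0EntryTableOffset + slot * kSystemPointerSize;
    std::printf("deopt slot %d: disp %d, disp8-encodable: %s\n", slot, disp,
                FitsInDisp8(disp) ? "yes" : "no");
  }
  return 0;
}
```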

This CL starts by splitting the builtin table into tiers: tier 0
is a minimal set of perf-critical builtins that should be cheap to
access. The offset to tier 0 builtins is guaranteed to be small.

The full builtin table also remains in IsolateData for occasions in
which we need to look up builtins by index.

In future work, we can also split external references and roots into
tiers.

On x64, this reduces deopt exit sizes from 7 to 4 bytes and from 12
to 9 bytes (dynamic map checks / EagerWithResume deopts).
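
The byte counts follow from standard x64 encodings (a sketch under that
assumption, not taken from V8 source): a deopt exit is an indirect call
through the root register (r13), and once the slot offset fits in a signed
byte, the 4-byte displacement shrinks to one. The dynamic-map-checks exits
shrink by the same three displacement bytes.

```cpp
#include <cstdint>

// call qword ptr [r13 + disp32]: REX.B, opcode FF /2, ModRM(mod=10), disp32.
constexpr uint8_t kCallRootDisp32[] = {0x41, 0xFF, 0x95,
                                       0x00, 0x00, 0x00, 0x00};
// call qword ptr [r13 + disp8]: REX.B, opcode FF /2, ModRM(mod=01), disp8.
constexpr uint8_t kCallRootDisp8[] = {0x41, 0xFF, 0x55, 0x00};

static_assert(sizeof(kCallRootDisp32) == 7, "old kNonLazyDeoptExitSize");
static_assert(sizeof(kCallRootDisp8) == 4, "new kNonLazyDeoptExitSize");

int main() { return 0; }
```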

Bug: v8:12203,v8:8661
Change-Id: I5a9ed22b0e00682aca1abcf15892ae1458dbdd70
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3162142
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/main@{#76947}
parent 165e725d
@@ -224,23 +224,30 @@ class Internals {
   static const int kExternalOneByteRepresentationTag = 0x0a;
 
   static const uint32_t kNumIsolateDataSlots = 4;
+  static const int kStackGuardSize = 7 * kApiSystemPointerSize;
+  static const int kBuiltinTier0EntryTableSize = 13 * kApiSystemPointerSize;
+  static const int kBuiltinTier0TableSize = 13 * kApiSystemPointerSize;
 
   // IsolateData layout guarantees.
-  static const int kIsolateEmbedderDataOffset = 0;
+  static const int kIsolateCageBaseOffset = 0;
+  static const int kIsolateStackGuardOffset =
+      kIsolateCageBaseOffset + kApiSystemPointerSize;
+  static const int kBuiltinTier0EntryTableOffset =
+      kIsolateStackGuardOffset + kStackGuardSize;
+  static const int kBuiltinTier0TableOffset =
+      kBuiltinTier0EntryTableOffset + kBuiltinTier0EntryTableSize;
+  static const int kIsolateEmbedderDataOffset =
+      kBuiltinTier0TableOffset + kBuiltinTier0TableSize;
   static const int kIsolateFastCCallCallerFpOffset =
-      kNumIsolateDataSlots * kApiSystemPointerSize;
+      kIsolateEmbedderDataOffset + kNumIsolateDataSlots * kApiSystemPointerSize;
   static const int kIsolateFastCCallCallerPcOffset =
       kIsolateFastCCallCallerFpOffset + kApiSystemPointerSize;
   static const int kIsolateFastApiCallTargetOffset =
       kIsolateFastCCallCallerPcOffset + kApiSystemPointerSize;
-  static const int kIsolateCageBaseOffset =
-      kIsolateFastApiCallTargetOffset + kApiSystemPointerSize;
   static const int kIsolateLongTaskStatsCounterOffset =
-      kIsolateCageBaseOffset + kApiSystemPointerSize;
+      kIsolateFastApiCallTargetOffset + kApiSystemPointerSize;
-  static const int kIsolateStackGuardOffset =
-      kIsolateLongTaskStatsCounterOffset + kApiSizetSize;
   static const int kIsolateRootsOffset =
-      kIsolateStackGuardOffset + 7 * kApiSystemPointerSize;
+      kIsolateLongTaskStatsCounterOffset + kApiSizetSize;
 
   static const int kExternalPointerTableBufferOffset = 0;
   static const int kExternalPointerTableLengthOffset =
...
@@ -31,16 +31,40 @@ namespace internal {
 // TODO(jgruber): Remove DummyDescriptor once all ASM builtins have been
 // properly associated with their descriptor.
 
-#define BUILTIN_LIST_BASE(CPP, TFJ, TFC, TFS, TFH, ASM)       \
-  /* GC write barrirer */                                     \
-  TFC(RecordWriteEmitRememberedSetSaveFP, WriteBarrier)       \
-  TFC(RecordWriteOmitRememberedSetSaveFP, WriteBarrier)       \
-  TFC(RecordWriteEmitRememberedSetIgnoreFP, WriteBarrier)     \
-  TFC(RecordWriteOmitRememberedSetIgnoreFP, WriteBarrier)     \
-  TFC(EphemeronKeyBarrierSaveFP, WriteBarrier)                \
-  TFC(EphemeronKeyBarrierIgnoreFP, WriteBarrier)              \
-                                                              \
-  /* TSAN support for stores in generated code.*/             \
+// Builtins are additionally split into tiers, where the tier determines the
+// distance of the builtins table from the root register within IsolateData.
+//
+// - Tier 0 (T0) builtins are guaranteed to be close to the root register and
+//   can thus be accessed efficiently through root-relative calls (unlike,
+//   e.g., calls from generated code when short-builtin-calls is on).
+// - T1 builtins have no distance guarantees.
+//
+// Note that this mechanism works only if the set of T0 builtins is kept as
+// small as possible. Please resist the temptation to add your builtin here
+// unless there's a very good reason.
+#define BUILTIN_LIST_BASE_TIER0(CPP, TFJ, TFC, TFS, TFH, ASM) \
+  /* Deoptimization entries. */                               \
+  ASM(DeoptimizationEntry_Eager, DeoptimizationEntry)         \
+  ASM(DeoptimizationEntry_Soft, DeoptimizationEntry)          \
+  ASM(DeoptimizationEntry_Bailout, DeoptimizationEntry)       \
+  ASM(DeoptimizationEntry_Lazy, DeoptimizationEntry)          \
+  ASM(DynamicCheckMapsTrampoline, DynamicCheckMaps)           \
+  ASM(DynamicCheckMapsWithFeedbackVectorTrampoline,           \
+      DynamicCheckMapsWithFeedbackVector)                     \
+                                                              \
+  /* GC write barrier. */                                     \
+  TFC(RecordWriteEmitRememberedSetSaveFP, WriteBarrier)       \
+  TFC(RecordWriteOmitRememberedSetSaveFP, WriteBarrier)       \
+  TFC(RecordWriteEmitRememberedSetIgnoreFP, WriteBarrier)     \
+  TFC(RecordWriteOmitRememberedSetIgnoreFP, WriteBarrier)     \
+  TFC(EphemeronKeyBarrierSaveFP, WriteBarrier)                \
+  TFC(EphemeronKeyBarrierIgnoreFP, WriteBarrier)              \
+                                                              \
+  /* Adaptor for CPP builtins. */                             \
+  TFC(AdaptorWithBuiltinExitFrame, CppBuiltinAdaptor)
+
+#define BUILTIN_LIST_BASE_TIER1(CPP, TFJ, TFC, TFS, TFH, ASM) \
+  /* TSAN support for stores in generated code. */            \
   IF_TSAN(TFC, TSANRelaxedStore8IgnoreFP, TSANStore)          \
   IF_TSAN(TFC, TSANRelaxedStore8SaveFP, TSANStore)            \
   IF_TSAN(TFC, TSANRelaxedStore16IgnoreFP, TSANStore)         \
...
@@ -58,15 +82,12 @@ namespace internal {
   IF_TSAN(TFC, TSANSeqCstStore64IgnoreFP, TSANStore)          \
   IF_TSAN(TFC, TSANSeqCstStore64SaveFP, TSANStore)            \
                                                               \
-  /* TSAN support for loads in generated code.*/              \
+  /* TSAN support for loads in generated code. */             \
   IF_TSAN(TFC, TSANRelaxedLoad32IgnoreFP, TSANLoad)           \
   IF_TSAN(TFC, TSANRelaxedLoad32SaveFP, TSANLoad)             \
   IF_TSAN(TFC, TSANRelaxedLoad64IgnoreFP, TSANLoad)           \
   IF_TSAN(TFC, TSANRelaxedLoad64SaveFP, TSANLoad)             \
                                                               \
-  /* Adaptor for CPP builtin */                               \
-  TFC(AdaptorWithBuiltinExitFrame, CppBuiltinAdaptor)         \
-                                                              \
   /* Calls */                                                 \
   /* ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList) */ \
   ASM(CallFunction_ReceiverIsNullOrUndefined, CallTrampoline) \
...
@@ -187,10 +208,6 @@ namespace internal {
   TFC(CompileLazyDeoptimizedCode, JSTrampoline)               \
   TFC(InstantiateAsmJs, JSTrampoline)                         \
   ASM(NotifyDeoptimized, Dummy)                               \
-  ASM(DeoptimizationEntry_Eager, DeoptimizationEntry)         \
-  ASM(DeoptimizationEntry_Soft, DeoptimizationEntry)          \
-  ASM(DeoptimizationEntry_Bailout, DeoptimizationEntry)       \
-  ASM(DeoptimizationEntry_Lazy, DeoptimizationEntry)          \
                                                               \
   /* Trampolines called when returning from a deoptimization that expects */ \
   /* to continue in a JavaScript builtin to finish the functionality of a */ \
...
@@ -282,10 +299,7 @@ namespace internal {
   TFH(HasIndexedInterceptorIC, LoadWithVector)                \
                                                               \
   /* Dynamic check maps */                                    \
-  ASM(DynamicCheckMapsTrampoline, DynamicCheckMaps)           \
   TFC(DynamicCheckMaps, DynamicCheckMaps)                     \
-  ASM(DynamicCheckMapsWithFeedbackVectorTrampoline,           \
-      DynamicCheckMapsWithFeedbackVector)                     \
   TFC(DynamicCheckMapsWithFeedbackVector, DynamicCheckMapsWithFeedbackVector) \
                                                               \
   /* Microtask helpers */                                     \
...
@@ -1032,6 +1046,10 @@ namespace internal {
   CPP(CallAsyncModuleFulfilled)                               \
   CPP(CallAsyncModuleRejected)
 
+#define BUILTIN_LIST_BASE(CPP, TFJ, TFC, TFS, TFH, ASM) \
+  BUILTIN_LIST_BASE_TIER0(CPP, TFJ, TFC, TFS, TFH, ASM) \
+  BUILTIN_LIST_BASE_TIER1(CPP, TFJ, TFC, TFS, TFH, ASM)
+
 #ifdef V8_INTL_SUPPORT
 #define BUILTIN_LIST_INTL(CPP, TFJ, TFS) \
   /* ecma402 #sec-intl.collator */       \
...
@@ -1218,6 +1236,17 @@ namespace internal {
   BUILTIN_LIST_INTL(CPP, TFJ, TFS)       \
   BUILTIN_LIST_BYTECODE_HANDLERS(BCH)
 
+// See the comment on top of BUILTIN_LIST_BASE_TIER0 for an explanation of
+// tiers.
+#define BUILTIN_LIST_TIER0(CPP, TFJ, TFC, TFS, TFH, BCH, ASM) \
+  BUILTIN_LIST_BASE_TIER0(CPP, TFJ, TFC, TFS, TFH, ASM)
+
+#define BUILTIN_LIST_TIER1(CPP, TFJ, TFC, TFS, TFH, BCH, ASM) \
+  BUILTIN_LIST_BASE_TIER1(CPP, TFJ, TFC, TFS, TFH, ASM)       \
+  BUILTIN_LIST_FROM_TORQUE(CPP, TFJ, TFC, TFS, TFH, ASM)      \
+  BUILTIN_LIST_INTL(CPP, TFJ, TFS)                            \
+  BUILTIN_LIST_BYTECODE_HANDLERS(BCH)
+
 // The exceptions thrown in the following builtins are caught
 // internally and result in a promise rejection.
 #define BUILTIN_PROMISE_REJECTION_PREDICTION_LIST(V) \
...
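
As a side note, the counting and ordering the new lists rely on can be seen
in a small standalone sketch of the same X-macro pattern (all names here are
invented): because the full list is the concatenation TIER0 + TIER1, tier-0
builtins always receive the lowest ids, which is what makes kLastTier0 and
IsTier0() in builtins.h work.

```cpp
#define EXAMPLE_LIST_TIER0(V) V(DeoptEager) V(DeoptLazy)
#define EXAMPLE_LIST_TIER1(V) V(ArrayPush) V(ObjectKeys)
#define EXAMPLE_LIST(V) EXAMPLE_LIST_TIER0(V) EXAMPLE_LIST_TIER1(V)

#define ADD_ONE(Name) +1
constexpr int kExampleTier0Count = 0 EXAMPLE_LIST_TIER0(ADD_ONE);
constexpr int kExampleCount = 0 EXAMPLE_LIST(ADD_ONE);
#undef ADD_ONE

enum class ExampleBuiltin {
#define DEF_ENUM(Name) k##Name,
  EXAMPLE_LIST(DEF_ENUM)
#undef DEF_ENUM
};

static_assert(kExampleTier0Count == 2, "tier-0 entries are counted alone");
static_assert(kExampleCount == 4, "the full list is the concatenation");
static_assert(static_cast<int>(ExampleBuiltin::kDeoptLazy) ==
                  kExampleTier0Count - 1,
              "tier-0 builtins occupy the lowest ids");

int main() { return 0; }
```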
@@ -92,7 +92,7 @@ BytecodeOffset Builtins::GetContinuationBytecodeOffset(Builtin builtin) {
   DCHECK(Builtins::KindOf(builtin) == TFJ || Builtins::KindOf(builtin) == TFC ||
          Builtins::KindOf(builtin) == TFS);
   return BytecodeOffset(BytecodeOffset::kFirstBuiltinContinuationId +
-                        static_cast<int>(builtin));
+                        ToInt(builtin));
 }
 
 Builtin Builtins::GetBuiltinFromBytecodeOffset(BytecodeOffset id) {
...
@@ -182,7 +182,7 @@ Handle<Code> Builtins::code_handle(Builtin builtin) {
 // static
 int Builtins::GetStackParameterCount(Builtin builtin) {
   DCHECK(Builtins::KindOf(builtin) == TFJ);
-  return builtin_metadata[static_cast<int>(builtin)].data.parameter_count;
+  return builtin_metadata[ToInt(builtin)].data.parameter_count;
 }
 
 // static
...
@@ -224,7 +224,7 @@ bool Builtins::HasJSLinkage(Builtin builtin) {
 // static
 const char* Builtins::name(Builtin builtin) {
-  int index = static_cast<int>(builtin);
+  int index = ToInt(builtin);
   DCHECK(IsBuiltinId(index));
   return builtin_metadata[index].name;
 }
...
@@ -262,7 +262,7 @@ void Builtins::PrintBuiltinSize() {
 // static
 Address Builtins::CppEntryOf(Builtin builtin) {
   DCHECK(Builtins::IsCpp(builtin));
-  return builtin_metadata[static_cast<int>(builtin)].data.cpp_entry;
+  return builtin_metadata[ToInt(builtin)].data.cpp_entry;
 }
 
 // static
...
@@ -292,18 +292,24 @@ bool Builtins::IsIsolateIndependentBuiltin(const Code code) {
 }
 
 // static
-void Builtins::InitializeBuiltinEntryTable(Isolate* isolate) {
-  EmbeddedData d = EmbeddedData::FromBlob(isolate);
-  Address* builtin_entry_table = isolate->builtin_entry_table();
-  for (Builtin builtin = Builtins::kFirst; builtin <= Builtins::kLast;
-       ++builtin) {
-    // TODO(jgruber,chromium:1020986): Remove the CHECK once the linked issue is
-    // resolved.
-    CHECK(
-        Builtins::IsBuiltinId(isolate->heap()->builtin(builtin).builtin_id()));
-    DCHECK(isolate->heap()->builtin(builtin).is_off_heap_trampoline());
-    builtin_entry_table[static_cast<int>(builtin)] =
-        d.InstructionStartOfBuiltin(builtin);
+void Builtins::InitializeIsolateDataTables(Isolate* isolate) {
+  EmbeddedData embedded_data = EmbeddedData::FromBlob(isolate);
+  IsolateData* isolate_data = isolate->isolate_data();
+
+  // The entry table.
+  for (Builtin i = Builtins::kFirst; i <= Builtins::kLast; ++i) {
+    DCHECK(Builtins::IsBuiltinId(isolate->heap()->builtin(i).builtin_id()));
+    DCHECK(isolate->heap()->builtin(i).is_off_heap_trampoline());
+    isolate_data->builtin_entry_table()[ToInt(i)] =
+        embedded_data.InstructionStartOfBuiltin(i);
+  }
+
+  // T0 tables.
+  for (Builtin i = Builtins::kFirst; i <= Builtins::kLastTier0; ++i) {
+    const int ii = ToInt(i);
+    isolate_data->builtin_tier0_entry_table()[ii] =
+        isolate_data->builtin_entry_table()[ii];
+    isolate_data->builtin_tier0_table()[ii] = isolate_data->builtin_table()[ii];
   }
 }
...
@@ -317,7 +323,7 @@ void Builtins::EmitCodeCreateEvents(Isolate* isolate) {
   Address* builtins = isolate->builtin_table();
   int i = 0;
   HandleScope scope(isolate);
-  for (; i < static_cast<int>(Builtin::kFirstBytecodeHandler); i++) {
+  for (; i < ToInt(Builtin::kFirstBytecodeHandler); i++) {
     Handle<AbstractCode> code(AbstractCode::cast(Object(builtins[i])), isolate);
     PROFILE(isolate, CodeCreateEvent(CodeEventListener::BUILTIN_TAG, code,
                                      Builtins::name(FromInt(i))));
...
@@ -420,7 +426,7 @@ Handle<ByteArray> Builtins::GenerateOffHeapTrampolineRelocInfo(
 Builtins::Kind Builtins::KindOf(Builtin builtin) {
   DCHECK(IsBuiltinId(builtin));
-  return builtin_metadata[static_cast<int>(builtin)].kind;
+  return builtin_metadata[ToInt(builtin)].kind;
 }
 
 // static
...
@@ -74,10 +74,14 @@ class Builtins {
 #define ADD_ONE(Name, ...) +1
   static constexpr int kBuiltinCount = 0 BUILTIN_LIST(
       ADD_ONE, ADD_ONE, ADD_ONE, ADD_ONE, ADD_ONE, ADD_ONE, ADD_ONE);
+  static constexpr int kBuiltinTier0Count = 0 BUILTIN_LIST_TIER0(
+      ADD_ONE, ADD_ONE, ADD_ONE, ADD_ONE, ADD_ONE, ADD_ONE, ADD_ONE);
 #undef ADD_ONE
 
   static constexpr Builtin kFirst = static_cast<Builtin>(0);
   static constexpr Builtin kLast = static_cast<Builtin>(kBuiltinCount - 1);
+  static constexpr Builtin kLastTier0 =
+      static_cast<Builtin>(kBuiltinTier0Count - 1);
 
   static constexpr int kFirstWideBytecodeHandler =
       static_cast<int>(Builtin::kFirstBytecodeHandler) +
...
@@ -96,6 +100,9 @@ class Builtins {
     return static_cast<uint32_t>(maybe_id) <
            static_cast<uint32_t>(kBuiltinCount);
   }
+  static constexpr bool IsTier0(Builtin builtin) {
+    return builtin <= kLastTier0 && IsBuiltinId(builtin);
+  }
 
   static constexpr Builtin FromInt(int id) {
     DCHECK(IsBuiltinId(id));
...
@@ -199,9 +206,7 @@ class Builtins {
     return kAllBuiltinsAreIsolateIndependent;
   }
 
-  // Initializes the table of builtin entry points based on the current contents
-  // of the builtins table.
-  static void InitializeBuiltinEntryTable(Isolate* isolate);
+  static void InitializeIsolateDataTables(Isolate* isolate);
 
   // Emits a CodeCreateEvent for every builtin.
   static void EmitCodeCreateEvents(Isolate* isolate);
...
@@ -1067,7 +1067,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
     ~BlockConstPoolScope() { assem_->EndBlockConstPool(); }
 
    private:
-    Assembler* assem_;
+    Assembler* const assem_;
 
     DISALLOW_IMPLICIT_CONSTRUCTORS(BlockConstPoolScope);
   };
...
@@ -1250,6 +1250,12 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
            (pc_offset() < no_const_pool_before_);
   }
 
+  bool has_pending_constants() const {
+    bool result = !pending_32_bit_constants_.empty();
+    DCHECK_EQ(result, first_const_pool_32_use_ != -1);
+    return result;
+  }
+
   bool VfpRegisterIsAvailable(DwVfpRegister reg) {
     DCHECK(reg.is_valid());
     return IsEnabled(VFP32DREGS) ||
...
@@ -182,7 +182,7 @@ void TurboAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
     // size s.t. pc-relative calls may be used.
     UseScratchRegisterScope temps(this);
     Register scratch = temps.Acquire();
-    int offset = IsolateData::builtin_entry_slot_offset(code->builtin_id());
+    int offset = IsolateData::BuiltinEntrySlotOffset(code->builtin_id());
     ldr(scratch, MemOperand(kRootRegister, offset));
     Jump(scratch, cond);
     return;
...
@@ -269,7 +269,7 @@ void TurboAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
     // This branch is taken only for specific cctests, where we force isolate
     // creation at runtime. At this point, Code space isn't restricted to a
     // size s.t. pc-relative calls may be used.
-    int offset = IsolateData::builtin_entry_slot_offset(code->builtin_id());
+    int offset = IsolateData::BuiltinEntrySlotOffset(code->builtin_id());
     ldr(ip, MemOperand(kRootRegister, offset));
     Call(ip, cond);
     return;
...
@@ -315,7 +315,7 @@ MemOperand TurboAssembler::EntryFromBuiltinAsOperand(Builtin builtin) {
   ASM_CODE_COMMENT(this);
   DCHECK(root_array_available());
   return MemOperand(kRootRegister,
-                    IsolateData::builtin_entry_slot_offset(builtin));
+                    IsolateData::BuiltinEntrySlotOffset(builtin));
 }
 
 void TurboAssembler::CallBuiltin(Builtin builtin, Condition cond) {
...
@@ -2671,9 +2671,15 @@ void TurboAssembler::CallForDeoptimization(Builtin target, int, Label* exit,
                                            DeoptimizeKind kind, Label* ret,
                                            Label*) {
   ASM_CODE_COMMENT(this);
+
+  // All constants should have been emitted prior to deoptimization exit
+  // emission. See PrepareForDeoptimizationExits.
+  DCHECK(!has_pending_constants());
   BlockConstPoolScope block_const_pool(this);
-  ldr(ip, MemOperand(kRootRegister,
-                     IsolateData::builtin_entry_slot_offset(target)));
+
+  CHECK_LE(target, Builtins::kLastTier0);
+  ldr(ip,
+      MemOperand(kRootRegister, IsolateData::BuiltinEntrySlotOffset(target)));
   Call(ip);
   DCHECK_EQ(SizeOfCodeGeneratedSince(exit),
             (kind == DeoptimizeKind::kLazy)
...
@@ -2685,6 +2691,9 @@ void TurboAssembler::CallForDeoptimization(Builtin target, int, Label* exit,
     DCHECK_EQ(SizeOfCodeGeneratedSince(exit),
               Deoptimizer::kEagerWithResumeBeforeArgsSize);
   }
+
+  // The above code must not emit constants either.
+  DCHECK(!has_pending_constants());
 }
 
 void TurboAssembler::Trap() { stop(); }
...
@@ -1968,7 +1968,7 @@ MemOperand TurboAssembler::EntryFromBuiltinAsOperand(Builtin builtin) {
   ASM_CODE_COMMENT(this);
   DCHECK(root_array_available());
   return MemOperand(kRootRegister,
-                    IsolateData::builtin_entry_slot_offset(builtin));
+                    IsolateData::BuiltinEntrySlotOffset(builtin));
 }
 
 void TurboAssembler::CallBuiltinByIndex(Register builtin_index) {
...
@@ -1895,8 +1895,7 @@ void TurboAssembler::CallBuiltin(Builtin builtin) {
 Operand TurboAssembler::EntryFromBuiltinAsOperand(Builtin builtin) {
   ASM_CODE_COMMENT(this);
-  return Operand(kRootRegister,
-                 IsolateData::builtin_entry_slot_offset(builtin));
+  return Operand(kRootRegister, IsolateData::BuiltinEntrySlotOffset(builtin));
 }
 
 void TurboAssembler::LoadCodeObjectEntry(Register destination,
...
@@ -97,7 +97,7 @@ int32_t TurboAssemblerBase::RootRegisterOffsetForRootIndex(
 // static
 int32_t TurboAssemblerBase::RootRegisterOffsetForBuiltin(Builtin builtin) {
-  return IsolateData::builtin_slot_offset(builtin);
+  return IsolateData::BuiltinSlotOffset(builtin);
 }
 
 // static
...
@@ -1901,8 +1901,7 @@ void TurboAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
 Operand TurboAssembler::EntryFromBuiltinAsOperand(Builtin builtin) {
   DCHECK(root_array_available());
-  return Operand(kRootRegister,
-                 IsolateData::builtin_entry_slot_offset(builtin));
+  return Operand(kRootRegister, IsolateData::BuiltinEntrySlotOffset(builtin));
 }
 
 Operand TurboAssembler::EntryFromBuiltinIndexAsOperand(Register builtin_index) {
...
@@ -3589,7 +3589,7 @@ Node* WasmGraphBuilder::BuildCallToRuntimeWithContext(Runtime::FunctionId f,
       Builtin::kCEntry_Return1_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit;
   Node* centry_stub =
       gasm_->LoadFromObject(MachineType::Pointer(), isolate_root,
-                            IsolateData::builtin_slot_offset(centry_id));
+                            IsolateData::BuiltinSlotOffset(centry_id));
   // TODO(titzer): allow arbitrary number of runtime arguments
   // At the moment we only allow 5 parameters. If more parameters are needed,
   // increase this constant accordingly.
...
@@ -3,10 +3,23 @@
 // found in the LICENSE file.
 
 #include "src/deoptimizer/deoptimizer.h"
+#include "src/execution/isolate-data.h"
 
 namespace v8 {
 namespace internal {
 
+// The deopt exit sizes below depend on the following IsolateData layout
+// guarantees:
+#define ASSERT_OFFSET(BuiltinName)                                       \
+  STATIC_ASSERT(IsolateData::builtin_tier0_entry_table_offset() +       \
+                    Builtins::ToInt(BuiltinName) * kSystemPointerSize <= \
+                0x1000)
+ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Eager);
+ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Lazy);
+ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Soft);
+ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Bailout);
+#undef ASSERT_OFFSET
+
 const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
 const int Deoptimizer::kNonLazyDeoptExitSize = 2 * kInstrSize;
 const int Deoptimizer::kLazyDeoptExitSize = 2 * kInstrSize;
...
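
A plausible reading of the 0x1000 bound (an assumption based on ARM
addressing, not stated in the CL): an A32 ldr with an immediate offset
reaches at most 4095 bytes from its base register, so the assert keeps every
deopt-entry slot reachable from kRootRegister in a single instruction. A
standalone sketch with invented layout numbers:

```cpp
constexpr int kSystemPointerSize = 4;       // assumed 32-bit target
constexpr int kTier0EntryTableOffset = 64;  // assumed small leading offset
constexpr int kDeoptEntrySlots = 4;

constexpr int kLastDeoptSlotOffset =
    kTier0EntryTableOffset + (kDeoptEntrySlots - 1) * kSystemPointerSize;

// A32 LDR (immediate) takes a 12-bit unsigned offset: 0..4095.
static_assert(kLastDeoptSlotOffset < (1 << 12),
              "every deopt entry is reachable with a single ldr");

int main() { return 0; }
```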
@@ -5,14 +5,27 @@
 #if V8_TARGET_ARCH_X64
 
 #include "src/deoptimizer/deoptimizer.h"
+#include "src/execution/isolate-data.h"
 
 namespace v8 {
 namespace internal {
 
+// The deopt exit sizes below depend on the following IsolateData layout
+// guarantees:
+#define ASSERT_OFFSET(BuiltinName)                                       \
+  STATIC_ASSERT(IsolateData::builtin_tier0_entry_table_offset() +       \
+                    Builtins::ToInt(BuiltinName) * kSystemPointerSize <= \
+                0x7F)
+ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Eager);
+ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Lazy);
+ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Soft);
+ASSERT_OFFSET(Builtin::kDeoptimizationEntry_Bailout);
+#undef ASSERT_OFFSET
+
 const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
-const int Deoptimizer::kNonLazyDeoptExitSize = 7;
-const int Deoptimizer::kLazyDeoptExitSize = 7;
+const int Deoptimizer::kNonLazyDeoptExitSize = 4;
+const int Deoptimizer::kLazyDeoptExitSize = 4;
 
-const int Deoptimizer::kEagerWithResumeBeforeArgsSize = 12;
+const int Deoptimizer::kEagerWithResumeBeforeArgsSize = 9;
 const int Deoptimizer::kEagerWithResumeDeoptExitSize =
     kEagerWithResumeBeforeArgsSize + 2 * kSystemPointerSize;
 const int Deoptimizer::kEagerWithResumeImmedArgs1PcOffset = 5;
...
@@ -128,6 +128,9 @@ const char* V8NameConverter::RootRelativeName(int offset) const {
   const unsigned kRootsTableSize = sizeof(RootsTable);
   const int kExtRefsTableStart = IsolateData::external_reference_table_offset();
   const unsigned kExtRefsTableSize = ExternalReferenceTable::kSizeInBytes;
+  const int kBuiltinTier0TableStart = IsolateData::builtin_tier0_table_offset();
+  const unsigned kBuiltinTier0TableSize =
+      Builtins::kBuiltinTier0Count * kSystemPointerSize;
   const int kBuiltinTableStart = IsolateData::builtin_table_offset();
   const unsigned kBuiltinTableSize =
       Builtins::kBuiltinCount * kSystemPointerSize;
...
@@ -143,7 +146,6 @@ const char* V8NameConverter::RootRelativeName(int offset) const {
     SNPrintF(v8_buffer_, "root (%s)", RootsTable::name(root_index));
     return v8_buffer_.begin();
-
   } else if (static_cast<unsigned>(offset - kExtRefsTableStart) <
              kExtRefsTableSize) {
     uint32_t offset_in_extref_table = offset - kExtRefsTableStart;
...
@@ -162,7 +164,15 @@ const char* V8NameConverter::RootRelativeName(int offset) const {
              isolate_->external_reference_table()->NameFromOffset(
                  offset_in_extref_table));
     return v8_buffer_.begin();
+  } else if (static_cast<unsigned>(offset - kBuiltinTier0TableStart) <
+             kBuiltinTier0TableSize) {
+    uint32_t offset_in_builtins_table = (offset - kBuiltinTier0TableStart);
+    Builtin builtin =
+        Builtins::FromInt(offset_in_builtins_table / kSystemPointerSize);
+    const char* name = Builtins::name(builtin);
+    SNPrintF(v8_buffer_, "builtin (%s)", name);
+    return v8_buffer_.begin();
   } else if (static_cast<unsigned>(offset - kBuiltinTableStart) <
              kBuiltinTableSize) {
     uint32_t offset_in_builtins_table = (offset - kBuiltinTableStart);
...
@@ -172,7 +182,6 @@ const char* V8NameConverter::RootRelativeName(int offset) const {
     const char* name = Builtins::name(builtin);
     SNPrintF(v8_buffer_, "builtin (%s)", name);
     return v8_buffer_.begin();
-
   } else {
     // It must be a direct access to one of the external values.
     if (directly_accessed_external_refs_.empty()) {
...
@@ -22,14 +22,23 @@ class Isolate;
 
 // IsolateData fields, defined as: V(Offset, Size, Name)
 #define ISOLATE_DATA_FIELDS(V)                                                \
+  /* Misc. fields. */                                                         \
+  V(kCageBaseOffset, kSystemPointerSize, cage_base)                           \
+  V(kStackGuardOffset, StackGuard::kSizeInBytes, stack_guard)                 \
+  /* Tier 0 tables (small but fast access). */                                \
+  V(kBuiltinTier0EntryTableOffset,                                            \
+    Builtins::kBuiltinTier0Count* kSystemPointerSize,                         \
+    builtin_tier0_entry_table)                                                \
+  V(kBuiltinsTier0TableOffset,                                                \
+    Builtins::kBuiltinTier0Count* kSystemPointerSize, builtin_tier0_table)    \
+  /* Misc. fields. */                                                         \
   V(kEmbedderDataOffset, Internals::kNumIsolateDataSlots* kSystemPointerSize, \
     embedder_data)                                                            \
   V(kFastCCallCallerFPOffset, kSystemPointerSize, fast_c_call_caller_fp)      \
   V(kFastCCallCallerPCOffset, kSystemPointerSize, fast_c_call_caller_pc)      \
   V(kFastApiCallTargetOffset, kSystemPointerSize, fast_api_call_target)       \
-  V(kCageBaseOffset, kSystemPointerSize, cage_base)                           \
   V(kLongTaskStatsCounterOffset, kSizetSize, long_task_stats_counter)         \
-  V(kStackGuardOffset, StackGuard::kSizeInBytes, stack_guard)                 \
+  /* Full tables (arbitrary size, potentially slower access). */              \
   V(kRootsTableOffset, RootsTable::kEntriesCount* kSystemPointerSize,         \
     roots_table)                                                              \
   V(kExternalReferenceTableOffset, ExternalReferenceTable::kSizeInBytes,      \
...
@@ -79,19 +88,20 @@ class IsolateData final {
     return roots_table_offset() + RootsTable::offset_of(root_index);
   }
 
-  static int builtin_entry_slot_offset(Builtin id) {
-    return builtin_entry_table_offset() +
+  static constexpr int BuiltinEntrySlotOffset(Builtin id) {
+    DCHECK(Builtins::IsBuiltinId(id));
+    return (Builtins::IsTier0(id) ? builtin_tier0_entry_table_offset()
+                                  : builtin_entry_table_offset()) +
            Builtins::ToInt(id) * kSystemPointerSize;
   }
 
   // TODO(ishell): remove in favour of typified id version.
-  static int builtin_slot_offset(int builtin_index) {
-    DCHECK(Builtins::IsBuiltinId(builtin_index));
-    return builtin_table_offset() + builtin_index * kSystemPointerSize;
+  static constexpr int builtin_slot_offset(int builtin_index) {
+    return BuiltinSlotOffset(Builtins::FromInt(builtin_index));
   }
 
-  static int builtin_slot_offset(Builtin id) {
-    return builtin_table_offset() + Builtins::ToInt(id) * kSystemPointerSize;
+  static constexpr int BuiltinSlotOffset(Builtin id) {
+    return (Builtins::IsTier0(id) ? builtin_tier0_table_offset()
+                                  : builtin_table_offset()) +
+           Builtins::ToInt(id) * kSystemPointerSize;
   }
 
 #define V(Offset, Size, Name) \
...
@@ -105,6 +115,8 @@ class IsolateData final {
   // The value of kPointerCageBaseRegister.
   Address cage_base() const { return cage_base_; }
   StackGuard* stack_guard() { return &stack_guard_; }
+  Address* builtin_tier0_entry_table() { return builtin_tier0_entry_table_; }
+  Address* builtin_tier0_table() { return builtin_tier0_table_; }
   RootsTable& roots() { return roots_table_; }
   const RootsTable& roots() const { return roots_table_; }
   ExternalReferenceTable* external_reference_table() {
...
@@ -144,6 +156,16 @@ class IsolateData final {
   DEFINE_FIELD_OFFSET_CONSTANTS(0, FIELDS)
 #undef FIELDS
 
+  const Address cage_base_;
+
+  // Fields related to the system and JS stack. In particular, this contains
+  // the stack limit used by stack checks in generated code.
+  StackGuard stack_guard_;
+
+  // Tier 0 tables. See also builtin_entry_table_ and builtin_table_.
+  Address builtin_tier0_entry_table_[Builtins::kBuiltinTier0Count] = {};
+  Address builtin_tier0_table_[Builtins::kBuiltinTier0Count] = {};
+
   // These fields are accessed through the API, offsets must be kept in sync
   // with v8::internal::Internals (in include/v8-internal.h) constants. The
   // layout consistency is verified in Isolate::CheckIsolateLayout() using
...
@@ -162,22 +184,16 @@ class IsolateData final {
   // generated code.
   Address fast_api_call_target_ = kNullAddress;
 
-  const Address cage_base_;
-
   // Used for implementation of LongTaskStats. Counts the number of potential
   // long tasks.
   size_t long_task_stats_counter_ = 0;
 
-  // Fields related to the system and JS stack. In particular, this contains
-  // the stack limit used by stack checks in generated code.
-  StackGuard stack_guard_;
-
   RootsTable roots_table_;
   ExternalReferenceTable external_reference_table_;
 
   ThreadLocalTop thread_local_top_;
 
-  // The entry points for all builtins. This corresponds to
+  // The entry points for builtins. This corresponds to
   // Code::InstructionStart() for each Code object in the builtins table below.
   // The entry table is in IsolateData for easy access through kRootRegister.
   Address builtin_entry_table_[Builtins::kBuiltinCount] = {};
...
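
To make the dispatch concrete, a self-contained mock (not the real
IsolateData; offsets and counts are invented) of the two-table lookup that
BuiltinEntrySlotOffset and BuiltinSlotOffset now implement: tier-0 ids index
the small leading tables, everything else falls through to the full tables
further into IsolateData.

```cpp
#include <cstdio>

constexpr int kSystemPointerSize = 8;
constexpr int kTier0Count = 13;

// Invented offsets, ordered as in the new ISOLATE_DATA_FIELDS layout: the
// tier-0 entry table sits near the start, the full entry table much later.
constexpr int kTier0EntryTableOffset = 64;
constexpr int kFullEntryTableOffset = 40000;

constexpr bool IsTier0(int id) { return 0 <= id && id < kTier0Count; }

// Mirrors BuiltinEntrySlotOffset: tier-0 ids are the lowest, so the same
// index works in the small table and in the full one.
constexpr int EntrySlotOffset(int id) {
  return (IsTier0(id) ? kTier0EntryTableOffset : kFullEntryTableOffset) +
         id * kSystemPointerSize;
}

int main() {
  std::printf("tier-0 id 0   -> root-relative offset %d\n", EntrySlotOffset(0));
  std::printf("tier-1 id 100 -> root-relative offset %d\n",
              EntrySlotOffset(100));
  return 0;
}
```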
@@ -3054,6 +3054,12 @@ void Isolate::CheckIsolateLayout() {
   CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.roots_table_)),
            Internals::kIsolateRootsOffset);
 
+  STATIC_ASSERT(Internals::kStackGuardSize == sizeof(StackGuard));
+  STATIC_ASSERT(Internals::kBuiltinTier0TableSize ==
+                Builtins::kBuiltinTier0Count * kSystemPointerSize);
+  STATIC_ASSERT(Internals::kBuiltinTier0EntryTableSize ==
+                Builtins::kBuiltinTier0Count * kSystemPointerSize);
+
 #ifdef V8_HEAP_SANDBOX
   CHECK_EQ(static_cast<int>(OFFSET_OF(ExternalPointerTable, buffer_)),
            Internals::kExternalPointerTableBufferOffset);
...
@@ -3754,7 +3760,7 @@ bool Isolate::Init(SnapshotData* startup_snapshot_data,
   delete setup_delegate_;
   setup_delegate_ = nullptr;
 
-  Builtins::InitializeBuiltinEntryTable(this);
+  Builtins::InitializeIsolateDataTables(this);
   Builtins::EmitCodeCreateEvents(this);
 
 #ifdef DEBUG
...
@@ -4659,9 +4659,18 @@ Code Heap::builtin(Builtin builtin) {
 Address Heap::builtin_address(Builtin builtin) {
   const int index = Builtins::ToInt(builtin);
   DCHECK(Builtins::IsBuiltinId(builtin) || index == Builtins::kBuiltinCount);
+  // Note: Must return an address within the full builtin_table for
+  // IterateBuiltins to work.
   return reinterpret_cast<Address>(&isolate()->builtin_table()[index]);
 }
 
+Address Heap::builtin_tier0_address(Builtin builtin) {
+  const int index = static_cast<int>(builtin);
+  DCHECK(Builtins::IsBuiltinId(builtin) || index == Builtins::kBuiltinCount);
+  return reinterpret_cast<Address>(
+      &isolate()->isolate_data()->builtin_tier0_table()[index]);
+}
+
 void Heap::set_builtin(Builtin builtin, Code code) {
   DCHECK(Builtins::IsBuiltinId(builtin));
   DCHECK(Internals::HasHeapObjectTag(code.ptr()));
...
@@ -4897,6 +4906,12 @@ void Heap::IterateBuiltins(RootVisitor* v) {
                         FullObjectSlot(builtin_address(builtin)));
   }
 
+  for (Builtin builtin = Builtins::kFirst; builtin <= Builtins::kLastTier0;
+       ++builtin) {
+    v->VisitRootPointer(Root::kBuiltins, Builtins::name(builtin),
+                        FullObjectSlot(builtin_tier0_address(builtin)));
+  }
+
   // The entry table doesn't need to be updated since all builtins are embedded.
   STATIC_ASSERT(Builtins::AllBuiltinsAreIsolateIndependent());
 }
...
@@ -1053,6 +1053,7 @@ class Heap {
   V8_EXPORT_PRIVATE Code builtin(Builtin builtin);
   Address builtin_address(Builtin builtin);
+  Address builtin_tier0_address(Builtin builtin);
   void set_builtin(Builtin builtin, Code code);
 
   // ===========================================================================
...