Commit b40b0bd2 authored by Sigurd Schneider, committed by Commit Bot

[builtins] Add kind specific flag for off-heap trampolines

This CL adds the BUILTIN kind-specific flag is_off_heap_trampoline
to Code objects. Code objects therefore know whether they are off-heap
trampolines and can decide whether to return the off-heap instruction
start without consulting the builtin index or checking for the
existence of an embedded blob.

Bug: v8:6666
Change-Id: I0fbc540a2624ed5fa8256b807299e2fe4b907dda
Reviewed-on: https://chromium-review.googlesource.com/1109691
Commit-Queue: Sigurd Schneider <sigurds@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Cr-Commit-Position: refs/heads/master@{#53956}
parent 27b41ea0
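
The new bit is packed into the Code flags word next to the existing fields, as the CODE_FLAGS_BIT_FIELDS change further down shows. Below is a minimal, self-contained C++ sketch of that encode/decode pattern; the BitField helper, the kind value, and the stack-slot count are simplified stand-ins chosen for illustration, not V8's actual implementation.

// Sketch only: a simplified BitField helper standing in for V8's real one.
#include <cstdint>
#include <iostream>

template <typename T, int kShift, int kSize>
struct BitField {
  static constexpr uint32_t kMask = ((uint32_t{1} << kSize) - 1u) << kShift;
  static constexpr int kNext = kShift + kSize;
  static constexpr uint32_t encode(T value) {
    return static_cast<uint32_t>(value) << kShift;
  }
  static constexpr T decode(uint32_t flags) {
    return static_cast<T>((flags & kMask) >> kShift);
  }
};

// Mirrors the CODE_FLAGS_BIT_FIELDS layout in the diff: 1 + 5 + 1 + 24 bits
// already in use, leaving exactly one spare bit for IsOffHeapTrampoline.
using HasUnwindingInfoField = BitField<bool, 0, 1>;
using KindField = BitField<int, HasUnwindingInfoField::kNext, 5>;
using IsTurbofannedField = BitField<bool, KindField::kNext, 1>;
using StackSlotsField = BitField<int, IsTurbofannedField::kNext, 24>;
using IsOffHeapTrampoline = BitField<bool, StackSlotsField::kNext, 1>;
static_assert(IsOffHeapTrampoline::kNext <= 32, "Code::flags field exhausted");

int main() {
  // Hypothetical values for a BUILTIN trampoline (the kind and stack-slot
  // count are made up for this example).
  uint32_t flags = HasUnwindingInfoField::encode(false) |
                   KindField::encode(3) |
                   IsTurbofannedField::encode(true) |
                   StackSlotsField::encode(8) |
                   IsOffHeapTrampoline::encode(true);
  // is_off_heap_trampoline() boils down to a single decode of the flags word.
  std::cout << std::boolalpha
            << "is_off_heap_trampoline: " << IsOffHeapTrampoline::decode(flags)
            << ", stack_slots: " << StackSlotsField::decode(flags) << std::endl;
}
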
......@@ -198,11 +198,10 @@ bool Builtins::IsBuiltinHandle(Handle<HeapObject> maybe_code,
}
// static
bool Builtins::IsEmbeddedBuiltin(const Code* code) {
bool Builtins::IsIsolateIndependentBuiltin(const Code* code) {
#ifdef V8_EMBEDDED_BUILTINS
const int builtin_index = code->builtin_index();
return Isolate::CurrentEmbeddedBlob() != nullptr &&
Builtins::IsBuiltinId(builtin_index) &&
return Builtins::IsBuiltinId(builtin_index) &&
Builtins::IsIsolateIndependent(builtin_index);
#else
return false;
......
......@@ -100,7 +100,7 @@ class Builtins {
bool IsBuiltinHandle(Handle<HeapObject> maybe_code, int* index) const;
// True, iff the given code object is a builtin with off-heap embedded code.
static bool IsEmbeddedBuiltin(const Code* code);
static bool IsIsolateIndependentBuiltin(const Code* code);
// Returns true iff the given builtin can be lazy-loaded from the snapshot.
// This is true in general for most builtins with the exception of a few
......
......@@ -68,7 +68,9 @@ void InitializeCode(Heap* heap, Handle<Code> code, int object_size,
code->set_raw_instruction_size(desc.instr_size);
code->set_relocation_info(*reloc_info);
code->initialize_flags(kind, has_unwinding_info, is_turbofanned, stack_slots);
const bool is_off_heap_trampoline = false;
code->initialize_flags(kind, has_unwinding_info, is_turbofanned, stack_slots,
is_off_heap_trampoline);
code->set_safepoint_table_offset(safepoint_table_offset);
code->set_handler_table_offset(handler_table_offset);
code->set_code_data_container(*data_container);
......@@ -2586,7 +2588,7 @@ Handle<Code> Factory::NewOffHeapTrampolineFor(Handle<Code> code,
CHECK(isolate()->serializer_enabled());
CHECK_NOT_NULL(isolate()->embedded_blob());
CHECK_NE(0, isolate()->embedded_blob_size());
CHECK(Builtins::IsEmbeddedBuiltin(*code));
CHECK(Builtins::IsIsolateIndependentBuiltin(*code));
Handle<Code> result =
Builtins::GenerateOffHeapTrampolineFor(isolate(), off_heap_entry);
......@@ -2594,9 +2596,11 @@ Handle<Code> Factory::NewOffHeapTrampolineFor(Handle<Code> code,
// The trampoline code object must inherit specific flags from the original
// builtin (e.g. the safepoint-table offset). We set them manually here.
const bool set_is_off_heap_trampoline = true;
const int stack_slots = code->has_safepoint_info() ? code->stack_slots() : 0;
result->initialize_flags(code->kind(), code->has_unwinding_info(),
code->is_turbofanned(), stack_slots);
code->is_turbofanned(), stack_slots,
set_is_off_heap_trampoline);
result->set_builtin_index(code->builtin_index());
result->set_handler_table_offset(code->handler_table_offset());
result->code_data_container()->set_kind_specific_flags(
......
......@@ -14119,21 +14119,21 @@ SafepointEntry Code::GetSafepointEntry(Address pc) {
#ifdef V8_EMBEDDED_BUILTINS
int Code::OffHeapInstructionSize() const {
DCHECK(Builtins::IsEmbeddedBuiltin(this));
DCHECK(is_off_heap_trampoline());
if (Isolate::CurrentEmbeddedBlob() == nullptr) return raw_instruction_size();
EmbeddedData d = EmbeddedData::FromBlob();
return d.InstructionSizeOfBuiltin(builtin_index());
}
Address Code::OffHeapInstructionStart() const {
DCHECK(Builtins::IsEmbeddedBuiltin(this));
DCHECK(is_off_heap_trampoline());
if (Isolate::CurrentEmbeddedBlob() == nullptr) return raw_instruction_start();
EmbeddedData d = EmbeddedData::FromBlob();
return d.InstructionStartOfBuiltin(builtin_index());
}
Address Code::OffHeapInstructionEnd() const {
DCHECK(Builtins::IsEmbeddedBuiltin(this));
DCHECK(is_off_heap_trampoline());
if (Isolate::CurrentEmbeddedBlob() == nullptr) return raw_instruction_end();
EmbeddedData d = EmbeddedData::FromBlob();
return d.InstructionStartOfBuiltin(builtin_index()) +
......@@ -14277,7 +14277,7 @@ const char* AbstractCode::Kind2String(Kind kind) {
}
#ifdef V8_EMBEDDED_BUILTINS
bool Code::IsProcessIndependent(Isolate* isolate) {
bool Code::IsIsolateIndependent(Isolate* isolate) {
constexpr int all_real_modes_mask =
(1 << (RelocInfo::LAST_REAL_RELOC_MODE + 1)) - 1;
constexpr int mode_mask = all_real_modes_mask &
......@@ -14309,7 +14309,7 @@ bool Code::IsProcessIndependent(Isolate* isolate) {
Code* target = Code::GetCodeFromTargetAddress(target_address);
CHECK(target->IsCode());
if (Builtins::IsEmbeddedBuiltin(target)) continue;
if (Builtins::IsIsolateIndependentBuiltin(target)) continue;
}
is_process_independent = false;
}
......
......@@ -226,7 +226,9 @@ void Code::set_next_code_link(Object* value) {
int Code::InstructionSize() const {
#ifdef V8_EMBEDDED_BUILTINS
if (Builtins::IsEmbeddedBuiltin(this)) return OffHeapInstructionSize();
if (is_off_heap_trampoline()) {
return OffHeapInstructionSize();
}
#endif
return raw_instruction_size();
}
......@@ -237,7 +239,9 @@ Address Code::raw_instruction_start() const {
Address Code::InstructionStart() const {
#ifdef V8_EMBEDDED_BUILTINS
if (Builtins::IsEmbeddedBuiltin(this)) return OffHeapInstructionStart();
if (is_off_heap_trampoline()) {
return OffHeapInstructionStart();
}
#endif
return raw_instruction_start();
}
......@@ -248,7 +252,9 @@ Address Code::raw_instruction_end() const {
Address Code::InstructionEnd() const {
#ifdef V8_EMBEDDED_BUILTINS
if (Builtins::IsEmbeddedBuiltin(this)) return OffHeapInstructionEnd();
if (is_off_heap_trampoline()) {
return OffHeapInstructionEnd();
}
#endif
return raw_instruction_end();
}
......@@ -315,7 +321,7 @@ Address Code::entry() const { return raw_instruction_start(); }
bool Code::contains(Address inner_pointer) {
#ifdef V8_EMBEDDED_BUILTINS
if (Builtins::IsEmbeddedBuiltin(this)) {
if (is_off_heap_trampoline()) {
return (OffHeapInstructionStart() <= inner_pointer) &&
(inner_pointer < OffHeapInstructionEnd());
}
......@@ -337,13 +343,15 @@ Code::Kind Code::kind() const {
}
void Code::initialize_flags(Kind kind, bool has_unwinding_info,
bool is_turbofanned, int stack_slots) {
bool is_turbofanned, int stack_slots,
bool is_off_heap_trampoline) {
CHECK(0 <= stack_slots && stack_slots < StackSlotsField::kMax);
static_assert(Code::NUMBER_OF_KINDS <= KindField::kMax + 1, "field overflow");
uint32_t flags = HasUnwindingInfoField::encode(has_unwinding_info) |
KindField::encode(kind) |
IsTurbofannedField::encode(is_turbofanned) |
StackSlotsField::encode(stack_slots);
StackSlotsField::encode(stack_slots) |
IsOffHeapTrampoline::encode(is_off_heap_trampoline);
WRITE_UINT32_FIELD(this, kFlagsOffset, flags);
DCHECK_IMPLIES(stack_slots != 0, has_safepoint_info());
}
......@@ -437,6 +445,10 @@ inline void Code::set_is_exception_caught(bool value) {
code_data_container()->set_kind_specific_flags(updated);
}
inline bool Code::is_off_heap_trampoline() const {
return IsOffHeapTrampoline::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}
inline HandlerTable::CatchPrediction Code::GetBuiltinCatchPrediction() {
if (is_promise_rejection()) return HandlerTable::PROMISE;
if (is_exception_caught()) return HandlerTable::CAUGHT;
......
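
The practical effect of the accessor changes above is that InstructionSize/Start/End no longer need to consult the builtin index or the embedded blob to choose between on-heap and off-heap code. The following is a hypothetical, self-contained before/after sketch of that dispatch; FakeCode and the helper parameters are invented stand-ins for this illustration, not the real V8 types.

#include <cassert>
#include <cstdint>

// Invented stand-in for a Code object; only the fields needed here.
struct FakeCode {
  int builtin_index;            // -1 when this is not a builtin
  bool is_off_heap_trampoline;  // the new kind-specific flag
  uintptr_t raw_instruction_start;
  uintptr_t off_heap_instruction_start;
};

// Old shape: decide via the builtin index plus the existence of an embedded
// blob (the parameters are simplifications of the original checks).
uintptr_t InstructionStartOld(const FakeCode& code, bool blob_exists,
                              bool builtin_is_isolate_independent) {
  if (blob_exists && code.builtin_index >= 0 &&
      builtin_is_isolate_independent) {
    return code.off_heap_instruction_start;
  }
  return code.raw_instruction_start;
}

// New shape: the Code object itself records that it is a trampoline.
uintptr_t InstructionStartNew(const FakeCode& code) {
  return code.is_off_heap_trampoline ? code.off_heap_instruction_start
                                     : code.raw_instruction_start;
}

int main() {
  FakeCode trampoline{42, true, 0x1000, 0x2000};
  FakeCode regular{-1, false, 0x3000, 0x0};
  assert(InstructionStartOld(trampoline, true, true) == 0x2000);
  assert(InstructionStartNew(trampoline) == 0x2000);
  assert(InstructionStartNew(regular) == 0x3000);
  return 0;
}
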
......@@ -184,6 +184,10 @@ class Code : public HeapObject {
// Use GetBuiltinCatchPrediction to access this.
inline void set_is_exception_caught(bool flag);
// [is_off_heap_trampoline]: For kind BUILTIN tells whether
// this is a trampoline to an off-heap builtin.
inline bool is_off_heap_trampoline() const;
// [constant_pool]: The constant pool for this function.
inline Address constant_pool() const;
......@@ -205,7 +209,8 @@ class Code : public HeapObject {
// Initialize the flags field. Similar to clear_padding above this ensure that
// the snapshot content is deterministic.
inline void initialize_flags(Kind kind, bool has_unwinding_info,
bool is_turbofanned, int stack_slots);
bool is_turbofanned, int stack_slots,
bool is_off_heap_trampoline);
// Convert a target address into a code object.
static inline Code* GetCodeFromTargetAddress(Address address);
......@@ -347,7 +352,7 @@ class Code : public HeapObject {
#endif // DEBUG
#ifdef V8_EMBEDDED_BUILTINS
bool IsProcessIndependent(Isolate* isolate);
bool IsIsolateIndependent(Isolate* isolate);
#endif
inline bool CanContainWeakObjects();
......@@ -418,11 +423,13 @@ class Code : public HeapObject {
V(HasUnwindingInfoField, bool, 1, _) \
V(KindField, Kind, 5, _) \
V(IsTurbofannedField, bool, 1, _) \
V(StackSlotsField, int, 24, _)
V(StackSlotsField, int, 24, _) \
V(IsOffHeapTrampoline, bool, 1, _)
DEFINE_BIT_FIELDS(CODE_FLAGS_BIT_FIELDS)
#undef CODE_FLAGS_BIT_FIELDS
static_assert(NUMBER_OF_KINDS <= KindField::kMax, "Code::KindField size");
static_assert(StackSlotsField::kNext <= 32, "Code::flags field exhausted");
static_assert(IsOffHeapTrampoline::kNext <= 32,
"Code::flags field exhausted");
// KindSpecificFlags layout (STUB, BUILTIN and OPTIMIZED_FUNCTION)
#define CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS(V, _) \
......
......@@ -860,7 +860,7 @@ void Serializer<AllocatorT>::ObjectSerializer::VisitOffHeapTarget(
#ifdef V8_EMBEDDED_BUILTINS
{
STATIC_ASSERT(EmbeddedData::kTableSize == Builtins::builtin_count);
CHECK(Builtins::IsEmbeddedBuiltin(host));
CHECK(Builtins::IsIsolateIndependentBuiltin(host));
Address addr = rinfo->target_off_heap_target();
CHECK_NE(kNullAddress, addr);
CHECK_NOT_NULL(
......
......@@ -346,7 +346,7 @@ void FinalizeEmbeddedCodeTargets(Isolate* isolate, EmbeddedData* blob) {
RelocInfo* rinfo = on_heap_it.rinfo();
DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
CHECK(Builtins::IsEmbeddedBuiltin(target));
CHECK(Builtins::IsIsolateIndependentBuiltin(target));
off_heap_it.rinfo()->set_target_address(
blob->InstructionStartOfBuiltin(target->builtin_index()));
......@@ -384,7 +384,7 @@ EmbeddedData EmbeddedData::FromIsolate(Isolate* isolate) {
// Sanity-check that the given builtin is isolate-independent and does not
// use the trampoline register in its calling convention.
if (!code->IsProcessIndependent(isolate)) {
if (!code->IsIsolateIndependent(isolate)) {
saw_unsafe_builtin = true;
fprintf(stderr, "%s is not isolate-independent.\n", Builtins::name(i));
}
......
......@@ -1515,7 +1515,7 @@ void TurboAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode,
// TODO(X64): Inline this
#ifdef V8_EMBEDDED_BUILTINS
if (root_array_available_ && options().isolate_independent_code &&
!Builtins::IsEmbeddedBuiltin(*code_object)) {
!Builtins::IsIsolateIndependentBuiltin(*code_object)) {
// Calls to embedded targets are initially generated as standard
// pc-relative calls below. When creating the embedded blob, call offsets
// are patched up to point directly to the off-heap instruction start.
......@@ -1589,7 +1589,7 @@ void TurboAssembler::Call(Address destination, RelocInfo::Mode rmode) {
void TurboAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
#ifdef V8_EMBEDDED_BUILTINS
if (root_array_available_ && options().isolate_independent_code &&
!Builtins::IsEmbeddedBuiltin(*code_object)) {
!Builtins::IsIsolateIndependentBuiltin(*code_object)) {
// Calls to embedded targets are initially generated as standard
// pc-relative calls below. When creating the embedded blob, call offsets
// are patched up to point directly to the off-heap instruction start.
......
......@@ -184,7 +184,7 @@ CodeMap.prototype.findInTree_ = function(tree, addr) {
*
* @private
*/
CodeMap.prototype.isEmbeddedBuiltin_ = function(entry) {
CodeMap.prototype.isIsolateIndependentBuiltin_ = function(entry) {
return entry.type == "CPP" && /v8_\w*embedded_blob_/.test(entry.name);
};
......@@ -205,7 +205,7 @@ CodeMap.prototype.findAddress = function(addr) {
result = this.findInTree_(this.libraries_, addr);
if (!result) return null;
}
if (!this.isEmbeddedBuiltin_(result.value)) {
if (!this.isIsolateIndependentBuiltin_(result.value)) {
// Embedded builtins are handled in the following dynamic section.
return { entry : result.value, offset : addr - result.key };
}
......