Commit 196eaa14 authored by ishell@chromium.org, committed by V8 LUCI CQ

[ext-code-space] Support Code-less builtins

The Code-less builtins mode is still disabled.

Bug: v8:11880
Change-Id: I725ae4187bb394f79c69d5d0d971961e3ea406e8
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3827879
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Reviewed-by: Jakob Linke <jgruber@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82520}
parent 63a28360
@@ -1091,19 +1091,27 @@ void CodeDataContainer::CodeDataContainerVerify(Isolate* isolate) {
CHECK(next_code_link().IsCodeT() || next_code_link().IsUndefined(isolate));
if (V8_EXTERNAL_CODE_SPACE_BOOL) {
if (raw_code() != Smi::zero()) {
Code code = this->code();
#ifdef V8_EXTERNAL_CODE_SPACE
// kind and builtin_id() getters are not available on CodeDataContainer
// kind() and builtin_id() getters are not available on CodeDataContainer
// when external code space is not enabled.
CHECK_EQ(code().kind(), kind());
CHECK_EQ(code().builtin_id(), builtin_id());
CHECK_EQ(code.kind(), kind());
CHECK_EQ(code.builtin_id(), builtin_id());
if (V8_REMOVE_BUILTINS_CODE_OBJECTS) {
// When FLAG_interpreted_frames_native_stack is enabled each interpreted
// function gets its own copy of the InterpreterEntryTrampoline.
// Thus, there could be Code'ful builtins.
CHECK_IMPLIES(isolate->embedded_blob_code() && is_off_heap_trampoline(),
builtin_id() == Builtin::kInterpreterEntryTrampoline);
}
#endif // V8_EXTERNAL_CODE_SPACE
CHECK_EQ(code().code_data_container(kAcquireLoad), *this);
CHECK_EQ(code.code_data_container(kAcquireLoad), *this);
// Ensure the cached code entry point corresponds to the Code object
// associated with this CodeDataContainer.
#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE
if (V8_SHORT_BUILTIN_CALLS_BOOL) {
if (code().InstructionStart() == code_entry_point()) {
if (code.InstructionStart() == code_entry_point()) {
// Most common case, all good.
} else {
// When shared pointer compression cage is enabled and it has the
@@ -1118,13 +1126,13 @@ void CodeDataContainer::CodeDataContainerVerify(Isolate* isolate) {
isolate->heap()->GcSafeFindCodeForInnerPointer(
code_entry_point());
CHECK(lookup_result.IsFound());
CHECK_EQ(lookup_result.ToCode(), code());
CHECK_EQ(lookup_result.ToCode(), code);
}
} else {
CHECK_EQ(code().InstructionStart(), code_entry_point());
CHECK_EQ(code.InstructionStart(), code_entry_point());
}
#else
CHECK_EQ(code().InstructionStart(), code_entry_point());
CHECK_EQ(code.InstructionStart(), code_entry_point());
#endif // V8_COMPRESS_POINTERS_IN_SHARED_CAGE
}
}
@@ -425,31 +425,70 @@ size_t Isolate::HashIsolateForEmbeddedBlob() {
// Hash data sections of builtin code objects.
for (Builtin builtin = Builtins::kFirst; builtin <= Builtins::kLast;
++builtin) {
Code code = FromCodeT(builtins()->code(builtin));
CodeT codet = builtins()->code(builtin);
DCHECK(Internals::HasHeapObjectTag(code.ptr()));
uint8_t* const code_ptr =
reinterpret_cast<uint8_t*>(code.ptr() - kHeapObjectTag);
if (V8_REMOVE_BUILTINS_CODE_OBJECTS) {
#ifdef V8_EXTERNAL_CODE_SPACE
DCHECK(Internals::HasHeapObjectTag(codet.ptr()));
uint8_t* const code_ptr = reinterpret_cast<uint8_t*>(codet.address());
// These static asserts ensure we don't miss relevant fields. We don't
// hash code cage base and code entry point. Other data fields must
// remain the same.
static_assert(CodeDataContainer::kCodePointerFieldsStrongEndOffset ==
CodeDataContainer::kCodeCageBaseUpper32BitsOffset);
static_assert(CodeDataContainer::kCodeCageBaseUpper32BitsOffsetEnd + 1 ==
CodeDataContainer::kCodeEntryPointOffset);
static_assert(CodeDataContainer::kCodeEntryPointOffsetEnd + 1 ==
CodeDataContainer::kFlagsOffset);
static_assert(CodeDataContainer::kFlagsOffsetEnd + 1 ==
CodeDataContainer::kBuiltinIdOffset);
static_assert(CodeDataContainer::kBuiltinIdOffsetEnd + 1 ==
CodeDataContainer::kKindSpecificFlagsOffset);
static_assert(CodeDataContainer::kKindSpecificFlagsOffsetEnd + 1 ==
CodeDataContainer::kUnalignedSize);
constexpr int kStartOffset = CodeDataContainer::kFlagsOffset;
// |is_off_heap_trampoline| is false during builtins compilation (since
// the builtins are not trampolines yet) but it's true for off-heap
// builtin trampolines. The rest of the data fields should be the same.
// So we temporarily set |is_off_heap_trampoline| to true during hash
// computation.
bool is_off_heap_trampoline_sav = codet.is_off_heap_trampoline();
codet.set_is_off_heap_trampoline_for_hash(true);
for (int j = kStartOffset; j < CodeDataContainer::kUnalignedSize; j++) {
hash = base::hash_combine(hash, size_t{code_ptr[j]});
}
codet.set_is_off_heap_trampoline_for_hash(is_off_heap_trampoline_sav);
#endif // V8_EXTERNAL_CODE_SPACE
} else {
Code code = FromCodeT(codet);
DCHECK(Internals::HasHeapObjectTag(code.ptr()));
uint8_t* const code_ptr = reinterpret_cast<uint8_t*>(code.address());
// These static asserts ensure we don't miss relevant fields. We don't hash
// pointer compression base, instruction/metadata size value and flags since
// they change when creating the off-heap trampolines. Other data fields
// must remain the same.
// These static asserts ensure we don't miss relevant fields. We don't
// hash pointer compression base, instruction/metadata size value and
// flags since they change when creating the off-heap trampolines. Other
// data fields must remain the same.
#ifdef V8_EXTERNAL_CODE_SPACE
static_assert(Code::kMainCageBaseUpper32BitsOffset == Code::kDataStart);
static_assert(Code::kInstructionSizeOffset ==
Code::kMainCageBaseUpper32BitsOffsetEnd + 1);
static_assert(Code::kMainCageBaseUpper32BitsOffset == Code::kDataStart);
static_assert(Code::kInstructionSizeOffset ==
Code::kMainCageBaseUpper32BitsOffsetEnd + 1);
#else
static_assert(Code::kInstructionSizeOffset == Code::kDataStart);
static_assert(Code::kInstructionSizeOffset == Code::kDataStart);
#endif // V8_EXTERNAL_CODE_SPACE
static_assert(Code::kMetadataSizeOffset ==
Code::kInstructionSizeOffsetEnd + 1);
static_assert(Code::kFlagsOffset == Code::kMetadataSizeOffsetEnd + 1);
static_assert(Code::kBuiltinIndexOffset == Code::kFlagsOffsetEnd + 1);
static constexpr int kStartOffset = Code::kBuiltinIndexOffset;
for (int j = kStartOffset; j < Code::kUnalignedHeaderSize; j++) {
hash = base::hash_combine(hash, size_t{code_ptr[j]});
static_assert(Code::kMetadataSizeOffset ==
Code::kInstructionSizeOffsetEnd + 1);
static_assert(Code::kFlagsOffset == Code::kMetadataSizeOffsetEnd + 1);
static_assert(Code::kBuiltinIndexOffset == Code::kFlagsOffsetEnd + 1);
static constexpr int kStartOffset = Code::kBuiltinIndexOffset;
for (int j = kStartOffset; j < Code::kUnalignedHeaderSize; j++) {
hash = base::hash_combine(hash, size_t{code_ptr[j]});
}
}
}
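As an aside, the hashing pattern used in the hunk above — hash the raw bytes of a fixed range of data fields while temporarily forcing a flag that legitimately differs between the states being compared to a canonical value — can be sketched in isolation. Everything below (the struct layout, field names, and the HashCombine stand-in) is hypothetical and only illustrates the idiom, not V8's actual object layout:

#include <cstddef>
#include <cstdint>
#include <functional>

// Stand-in for base::hash_combine (boost-style bit mixing).
inline size_t HashCombine(size_t seed, size_t value) {
  return seed ^ (std::hash<size_t>{}(value) + 0x9e3779b97f4a7c15ULL +
                 (seed << 6) + (seed >> 2));
}

// Hypothetical packed record standing in for the hashed field range of a
// CodeDataContainer; fields are arranged so there are no padding bytes.
struct FakeContainerFields {
  uint16_t flags;  // bit 0: assumed "is off-heap trampoline" bit
  uint16_t builtin_id;
  uint32_t kind_specific_flags;
};

size_t HashDataFields(FakeContainerFields& fields) {
  const uint16_t saved_flags = fields.flags;
  fields.flags |= 1;  // canonicalize the trampoline bit for the hash
  size_t hash = 0;
  const uint8_t* bytes = reinterpret_cast<const uint8_t*>(&fields);
  for (size_t j = 0; j < sizeof(FakeContainerFields); j++) {
    hash = HashCombine(hash, size_t{bytes[j]});
  }
  fields.flags = saved_flags;  // restore the real flag value
  return hash;
}

The save/restore dance mirrors set_is_off_heap_trampoline_for_hash() introduced below: the bit sits inside the hashed range, so it must look the same on both sides of the comparison.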
@@ -3758,13 +3797,12 @@ void CreateOffHeapTrampolines(Isolate* isolate) {
for (Builtin builtin = Builtins::kFirst; builtin <= Builtins::kLast;
++builtin) {
Address instruction_start = d.InstructionStartOfBuiltin(builtin);
// TODO(v8:11880): avoid roundtrips between cdc and code.
Handle<Code> trampoline = isolate->factory()->NewOffHeapTrampolineFor(
FromCodeT(builtins->code_handle(builtin), isolate), instruction_start);
Handle<CodeT> trampoline = isolate->factory()->NewOffHeapTrampolineFor(
builtins->code_handle(builtin), instruction_start);
// From this point onwards, the old builtin code object is unreachable and
// will be collected by the next GC.
builtins->set_code(builtin, ToCodeT(*trampoline));
builtins->set_code(builtin, *trampoline);
}
}
@@ -2460,17 +2460,35 @@ Handle<DeoptimizationLiteralArray> Factory::NewDeoptimizationLiteralArray(
NewWeakFixedArray(length, AllocationType::kOld));
}
Handle<Code> Factory::NewOffHeapTrampolineFor(Handle<Code> code,
Address off_heap_entry) {
Handle<CodeT> Factory::NewOffHeapTrampolineFor(Handle<CodeT> code,
Address off_heap_entry) {
CHECK_NOT_NULL(isolate()->embedded_blob_code());
CHECK_NE(0, isolate()->embedded_blob_code_size());
CHECK(Builtins::IsIsolateIndependentBuiltin(*code));
#ifdef V8_EXTERNAL_CODE_SPACE
if (V8_REMOVE_BUILTINS_CODE_OBJECTS) {
const int no_flags = 0;
Handle<CodeDataContainer> code_data_container =
NewCodeDataContainer(no_flags, AllocationType::kOld);
const bool set_is_off_heap_trampoline = true;
code_data_container->initialize_flags(code->kind(), code->builtin_id(),
code->is_turbofanned(),
set_is_off_heap_trampoline);
code_data_container->set_kind_specific_flags(
code->kind_specific_flags(kRelaxedLoad), kRelaxedStore);
code_data_container->set_code_entry_point(isolate(),
code->code_entry_point());
return Handle<CodeT>::cast(code_data_container);
}
#endif // V8_EXTERNAL_CODE_SPACE
bool generate_jump_to_instruction_stream =
Builtins::CodeObjectIsExecutable(code->builtin_id());
Handle<Code> result = Builtins::GenerateOffHeapTrampolineFor(
isolate(), off_heap_entry,
code->code_data_container(kAcquireLoad).kind_specific_flags(kRelaxedLoad),
CodeDataContainerFromCodeT(*code).kind_specific_flags(kRelaxedLoad),
generate_jump_to_instruction_stream);
// Trampolines may not contain any metadata since all metadata offsets,
@@ -2485,7 +2503,7 @@ Handle<Code> Factory::NewOffHeapTrampolineFor(Handle<Code> code,
{
DisallowGarbageCollection no_gc;
CodePageMemoryModificationScope code_allocation(*result);
Code raw_code = *code;
Code raw_code = FromCodeT(*code);
Code raw_result = *result;
const bool set_is_off_heap_trampoline = true;
@@ -2527,7 +2545,7 @@ Handle<Code> Factory::NewOffHeapTrampolineFor(Handle<Code> code,
}
}
return result;
return ToCodeT(result, isolate());
}
Handle<BytecodeArray> Factory::CopyBytecodeArray(Handle<BytecodeArray> source) {
@@ -740,8 +740,8 @@ class V8_EXPORT_PRIVATE Factory : public FactoryBase<Factory> {
// Allocates a new code object and initializes it as the trampoline to the
// given off-heap entry point.
Handle<Code> NewOffHeapTrampolineFor(Handle<Code> code,
Address off_heap_entry);
Handle<CodeT> NewOffHeapTrampolineFor(Handle<CodeT> code,
Address off_heap_entry);
Handle<BytecodeArray> CopyBytecodeArray(Handle<BytecodeArray>);
@@ -1554,6 +1554,15 @@ void CodeDataContainer::SetCodeAndEntryPoint(Isolate* isolate_for_sandbox,
set_code_entry_point(isolate_for_sandbox, code.InstructionStart());
}
void CodeDataContainer::SetEntryPointForOffHeapBuiltin(
Isolate* isolate_for_sandbox, Address entry) {
CHECK(V8_REMOVE_BUILTINS_CODE_OBJECTS);
#ifdef V8_EXTERNAL_CODE_SPACE
DCHECK(is_off_heap_trampoline());
#endif
set_code_entry_point(isolate_for_sandbox, entry);
}
void CodeDataContainer::UpdateCodeEntryPoint(Isolate* isolate_for_sandbox,
Code code) {
CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
@@ -1621,6 +1630,12 @@ bool CodeDataContainer::is_off_heap_trampoline() const {
return IsOffHeapTrampoline::decode(flags(kRelaxedLoad));
}
void CodeDataContainer::set_is_off_heap_trampoline_for_hash(bool value) {
uint16_t flags_value = flags(kRelaxedLoad);
flags_value = IsOffHeapTrampoline::update(flags_value, value);
set_flags(flags_value, kRelaxedStore);
}
bool CodeDataContainer::is_optimized_code() const {
return CodeKindIsOptimizedJSFunction(kind());
}
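The new set_is_off_heap_trampoline_for_hash() accessor follows the usual decode/update bit-field idiom used for V8's flags fields. A minimal, self-contained sketch of that idiom (a simplified stand-in, not V8's actual base::BitField template; the bit position is assumed):

#include <cstdint>

// Simplified bit-field helper in the spirit of v8::base::BitField.
template <typename T, int kShift, int kSize, typename U = uint16_t>
struct BitField {
  static constexpr U kMask = static_cast<U>(((U{1} << kSize) - 1) << kShift);
  static constexpr U encode(T value) {
    return static_cast<U>(static_cast<U>(value) << kShift) & kMask;
  }
  static constexpr T decode(U storage) {
    return static_cast<T>((storage & kMask) >> kShift);
  }
  static constexpr U update(U storage, T value) {
    return static_cast<U>((storage & ~kMask) | encode(value));
  }
};

// Assumed bit position; the real layout lives in the Code/CodeDataContainer
// flag definitions.
using IsOffHeapTrampoline = BitField<bool, 0, 1>;

// Mirrors the read-modify-write in set_is_off_heap_trampoline_for_hash().
uint16_t SetTrampolineBitForHash(uint16_t flags_value, bool value) {
  return IsOffHeapTrampoline::update(flags_value, value);
}

decode() is the shape of is_off_heap_trampoline(); update() is what the hash-only setter uses so the flag can be flipped without touching neighboring bits.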
@@ -91,6 +91,8 @@ class CodeDataContainer : public HeapObject {
inline void SetCodeAndEntryPoint(
Isolate* isolate_for_sandbox, Code code,
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
inline void SetEntryPointForOffHeapBuiltin(Isolate* isolate_for_sandbox,
Address entry);
// Updates the value of the code entry point. The code must be equal to
// the code() value.
inline void UpdateCodeEntryPoint(Isolate* isolate_for_sandbox, Code code);
@@ -290,12 +292,14 @@ class CodeDataContainer : public HeapObject {
// When V8_EXTERNAL_CODE_SPACE is enabled the flags field contains cached
// values of some flags from the respective Code object.
DECL_RELAXED_UINT16_ACCESSORS(flags)
inline void set_is_off_heap_trampoline_for_hash(bool value);
template <typename IsolateT>
friend class Deserializer;
friend Factory;
friend FactoryBase<Factory>;
friend FactoryBase<LocalFactory>;
friend Isolate;
OBJECT_CONSTRUCTORS(CodeDataContainer, HeapObject);
};
@@ -488,8 +488,18 @@ void Deserializer<IsolateT>::PostProcessNewObject(Handle<Map> map,
code_data_container.set_code_cage_base(isolate()->code_cage_base());
code_data_container.init_code_entry_point(main_thread_isolate(),
kNullAddress);
code_data_container.UpdateCodeEntryPoint(main_thread_isolate(),
code_data_container.code());
#ifdef V8_EXTERNAL_CODE_SPACE
if (V8_REMOVE_BUILTINS_CODE_OBJECTS &&
code_data_container.is_off_heap_trampoline()) {
Address entry = OffHeapInstructionStart(code_data_container,
code_data_container.builtin_id());
code_data_container.SetEntryPointForOffHeapBuiltin(main_thread_isolate(),
entry);
} else {
code_data_container.UpdateCodeEntryPoint(main_thread_isolate(),
code_data_container.code());
}
#endif
} else if (InstanceTypeChecker::IsMap(instance_type)) {
if (FLAG_log_maps) {
// Keep track of all seen Maps to log them later since they might be only
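Conceptually, the deserializer change above selects where the cached entry point comes from: the embedded blob for Code-less builtin trampolines, the attached Code object otherwise. A hedged standalone sketch of that dispatch, with hypothetical stand-in types and fake addresses rather than V8's real API:

#include <cstdint>

// Hypothetical stand-in for the relevant CodeDataContainer state.
struct FakeCodeDataContainer {
  bool is_off_heap_trampoline;
  int builtin_id;                 // only meaningful for trampolines
  uintptr_t attached_code_start;  // InstructionStart() of an attached Code
  uintptr_t code_entry_point;     // cached entry point being re-derived
};

// Pretend lookup of a builtin's entry in the embedded (off-heap) blob.
uintptr_t FakeOffHeapInstructionStart(int builtin_id) {
  const uintptr_t kFakeBlobBase = 0x10000;
  return kFakeBlobBase + static_cast<uintptr_t>(builtin_id) * 0x100;
}

void FixUpEntryPointAfterDeserialization(FakeCodeDataContainer& cdc) {
  if (cdc.is_off_heap_trampoline) {
    // Code-less builtin: the entry point comes straight from the blob.
    cdc.code_entry_point = FakeOffHeapInstructionStart(cdc.builtin_id);
  } else {
    // Regular case: the entry point mirrors the attached Code object.
    cdc.code_entry_point = cdc.attached_code_start;
  }
}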
@@ -940,6 +940,13 @@ void Serializer::ObjectSerializer::VisitCodePointer(HeapObject host,
PtrComprCageBase code_cage_base(isolate());
#endif
Object contents = slot.load(code_cage_base);
if (contents.IsSmi()) {
// The contents of the CodeObjectSlot being a Smi means that the host
// CodeDataContainer corresponds to a Code-less embedded builtin trampoline;
// the value will be serialized as a Smi.
DCHECK_EQ(contents, Smi::zero());
return;
}
DCHECK(HAS_STRONG_HEAP_OBJECT_TAG(contents.ptr()));
DCHECK(contents.IsCode());
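The Smi sentinel works because of V8's pointer tagging: a Smi keeps the low tag bit clear while a heap-object pointer sets it, so an empty code slot is cheap to tell apart from a real Code pointer, and Smi::zero() is simply the tagged integer 0 (which is why raw_code() != Smi::zero() in the verifier above reads like a null check). A simplified sketch of that check, ignoring pointer compression and the full tag layout (real V8 uses a two-bit tag to also distinguish weak references):

#include <cstdint>

// Simplified V8-style tagging constants.
constexpr uintptr_t kSmiTagMask = 1;
constexpr uintptr_t kSmiTag = 0;         // Smis keep the low bit clear.
constexpr uintptr_t kHeapObjectTag = 1;  // Heap object pointers set it.

inline bool IsSmi(uintptr_t tagged) {
  return (tagged & kSmiTagMask) == kSmiTag;
}

inline bool IsHeapObject(uintptr_t tagged) {
  return (tagged & kSmiTagMask) == kHeapObjectTag;
}

// Mirrors the early return in VisitCodePointer(): a zero Smi in the code slot
// marks a Code-less builtin trampoline, so there is no Code object to emit.
inline bool ShouldSkipCodeSlot(uintptr_t slot_contents) {
  return IsSmi(slot_contents) && slot_contents == 0;  // Smi::zero()
}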