Commit 519bd47f authored by jgruber, committed by Commit Bot

[builtins] Update isolate-independent list & related fixups

With the exception of the InterpreterEntryTrampoline, all builtins are
now isolate-independent and can be embedded into the binary.

This CL updates the corresponding list and also contains a few smallish
tweaks to support having these builtins off the heap:

* wasm: copy the off-heap builtin, not its trampoline.
* Code::contains: support off-heap builtins.
* JSFunction::is_compiled: compare builtin index instead of identity
  (this is relevant during mksnapshot when we transition from the
  on-heap builtin to its off-heap representation + the trampoline).
* Remove old DCHECKs.
* A few tweaks in macro-assembler ports that have snuck in recently.

Bug: v8:6666
Change-Id: Iabf5b47ade3826a4da35b6b75a4e61614f0158b0
Reviewed-on: https://chromium-review.googlesource.com/1032777
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#52935}
parent fd2d5314
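The is_compiled and Code::contains tweaks listed above share one pattern: once a builtin's instructions can live outside the managed heap, checks that relied on on-heap object identity or the on-heap instruction range must consult the builtin index or the off-heap instruction range instead. A minimal standalone sketch of that pattern, using made-up types and constants rather than the real V8 classes:

#include <cstddef>
#include <cstdint>

// Hypothetical stand-ins for illustration only; these are not the real
// v8::internal::Code class or Builtins constants.
constexpr int kCompileLazyIndex = 42;  // placeholder for Builtins::kCompileLazy

struct CodeRef {
  uintptr_t heap_start = 0;          // on-heap instruction range
  size_t heap_size = 0;
  bool is_embedded_builtin = false;  // embedded (off-heap) builtin?
  uintptr_t off_heap_start = 0;      // off-heap (embedded blob) range
  uintptr_t off_heap_end = 0;
  int builtin_index = -1;            // placeholder for Code::builtin_index()

  // Mirrors the Code::contains change: embedded builtins are range-checked
  // against their off-heap instructions, everything else against the
  // on-heap object.
  bool contains(uintptr_t inner_pointer) const {
    if (is_embedded_builtin) {
      return off_heap_start <= inner_pointer && inner_pointer < off_heap_end;
    }
    return heap_start <= inner_pointer &&
           inner_pointer < heap_start + heap_size;
  }
};

// Mirrors the JSFunction::is_compiled change: compare the builtin index
// instead of object identity, which stays meaningful while mksnapshot swaps
// the on-heap builtin for its off-heap representation plus trampoline.
bool IsCompiled(const CodeRef& code) {
  return code.builtin_index != kCompileLazyIndex;
}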
@@ -1224,7 +1224,7 @@ void TurboAssembler::EnterFrame(StackFrame::Type type,
   mov(scratch, Operand(StackFrame::TypeToMarker(type)));
   PushCommonFrame(scratch);
   if (type == StackFrame::INTERNAL) {
-    mov(scratch, Operand(CodeObject()));
+    Move(scratch, CodeObject());
     push(scratch);
   }
 }
@@ -1626,7 +1626,7 @@ void MacroAssembler::MaybeDropFrames() {
   // Check whether we need to drop frames to restart a function on the stack.
   ExternalReference restart_fp =
       ExternalReference::debug_restart_fp_address(isolate());
-  mov(r1, Operand(restart_fp));
+  Move(r1, restart_fp);
   ldr(r1, MemOperand(r1));
   tst(r1, r1);
   Jump(BUILTIN_CODE(isolate(), FrameDropperTrampoline), RelocInfo::CODE_TARGET,
@@ -2456,12 +2456,12 @@ void TurboAssembler::Prologue() {
 void TurboAssembler::EnterFrame(StackFrame::Type type) {
   UseScratchRegisterScope temps(this);
-  Register type_reg = temps.AcquireX();
-  Register code_reg = temps.AcquireX();
   if (type == StackFrame::INTERNAL) {
+    Register code_reg = temps.AcquireX();
+    Move(code_reg, CodeObject());
+    Register type_reg = temps.AcquireX();
     Mov(type_reg, StackFrame::TypeToMarker(type));
-    Mov(code_reg, Operand(CodeObject()));
     Push(lr, fp, type_reg, code_reg);
     Add(fp, sp, InternalFrameConstants::kFixedFrameSizeFromFp);
     // sp[4] : lr
@@ -2469,6 +2469,7 @@ void TurboAssembler::EnterFrame(StackFrame::Type type) {
     // sp[1] : type
     // sp[0] : [code object]
   } else if (type == StackFrame::WASM_COMPILED) {
+    Register type_reg = temps.AcquireX();
     Mov(type_reg, StackFrame::TypeToMarker(type));
     Push(lr, fp);
     Mov(fp, sp);
@@ -2479,6 +2480,7 @@ void TurboAssembler::EnterFrame(StackFrame::Type type) {
     // sp[0] : for alignment
   } else {
     DCHECK_EQ(type, StackFrame::CONSTRUCT);
+    Register type_reg = temps.AcquireX();
     Mov(type_reg, StackFrame::TypeToMarker(type));
     // Users of this frame type push a context pointer after the type field,
@@ -483,7 +483,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
   // Flood function if we are stepping.
   ExternalReference debug_hook =
       ExternalReference::debug_hook_on_function_call_address(masm->isolate());
-  __ mov(scratch, Operand(debug_hook));
+  __ Move(scratch, debug_hook);
   __ ldrsb(scratch, MemOperand(scratch));
   __ cmp(scratch, Operand(0));
   __ b(ne, &prepare_step_in_if_stepping);
@@ -492,7 +492,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
   // generator.
   ExternalReference debug_suspended_generator =
       ExternalReference::debug_suspended_generator_address(masm->isolate());
-  __ mov(scratch, Operand(debug_suspended_generator));
+  __ Move(scratch, debug_suspended_generator);
   __ ldr(scratch, MemOperand(scratch));
   __ cmp(scratch, Operand(r1));
   __ b(eq, &prepare_step_in_suspended_generator);
@@ -627,7 +627,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
   // Setup the context (we need to use the caller context from the isolate).
   ExternalReference context_address = ExternalReference::Create(
       IsolateAddressId::kContextAddress, masm->isolate());
-  __ mov(cp, Operand(context_address));
+  __ Move(cp, context_address);
   __ ldr(cp, MemOperand(cp));
   // Push the function and the receiver onto the stack.
@@ -1325,8 +1325,7 @@ static void GetSharedFunctionInfoCode(MacroAssembler* masm, Register sfi_data,
   // IsSmi: Is builtin
   __ JumpIfNotSmi(sfi_data, &check_is_bytecode_array);
-  __ Move(scratch1,
-          Operand(ExternalReference::builtins_address(masm->isolate())));
+  __ Move(scratch1, ExternalReference::builtins_address(masm->isolate()));
   __ ldr(sfi_data, MemOperand::PointerAddressFromSmiKey(scratch1, sfi_data));
   __ b(&done);
@@ -1466,8 +1465,7 @@ void Builtins::Generate_DeserializeLazy(MacroAssembler* masm) {
   // Load the code object at builtins_table[builtin_id] into scratch1.
   __ SmiUntag(scratch1);
-  __ Move(scratch0,
-          Operand(ExternalReference::builtins_address(masm->isolate())));
+  __ Move(scratch0, ExternalReference::builtins_address(masm->isolate()));
   __ ldr(scratch1, MemOperand(scratch0, scratch1, LSL, kPointerSizeLog2));
   // Check if the loaded code object has already been deserialized. This is
@@ -516,14 +516,14 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
   Label stepping_prepared;
   ExternalReference debug_hook =
       ExternalReference::debug_hook_on_function_call_address(masm->isolate());
-  __ Mov(x10, Operand(debug_hook));
+  __ Mov(x10, debug_hook);
   __ Ldrsb(x10, MemOperand(x10));
   __ CompareAndBranch(x10, Operand(0), ne, &prepare_step_in_if_stepping);
   // Flood function if we need to continue stepping in the suspended generator.
   ExternalReference debug_suspended_generator =
       ExternalReference::debug_suspended_generator_address(masm->isolate());
-  __ Mov(x10, Operand(debug_suspended_generator));
+  __ Mov(x10, debug_suspended_generator);
   __ Ldr(x10, MemOperand(x10));
   __ CompareAndBranch(x10, Operand(x1), eq,
                       &prepare_step_in_suspended_generator);
@@ -674,11 +674,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
   Register scratch = x10;
   Register slots_to_claim = x11;
-  {
-    NoRootArrayScope no_root_array(masm);
-    ProfileEntryHookStub::MaybeCallEntryHook(masm);
-    __ InitializeRootRegister();
-  }
+  ProfileEntryHookStub::MaybeCallEntryHook(masm);
   {
     // Enter an internal frame.
@@ -1413,7 +1409,7 @@ void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
 void Builtins::Generate_CompileLazyDeoptimizedCode(MacroAssembler* masm) {
   // Set the code slot inside the JSFunction to CompileLazy.
-  __ Mov(x2, BUILTIN_CODE(masm->isolate(), CompileLazy));
+  __ Move(x2, BUILTIN_CODE(masm->isolate(), CompileLazy));
   __ Str(x2, FieldMemOperand(x1, JSFunction::kCodeOffset));
   __ RecordWriteField(x1, JSFunction::kCodeOffset, x2, x5, kLRHasNotBeenSaved,
                       kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
@@ -482,7 +482,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
   // Setup the context (we need to use the caller context from the isolate).
   ExternalReference context_address = ExternalReference::Create(
       IsolateAddressId::kContextAddress, masm->isolate());
-  __ li(cp, Operand(context_address));
+  __ li(cp, context_address);
   __ lw(cp, MemOperand(cp));
   // Push the function and the receiver onto the stack.
@@ -583,14 +583,14 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
   Label stepping_prepared;
   ExternalReference debug_hook =
       ExternalReference::debug_hook_on_function_call_address(masm->isolate());
-  __ li(t1, Operand(debug_hook));
+  __ li(t1, debug_hook);
   __ lb(t1, MemOperand(t1));
   __ Branch(&prepare_step_in_if_stepping, ne, t1, Operand(zero_reg));
   // Flood function if we need to continue stepping in the suspended generator.
   ExternalReference debug_suspended_generator =
       ExternalReference::debug_suspended_generator_address(masm->isolate());
-  __ li(t1, Operand(debug_suspended_generator));
+  __ li(t1, debug_suspended_generator);
   __ lw(t1, MemOperand(t1));
   __ Branch(&prepare_step_in_suspended_generator, eq, a1, Operand(t1));
   __ bind(&stepping_prepared);
@@ -474,14 +474,14 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
   Label stepping_prepared;
   ExternalReference debug_hook =
       ExternalReference::debug_hook_on_function_call_address(masm->isolate());
-  __ li(a5, Operand(debug_hook));
+  __ li(a5, debug_hook);
   __ Lb(a5, MemOperand(a5));
   __ Branch(&prepare_step_in_if_stepping, ne, a5, Operand(zero_reg));
   // Flood function if we need to continue stepping in the suspended generator.
   ExternalReference debug_suspended_generator =
       ExternalReference::debug_suspended_generator_address(masm->isolate());
-  __ li(a5, Operand(debug_suspended_generator));
+  __ li(a5, debug_suspended_generator);
   __ Ld(a5, MemOperand(a5));
   __ Branch(&prepare_step_in_suspended_generator, eq, a1, Operand(a5));
   __ bind(&stepping_prepared);
@@ -621,7 +621,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
   // Setup the context (we need to use the caller context from the isolate).
   ExternalReference context_address = ExternalReference::Create(
       IsolateAddressId::kContextAddress, masm->isolate());
-  __ li(cp, Operand(context_address));
+  __ li(cp, context_address);
   __ Ld(cp, MemOperand(cp));
   // Push the function and the receiver onto the stack.
@@ -1237,9 +1237,6 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
   // Set the return address to the correct point in the interpreter entry
   // trampoline.
   Label builtin_trampoline, trampoline_loaded;
-  // TODO(jgruber,v8:6666): Update logic once builtin is isolate-independent.
-  DCHECK(
-      !Builtins::IsIsolateIndependent(Builtins::kInterpreterEntryTrampoline));
   Smi* interpreter_entry_return_pc_offset(
       masm->isolate()->heap()->interpreter_entry_return_pc_offset());
   DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
@@ -4807,7 +4807,7 @@ void TurboAssembler::EnterFrame(StackFrame::Type type) {
   sw(t9, MemOperand(sp, stack_offset));
   if (type == StackFrame::INTERNAL) {
     DCHECK_EQ(stack_offset, kPointerSize);
-    li(t9, Operand(CodeObject()));
+    li(t9, CodeObject());
     sw(t9, MemOperand(sp, 0));
   } else {
     DCHECK_EQ(stack_offset, 0);
@@ -5127,7 +5127,7 @@ void TurboAssembler::EnterFrame(StackFrame::Type type) {
   Sd(t9, MemOperand(sp, stack_offset));
   if (type == StackFrame::INTERNAL) {
     DCHECK_EQ(stack_offset, kPointerSize);
-    li(t9, Operand(CodeObject()));
+    li(t9, CodeObject());
     Sd(t9, MemOperand(sp, 0));
   } else {
     DCHECK_EQ(stack_offset, 0);
@@ -2585,8 +2585,7 @@ Object* JSFunction::prototype() {
 bool JSFunction::is_compiled() {
-  Builtins* builtins = GetIsolate()->builtins();
-  return code() != builtins->builtin(Builtins::kCompileLazy);
+  return code()->builtin_index() != Builtins::kCompileLazy;
 }
 ACCESSORS(JSProxy, target, Object, kTargetOffset)
@@ -316,7 +316,13 @@ int Code::relocation_size() const {
 Address Code::entry() const { return raw_instruction_start(); }
 bool Code::contains(Address inner_pointer) {
-  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
+#ifdef V8_EMBEDDED_BUILTINS
+  if (Builtins::IsEmbeddedBuiltin(this)) {
+    return (OffHeapInstructionStart() <= inner_pointer) &&
+           (inner_pointer < OffHeapInstructionEnd());
+  }
+#endif
+  return (address() <= inner_pointer) && (inner_pointer < address() + Size());
 }
 int Code::ExecutableSize() const {
@@ -372,8 +372,7 @@ EmbeddedData EmbeddedData::FromIsolate(Isolate* isolate) {
       !saw_unsafe_builtin,
       "One or more builtins marked as isolate-independent either contains "
       "isolate-dependent code or aliases the off-heap trampoline register. "
-      "If in doubt, ask jgruber@ or remove the affected builtin from the "
-      "Builtins::IsIsolateIndependent whitelist");
+      "If in doubt, ask jgruber@");
   const uint32_t blob_size = RawDataOffset() + raw_data_size;
   uint8_t* blob = new uint8_t[blob_size];
@@ -474,8 +474,8 @@ WasmCode* NativeModule::AddAnonymousCode(Handle<Code> code,
   std::unique_ptr<ProtectedInstructions> protected_instructions(
       new ProtectedInstructions(0));
   Vector<const byte> orig_instructions(
-      reinterpret_cast<byte*>(code->raw_instruction_start()),
-      static_cast<size_t>(code->raw_instruction_size()));
+      reinterpret_cast<byte*>(code->InstructionStart()),
+      static_cast<size_t>(code->InstructionSize()));
   int stack_slots = code->has_safepoint_info() ? code->stack_slots() : 0;
   int safepoint_table_offset =
       code->has_safepoint_info() ? code->safepoint_table_offset() : 0;
@@ -494,7 +494,7 @@ WasmCode* NativeModule::AddAnonymousCode(Handle<Code> code,
       std::move(protected_instructions),  // protected_instructions
       WasmCode::kOther,                   // kind
       WasmCode::kNoFlushICache);          // flush_icache
-  intptr_t delta = ret->instruction_start() - code->raw_instruction_start();
+  intptr_t delta = ret->instruction_start() - code->InstructionStart();
   int mask = RelocInfo::kApplyMask | RelocInfo::kCodeTargetMask |
              RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);