Commit 572be6f1 authored by Michael Starzinger, committed by Commit Bot

[turbofan][ia32] Remove call to code object via slot.

R=jgruber@chromium.org
BUG=v8:6666,v8:9810

Change-Id: I972983d8e86729843f4a1bbe050e3b37a3c0c61c
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1881147
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Commit-Queue: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#64578}
parent c2135c79
@@ -1283,6 +1283,7 @@ namespace internal {
   V(WasmToNumber)                 \
   V(WasmThrow)                    \
   V(WasmRethrow)                  \
+  V(ArgumentsAdaptorTrampoline)   \
   V(DoubleToI)                    \
   V(WasmI64ToBigInt)              \
   V(WasmI32PairToBigInt)          \
@@ -670,7 +670,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       if (op->IsImmediate()) {
         Handle<Code> code = i.InputCode(0);
         __ Call(code, RelocInfo::CODE_TARGET);
-      } else if (op->IsRegister()) {
+      } else {
         Register reg = i.InputRegister(0);
         DCHECK_IMPLIES(
             HasCallDescriptorFlag(instr, CallDescriptor::kFixedTargetRegister),
@@ -681,23 +681,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
         } else {
           __ call(reg);
         }
-      } else {
-        CHECK(tasm()->root_array_available());
-        // This is used to allow calls to the arguments adaptor trampoline from
-        // code that only has 5 gp registers available and cannot call through
-        // an immediate. This happens when the arguments adaptor trampoline is
-        // not an embedded builtin.
-        // TODO(v8:6666): Remove once only embedded builtins are supported.
-        __ push(eax);
-        frame_access_state()->IncreaseSPDelta(1);
-        Operand virtual_call_target_register(
-            kRootRegister, IsolateData::virtual_call_target_register_offset());
-        __ mov(eax, i.InputOperand(0));
-        __ LoadCodeObjectEntry(eax, eax);
-        __ mov(virtual_call_target_register, eax);
-        __ pop(eax);
-        frame_access_state()->IncreaseSPDelta(-1);
-        __ call(virtual_call_target_register);
       }
       RecordCallPosition(instr);
       frame_access_state()->ClearSPDelta();
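For orientation, the two hunks above delete the ia32-only fallback that called a code object through a root-relative slot. The sequence below is a condensed, commented restatement of that removed code (the SP-delta bookkeeping via frame_access_state() is elided); it is an illustration only, not a standalone compilable example.

  // Removed ia32 fallback: no spare GP register is available, so the call
  // target is resolved into eax, stashed in a per-isolate slot, and the
  // call is then made through that slot.
  __ push(eax);                                // free eax as a scratch register
  Operand virtual_call_target_register(
      kRootRegister, IsolateData::virtual_call_target_register_offset());
  __ mov(eax, i.InputOperand(0));              // load the target code object
  __ LoadCodeObjectEntry(eax, eax);            // resolve its entry address
  __ mov(virtual_call_target_register, eax);   // stash the entry in the slot
  __ pop(eax);                                 // restore eax
  __ call(virtual_call_target_register);       // call through the slot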
@@ -905,7 +905,6 @@ void InstructionSelector::InitializeCallBuffer(Node* call, CallBuffer* buffer,
   bool call_code_immediate = (flags & kCallCodeImmediate) != 0;
   bool call_address_immediate = (flags & kCallAddressImmediate) != 0;
   bool call_use_fixed_target_reg = (flags & kCallFixedTargetRegister) != 0;
-  bool call_through_slot = (flags & kAllowCallThroughSlot) != 0;
   switch (buffer->descriptor->kind()) {
     case CallDescriptor::kCallCodeObject:
       // TODO(jgruber, v8:7449): The below is a hack to support tail-calls from
@@ -919,8 +918,7 @@ void InstructionSelector::InitializeCallBuffer(Node* call, CallBuffer* buffer,
               : call_use_fixed_target_reg
                     ? g.UseFixed(callee, kJavaScriptCallCodeStartRegister)
                     : is_tail_call ? g.UseUniqueRegister(callee)
-                                   : call_through_slot ? g.UseUniqueSlot(callee)
-                                                       : g.UseRegister(callee));
+                                   : g.UseRegister(callee));
       break;
     case CallDescriptor::kCallAddress:
       buffer->instruction_args.push_back(
@@ -2802,15 +2800,6 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
   // Improve constant pool and the heuristics in the register allocator
   // for where to emit constants.
   CallBufferFlags call_buffer_flags(kCallCodeImmediate | kCallAddressImmediate);
-  if (flags & CallDescriptor::kAllowCallThroughSlot) {
-    // TODO(v8:6666): Remove kAllowCallThroughSlot and use a pc-relative call
-    // instead once builtins are embedded in every build configuration.
-    call_buffer_flags |= kAllowCallThroughSlot;
-#ifndef V8_TARGET_ARCH_32_BIT
-    // kAllowCallThroughSlot is only supported on ia32.
-    UNREACHABLE();
-#endif
-  }
   InitializeCallBuffer(node, &buffer, call_buffer_flags, false);
   EmitPrepareArguments(&(buffer.pushed_nodes), call_descriptor, node);
@@ -2892,7 +2881,6 @@ void InstructionSelector::VisitTailCall(Node* node) {
   if (callee->flags() & CallDescriptor::kFixedTargetRegister) {
     flags |= kCallFixedTargetRegister;
   }
-  DCHECK_EQ(callee->flags() & CallDescriptor::kAllowCallThroughSlot, 0);
   InitializeCallBuffer(node, &buffer, flags, true, stack_param_delta);
   // Select the appropriate opcode based on the call type.
@@ -559,8 +559,7 @@ class V8_EXPORT_PRIVATE InstructionSelector final {
     kCallCodeImmediate = 1u << 0,
     kCallAddressImmediate = 1u << 1,
     kCallTail = 1u << 2,
-    kCallFixedTargetRegister = 1u << 3,
-    kAllowCallThroughSlot = 1u << 4
+    kCallFixedTargetRegister = 1u << 3
   };
   using CallBufferFlags = base::Flags<CallBufferFlag>;
@@ -210,14 +210,13 @@ class V8_EXPORT_PRIVATE CallDescriptor final
     // Use the kJavaScriptCallCodeStartRegister (fixed) register for the
     // indirect target address when calling.
     kFixedTargetRegister = 1u << 7,
-    kAllowCallThroughSlot = 1u << 8,
-    kCallerSavedRegisters = 1u << 9,
+    kCallerSavedRegisters = 1u << 8,
     // The kCallerSavedFPRegisters only matters (and set) when the more general
     // flag for kCallerSavedRegisters above is also set.
-    kCallerSavedFPRegisters = 1u << 10,
+    kCallerSavedFPRegisters = 1u << 9,
     // AIX has a function descriptor by default but it can be disabled for a
     // certain CFunction call (only used for Kind::kCallAddress).
-    kNoFunctionDescriptor = 1u << 11,
+    kNoFunctionDescriptor = 1u << 10,
   };
   using Flags = base::Flags<Flag>;
@@ -5872,8 +5872,9 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
         LOAD_RAW(callable_node,
                  wasm::ObjectAccess::ContextOffsetInTaggedJSFunction(),
                  MachineType::TypeCompressedTaggedPointer());
-    args[pos++] = BuildLoadBuiltinFromIsolateRoot(
-        Builtins::kArgumentsAdaptorTrampoline);
+    args[pos++] = mcgraph()->RelocatableIntPtrConstant(
+        wasm::WasmCode::kArgumentsAdaptorTrampoline,
+        RelocInfo::WASM_STUB_CALL);
     args[pos++] = callable_node;                         // target callable
     args[pos++] = undefined_node;                        // new target
     args[pos++] = mcgraph()->Int32Constant(wasm_count);  // argument count
@@ -5901,16 +5902,10 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
       args[pos++] = undefined_node;
     }
-#ifdef V8_TARGET_ARCH_IA32
-    // TODO(v8:6666): Remove kAllowCallThroughSlot and use a pc-relative
-    // call instead once builtins are embedded in every build configuration.
-    CallDescriptor::Flags flags = CallDescriptor::kAllowCallThroughSlot;
-#else
-    CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
-#endif
     auto call_descriptor = Linkage::GetStubCallDescriptor(
         mcgraph()->zone(), ArgumentsAdaptorDescriptor{}, 1 + wasm_count,
-        flags, Operator::kNoProperties);
+        CallDescriptor::kNoFlags, Operator::kNoProperties,
+        StubCallMode::kCallWasmRuntimeStub);
     // Convert wasm numbers to JS values.
     pos = AddArgumentNodes(VectorOf(args), pos, wasm_count, sig_);
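With the slot-based path gone, the wasm-to-JS wrapper above reaches the arguments adaptor the same way it reaches other wasm runtime stubs: the call target is a relocatable constant carrying the stub id (patched through a WASM_STUB_CALL relocation), and the stub call descriptor is built in kCallWasmRuntimeStub mode. A condensed excerpt of the added lines, shown side by side purely for readability:

  // Call target: the ArgumentsAdaptorTrampoline addressed as a wasm runtime stub.
  args[pos++] = mcgraph()->RelocatableIntPtrConstant(
      wasm::WasmCode::kArgumentsAdaptorTrampoline, RelocInfo::WASM_STUB_CALL);
  // Descriptor: a stub call resolved like any other wasm runtime stub call.
  auto call_descriptor = Linkage::GetStubCallDescriptor(
      mcgraph()->zone(), ArgumentsAdaptorDescriptor{}, 1 + wasm_count,
      CallDescriptor::kNoFlags, Operator::kNoProperties,
      StubCallMode::kCallWasmRuntimeStub);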
@@ -74,11 +74,6 @@ class IsolateData final {
     return builtins_table_offset() + id * kSystemPointerSize;
   }
-  // Root-register-relative offset of the virtual call target register value.
-  static constexpr int virtual_call_target_register_offset() {
-    return kVirtualCallTargetRegisterOffset - kIsolateRootBias;
-  }
-
   // The FP and PC that are saved right before TurboAssembler::CallCFunction.
   Address* fast_c_call_caller_fp_address() { return &fast_c_call_caller_fp_; }
   Address* fast_c_call_caller_pc_address() { return &fast_c_call_caller_pc_; }
@@ -131,7 +126,6 @@
   V(kThreadLocalTopOffset, ThreadLocalTop::kSizeInBytes)                    \
   V(kBuiltinEntryTableOffset, Builtins::builtin_count* kSystemPointerSize)  \
   V(kBuiltinsTableOffset, Builtins::builtin_count* kSystemPointerSize)      \
-  V(kVirtualCallTargetRegisterOffset, kSystemPointerSize)                   \
   V(kStackIsIterableOffset, kUInt8Size)                                     \
   /* This padding aligns IsolateData size by 8 bytes. */                    \
   V(kPaddingOffset,                                                         \
@@ -184,11 +178,6 @@
   // The entries in this array are tagged pointers to Code objects.
   Address builtins_[Builtins::builtin_count] = {};
-  // For isolate-independent calls on ia32.
-  // TODO(v8:6666): Remove once wasm supports pc-relative jumps to builtins on
-  // ia32 (otherwise the arguments adaptor call runs out of registers).
-  void* virtual_call_target_register_ = nullptr;
-
   // Whether the SafeStackFrameIterator can successfully iterate the current
   // stack. Only valid values are 0 or 1.
   uint8_t stack_is_iterable_ = 1;
@@ -227,8 +216,6 @@ void IsolateData::AssertPredictableLayout() {
   STATIC_ASSERT(offsetof(IsolateData, thread_local_top_) ==
                 kThreadLocalTopOffset);
   STATIC_ASSERT(offsetof(IsolateData, builtins_) == kBuiltinsTableOffset);
-  STATIC_ASSERT(offsetof(IsolateData, virtual_call_target_register_) ==
-                kVirtualCallTargetRegisterOffset);
   STATIC_ASSERT(offsetof(IsolateData, external_memory_) ==
                 kExternalMemoryOffset);
   STATIC_ASSERT(offsetof(IsolateData, external_memory_limit_) ==
@@ -200,16 +200,6 @@ EmbeddedData EmbeddedData::FromIsolate(Isolate* isolate) {
       saw_unsafe_builtin = true;
       fprintf(stderr, "%s is not isolate-independent.\n", Builtins::name(i));
     }
-    if (Builtins::IsWasmRuntimeStub(i) &&
-        RelocInfo::RequiresRelocation(code)) {
-      // Wasm additionally requires that its runtime stubs must be
-      // individually PIC (i.e. we must be able to copy each stub outside the
-      // embedded area without relocations). In particular, that means
-      // pc-relative calls to other builtins are disallowed.
-      saw_unsafe_builtin = true;
-      fprintf(stderr, "%s is a wasm runtime stub but needs relocation.\n",
-              Builtins::name(i));
-    }
     if (BuiltinAliasesOffHeapTrampolineRegister(isolate, code)) {
       saw_unsafe_builtin = true;
       fprintf(stderr, "%s aliases the off-heap trampoline register.\n",