Commit 572be6f1 authored by Michael Starzinger, committed by Commit Bot

[turbofan][ia32] Remove call to code object via slot.

R=jgruber@chromium.org
BUG=v8:6666,v8:9810

Change-Id: I972983d8e86729843f4a1bbe050e3b37a3c0c61c
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1881147
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Commit-Queue: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#64578}
parent c2135c79
......@@ -1283,6 +1283,7 @@ namespace internal {
V(WasmToNumber) \
V(WasmThrow) \
V(WasmRethrow) \
V(ArgumentsAdaptorTrampoline) \
V(DoubleToI) \
V(WasmI64ToBigInt) \
V(WasmI32PairToBigInt) \
......
......@@ -670,7 +670,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
if (op->IsImmediate()) {
Handle<Code> code = i.InputCode(0);
__ Call(code, RelocInfo::CODE_TARGET);
} else if (op->IsRegister()) {
} else {
Register reg = i.InputRegister(0);
DCHECK_IMPLIES(
HasCallDescriptorFlag(instr, CallDescriptor::kFixedTargetRegister),
......@@ -681,23 +681,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
} else {
__ call(reg);
}
} else {
CHECK(tasm()->root_array_available());
// This is used to allow calls to the arguments adaptor trampoline from
// code that only has 5 gp registers available and cannot call through
// an immediate. This happens when the arguments adaptor trampoline is
// not an embedded builtin.
// TODO(v8:6666): Remove once only embedded builtins are supported.
__ push(eax);
frame_access_state()->IncreaseSPDelta(1);
Operand virtual_call_target_register(
kRootRegister, IsolateData::virtual_call_target_register_offset());
__ mov(eax, i.InputOperand(0));
__ LoadCodeObjectEntry(eax, eax);
__ mov(virtual_call_target_register, eax);
__ pop(eax);
frame_access_state()->IncreaseSPDelta(-1);
__ call(virtual_call_target_register);
}
RecordCallPosition(instr);
frame_access_state()->ClearSPDelta();
......
......@@ -905,7 +905,6 @@ void InstructionSelector::InitializeCallBuffer(Node* call, CallBuffer* buffer,
bool call_code_immediate = (flags & kCallCodeImmediate) != 0;
bool call_address_immediate = (flags & kCallAddressImmediate) != 0;
bool call_use_fixed_target_reg = (flags & kCallFixedTargetRegister) != 0;
bool call_through_slot = (flags & kAllowCallThroughSlot) != 0;
switch (buffer->descriptor->kind()) {
case CallDescriptor::kCallCodeObject:
// TODO(jgruber, v8:7449): The below is a hack to support tail-calls from
......@@ -919,8 +918,7 @@ void InstructionSelector::InitializeCallBuffer(Node* call, CallBuffer* buffer,
: call_use_fixed_target_reg
? g.UseFixed(callee, kJavaScriptCallCodeStartRegister)
: is_tail_call ? g.UseUniqueRegister(callee)
: call_through_slot ? g.UseUniqueSlot(callee)
: g.UseRegister(callee));
: g.UseRegister(callee));
break;
case CallDescriptor::kCallAddress:
buffer->instruction_args.push_back(
......@@ -2802,15 +2800,6 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
// Improve constant pool and the heuristics in the register allocator
// for where to emit constants.
CallBufferFlags call_buffer_flags(kCallCodeImmediate | kCallAddressImmediate);
if (flags & CallDescriptor::kAllowCallThroughSlot) {
// TODO(v8:6666): Remove kAllowCallThroughSlot and use a pc-relative call
// instead once builtins are embedded in every build configuration.
call_buffer_flags |= kAllowCallThroughSlot;
#ifndef V8_TARGET_ARCH_32_BIT
// kAllowCallThroughSlot is only supported on ia32.
UNREACHABLE();
#endif
}
InitializeCallBuffer(node, &buffer, call_buffer_flags, false);
EmitPrepareArguments(&(buffer.pushed_nodes), call_descriptor, node);
......@@ -2892,7 +2881,6 @@ void InstructionSelector::VisitTailCall(Node* node) {
if (callee->flags() & CallDescriptor::kFixedTargetRegister) {
flags |= kCallFixedTargetRegister;
}
DCHECK_EQ(callee->flags() & CallDescriptor::kAllowCallThroughSlot, 0);
InitializeCallBuffer(node, &buffer, flags, true, stack_param_delta);
// Select the appropriate opcode based on the call type.
......
......@@ -559,8 +559,7 @@ class V8_EXPORT_PRIVATE InstructionSelector final {
kCallCodeImmediate = 1u << 0,
kCallAddressImmediate = 1u << 1,
kCallTail = 1u << 2,
kCallFixedTargetRegister = 1u << 3,
kAllowCallThroughSlot = 1u << 4
kCallFixedTargetRegister = 1u << 3
};
using CallBufferFlags = base::Flags<CallBufferFlag>;
......
......@@ -210,14 +210,13 @@ class V8_EXPORT_PRIVATE CallDescriptor final
// Use the kJavaScriptCallCodeStartRegister (fixed) register for the
// indirect target address when calling.
kFixedTargetRegister = 1u << 7,
kAllowCallThroughSlot = 1u << 8,
kCallerSavedRegisters = 1u << 9,
kCallerSavedRegisters = 1u << 8,
// The kCallerSavedFPRegisters only matters (and set) when the more general
// flag for kCallerSavedRegisters above is also set.
kCallerSavedFPRegisters = 1u << 10,
kCallerSavedFPRegisters = 1u << 9,
// AIX has a function descriptor by default but it can be disabled for a
// certain CFunction call (only used for Kind::kCallAddress).
kNoFunctionDescriptor = 1u << 11,
kNoFunctionDescriptor = 1u << 10,
};
using Flags = base::Flags<Flag>;
......
......@@ -5872,8 +5872,9 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
LOAD_RAW(callable_node,
wasm::ObjectAccess::ContextOffsetInTaggedJSFunction(),
MachineType::TypeCompressedTaggedPointer());
args[pos++] = BuildLoadBuiltinFromIsolateRoot(
Builtins::kArgumentsAdaptorTrampoline);
args[pos++] = mcgraph()->RelocatableIntPtrConstant(
wasm::WasmCode::kArgumentsAdaptorTrampoline,
RelocInfo::WASM_STUB_CALL);
args[pos++] = callable_node; // target callable
args[pos++] = undefined_node; // new target
args[pos++] = mcgraph()->Int32Constant(wasm_count); // argument count
......@@ -5901,16 +5902,10 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
args[pos++] = undefined_node;
}
#ifdef V8_TARGET_ARCH_IA32
// TODO(v8:6666): Remove kAllowCallThroughSlot and use a pc-relative
// call instead once builtins are embedded in every build configuration.
CallDescriptor::Flags flags = CallDescriptor::kAllowCallThroughSlot;
#else
CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
#endif
auto call_descriptor = Linkage::GetStubCallDescriptor(
mcgraph()->zone(), ArgumentsAdaptorDescriptor{}, 1 + wasm_count,
flags, Operator::kNoProperties);
CallDescriptor::kNoFlags, Operator::kNoProperties,
StubCallMode::kCallWasmRuntimeStub);
// Convert wasm numbers to JS values.
pos = AddArgumentNodes(VectorOf(args), pos, wasm_count, sig_);
......
......@@ -74,11 +74,6 @@ class IsolateData final {
return builtins_table_offset() + id * kSystemPointerSize;
}
// Root-register-relative offset of the virtual call target register value.
static constexpr int virtual_call_target_register_offset() {
return kVirtualCallTargetRegisterOffset - kIsolateRootBias;
}
// The FP and PC that are saved right before TurboAssembler::CallCFunction.
Address* fast_c_call_caller_fp_address() { return &fast_c_call_caller_fp_; }
Address* fast_c_call_caller_pc_address() { return &fast_c_call_caller_pc_; }
......@@ -131,7 +126,6 @@ class IsolateData final {
V(kThreadLocalTopOffset, ThreadLocalTop::kSizeInBytes) \
V(kBuiltinEntryTableOffset, Builtins::builtin_count* kSystemPointerSize) \
V(kBuiltinsTableOffset, Builtins::builtin_count* kSystemPointerSize) \
V(kVirtualCallTargetRegisterOffset, kSystemPointerSize) \
V(kStackIsIterableOffset, kUInt8Size) \
/* This padding aligns IsolateData size by 8 bytes. */ \
V(kPaddingOffset, \
......@@ -184,11 +178,6 @@ class IsolateData final {
// The entries in this array are tagged pointers to Code objects.
Address builtins_[Builtins::builtin_count] = {};
// For isolate-independent calls on ia32.
// TODO(v8:6666): Remove once wasm supports pc-relative jumps to builtins on
// ia32 (otherwise the arguments adaptor call runs out of registers).
void* virtual_call_target_register_ = nullptr;
// Whether the SafeStackFrameIterator can successfully iterate the current
// stack. Only valid values are 0 or 1.
uint8_t stack_is_iterable_ = 1;
......@@ -227,8 +216,6 @@ void IsolateData::AssertPredictableLayout() {
STATIC_ASSERT(offsetof(IsolateData, thread_local_top_) ==
kThreadLocalTopOffset);
STATIC_ASSERT(offsetof(IsolateData, builtins_) == kBuiltinsTableOffset);
STATIC_ASSERT(offsetof(IsolateData, virtual_call_target_register_) ==
kVirtualCallTargetRegisterOffset);
STATIC_ASSERT(offsetof(IsolateData, external_memory_) ==
kExternalMemoryOffset);
STATIC_ASSERT(offsetof(IsolateData, external_memory_limit_) ==
......
......@@ -200,16 +200,6 @@ EmbeddedData EmbeddedData::FromIsolate(Isolate* isolate) {
saw_unsafe_builtin = true;
fprintf(stderr, "%s is not isolate-independent.\n", Builtins::name(i));
}
if (Builtins::IsWasmRuntimeStub(i) &&
RelocInfo::RequiresRelocation(code)) {
// Wasm additionally requires that its runtime stubs must be
// individually PIC (i.e. we must be able to copy each stub outside the
// embedded area without relocations). In particular, that means
// pc-relative calls to other builtins are disallowed.
saw_unsafe_builtin = true;
fprintf(stderr, "%s is a wasm runtime stub but needs relocation.\n",
Builtins::name(i));
}
if (BuiltinAliasesOffHeapTrampolineRegister(isolate, code)) {
saw_unsafe_builtin = true;
fprintf(stderr, "%s aliases the off-heap trampoline register.\n",
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment