Commit 440bda10 authored by jgruber, committed by Commit Bot

[ia32] Reduce TFS register argument count to 3

To support all possible cases, we must limit the number of register
args for TFS builtins on ia32 to 3. Out of the 6 allocatable
registers, esi is taken as the context register and ebx is the root
register. One register must remain available to store the jump/call
target. Thus 3 registers remain for arguments.

This applies specifically to TFS builtins because they can be used as
targets of Torque function pointers, and such calls must keep a
register available to store the target.
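
To make the register arithmetic above concrete, here is a minimal
standalone sketch. The register roles and counts are taken from this
message; the constants and names are illustrative, not V8 code:

  // Illustrative only: the ia32 register budget described above.
  constexpr int kAllocatableRegisters = 6;  // eax, ecx, edx, ebx, esi, edi
  constexpr int kContextRegister = 1;       // esi holds the context
  constexpr int kRootRegister = 1;          // ebx holds the roots pointer
  constexpr int kCallTarget = 1;            // one register for the jump/call target

  constexpr int kRegistersLeftForArgs =
      kAllocatableRegisters - kContextRegister - kRootRegister - kCallTarget;
  static_assert(kRegistersLeftForArgs == 3,
                "matches the new TFS register argument limit");

  int main() { return 0; }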

Bug: v8:6666
Change-Id: I17d9450cc29c983ddaffc2deb36f45c1c414e166
Reviewed-on: https://chromium-review.googlesource.com/1209287
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#55759}
parent 5c244ca5
--- a/src/compiler/graph-assembler.cc
+++ b/src/compiler/graph-assembler.cc
@@ -269,9 +269,10 @@ Operator const* GraphAssembler::ToNumberOperator() {
     Callable callable =
         Builtins::CallableFor(jsgraph()->isolate(), Builtins::kToNumber);
     CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
-    auto call_descriptor =
-        Linkage::GetStubCallDescriptor(graph()->zone(), callable.descriptor(),
-                                       0, flags, Operator::kEliminatable);
+    auto call_descriptor = Linkage::GetStubCallDescriptor(
+        graph()->zone(), callable.descriptor(),
+        callable.descriptor().GetStackParameterCount(), flags,
+        Operator::kEliminatable);
     to_number_operator_.set(common()->Call(call_descriptor));
   }
   return to_number_operator_.get();
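
The same mechanical change repeats throughout this diff: call sites stop
hard-coding a stack parameter count of 0 and instead ask the descriptor
for its formal count. A standalone sketch of why that matters, using a
hypothetical stand-in type rather than V8's actual classes:

  #include <cassert>

  // Hypothetical stand-in; not V8's CallInterfaceDescriptor.
  struct DescriptorSketch {
    int total_params;     // formal parameter count
    int register_params;  // how many are passed in registers
    // Parameters that do not fit in registers travel on the stack.
    int GetStackParameterCount() const { return total_params - register_params; }
  };

  int main() {
    // On ia32 a 5-parameter TFS builtin now passes 3 arguments in registers
    // and 2 on the stack, so hard-coding 0 at the call site would build a
    // call descriptor that disagrees with the callee's actual convention.
    DescriptorSketch desc{/*total_params=*/5, /*register_params=*/3};
    assert(desc.GetStackParameterCount() == 2);
    return 0;
  }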
--- a/src/compiler/js-call-reducer.cc
+++ b/src/compiler/js-call-reducer.cc
@@ -865,7 +865,8 @@ Reduction JSCallReducer::ReduceReflectGet(Node* node) {
     Callable callable =
         Builtins::CallableFor(isolate(), Builtins::kGetProperty);
     auto call_descriptor = Linkage::GetStubCallDescriptor(
-        graph()->zone(), callable.descriptor(), 0,
+        graph()->zone(), callable.descriptor(),
+        callable.descriptor().GetStackParameterCount(),
         CallDescriptor::kNeedsFrameState, Operator::kNoProperties);
     Node* stub_code = jsgraph()->HeapConstant(callable.code());
     vtrue = etrue = if_true =
@@ -2585,7 +2586,8 @@ Reduction JSCallReducer::ReduceArrayIndexOfIncludes(
                 : GetCallableForArrayIncludes(receiver_map->elements_kind(),
                                               isolate());
   CallDescriptor const* const desc = Linkage::GetStubCallDescriptor(
-      graph()->zone(), callable.descriptor(), 0, CallDescriptor::kNoFlags,
+      graph()->zone(), callable.descriptor(),
+      callable.descriptor().GetStackParameterCount(), CallDescriptor::kNoFlags,
       Operator::kEliminatable);
   // The stub expects the following arguments: the receiver array, its elements,
   // the search_element, the array length, and the index to start searching
@@ -4835,7 +4837,8 @@ Reduction JSCallReducer::ReduceArrayPrototypeSlice(Node* node) {
     Callable callable =
         Builtins::CallableFor(isolate(), Builtins::kCloneFastJSArray);
     auto call_descriptor = Linkage::GetStubCallDescriptor(
-        graph()->zone(), callable.descriptor(), 0, CallDescriptor::kNoFlags,
+        graph()->zone(), callable.descriptor(),
+        callable.descriptor().GetStackParameterCount(), CallDescriptor::kNoFlags,
         Operator::kNoThrow | Operator::kNoDeopt);
     // Calls to Builtins::kCloneFastJSArray produce COW arrays
@@ -6493,8 +6496,9 @@ Reduction JSCallReducer::ReduceCollectionIteratorPrototypeNext(
     Callable const callable =
         Builtins::CallableFor(isolate(), Builtins::kOrderedHashTableHealIndex);
     auto call_descriptor = Linkage::GetStubCallDescriptor(
-        graph()->zone(), callable.descriptor(), 0, CallDescriptor::kNoFlags,
-        Operator::kEliminatable);
+        graph()->zone(), callable.descriptor(),
+        callable.descriptor().GetStackParameterCount(),
+        CallDescriptor::kNoFlags, Operator::kEliminatable);
     index = effect =
         graph()->NewNode(common()->Call(call_descriptor),
                          jsgraph()->HeapConstant(callable.code()), table, index,
--- a/src/compiler/js-typed-lowering.cc
+++ b/src/compiler/js-typed-lowering.cc
@@ -650,7 +650,8 @@ Reduction JSTypedLowering::ReduceJSAdd(Node* node) {
     Callable const callable =
         CodeFactory::StringAdd(isolate(), flags, NOT_TENURED);
     auto call_descriptor = Linkage::GetStubCallDescriptor(
-        graph()->zone(), callable.descriptor(), 0,
+        graph()->zone(), callable.descriptor(),
+        callable.descriptor().GetStackParameterCount(),
         CallDescriptor::kNeedsFrameState, properties);
     DCHECK_EQ(1, OperatorProperties::GetFrameStateInputCount(node->op()));
     node->InsertInput(graph()->zone(), 0,
@@ -1079,7 +1080,8 @@ Reduction JSTypedLowering::ReduceJSToObject(Node* node) {
   // Convert {receiver} using the ToObjectStub.
   Callable callable = Builtins::CallableFor(isolate(), Builtins::kToObject);
   auto call_descriptor = Linkage::GetStubCallDescriptor(
-      graph()->zone(), callable.descriptor(), 0,
+      graph()->zone(), callable.descriptor(),
+      callable.descriptor().GetStackParameterCount(),
       CallDescriptor::kNeedsFrameState, node->op()->properties());
   rfalse = efalse = if_false =
       graph()->NewNode(common()->Call(call_descriptor),
@@ -1802,7 +1804,8 @@ Reduction JSTypedLowering::ReduceJSForInNext(Node* node) {
     Callable const callable =
         Builtins::CallableFor(isolate(), Builtins::kForInFilter);
     auto call_descriptor = Linkage::GetStubCallDescriptor(
-        graph()->zone(), callable.descriptor(), 0,
+        graph()->zone(), callable.descriptor(),
+        callable.descriptor().GetStackParameterCount(),
         CallDescriptor::kNeedsFrameState);
     vfalse = efalse = if_false =
         graph()->NewNode(common()->Call(call_descriptor),
--- a/src/compiler/linkage.cc
+++ b/src/compiler/linkage.cc
@@ -339,6 +339,11 @@ CallDescriptor* Linkage::GetJSCallDescriptor(Zone* zone, bool is_osr,
 }
 
 // TODO(turbofan): cache call descriptors for code stub calls.
+// TODO(jgruber): Clean up stack parameter count handling. The descriptor
+// already knows the formal stack parameter count and ideally only additional
+// stack parameters should be passed into this method. All call-sites should
+// be audited for correctness (e.g. many used to assume a stack parameter count
+// of 0).
 CallDescriptor* Linkage::GetStubCallDescriptor(
     Zone* zone, const CallInterfaceDescriptor& descriptor,
     int stack_parameter_count, CallDescriptor::Flags flags,
@@ -350,6 +355,8 @@ CallDescriptor* Linkage::GetStubCallDescriptor(
   const size_t parameter_count =
       static_cast<size_t>(js_parameter_count + context_count);
 
+  DCHECK_GE(stack_parameter_count, descriptor.GetStackParameterCount());
+
   size_t return_count = descriptor.GetReturnCount();
   LocationSignature::Builder locations(zone, return_count, parameter_count);
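
The new TODO and DCHECK_GE encode a contract: callers may request
additional stack parameters on top of the descriptor's formal count, but
never fewer. A toy illustration of that invariant (hypothetical helper,
not V8 code):

  #include <cassert>

  // Hypothetical helper mirroring the DCHECK_GE above: the requested stack
  // parameter count must cover at least the descriptor's formal count.
  int CheckedStackParameterCount(int formal_count, int requested_count) {
    assert(requested_count >= formal_count);
    return requested_count;  // formal parameters plus any extras
  }

  int main() {
    assert(CheckedStackParameterCount(/*formal_count=*/2, /*requested_count=*/2) == 2);
    assert(CheckedStackParameterCount(/*formal_count=*/2, /*requested_count=*/4) == 4);
    return 0;
  }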
--- a/src/compiler/memory-optimizer.cc
+++ b/src/compiler/memory-optimizer.cc
@@ -238,8 +238,9 @@ void MemoryOptimizer::VisitAllocateRaw(Node* node,
                        : __
                          AllocateInOldSpaceStubConstant();
     if (!allocate_operator_.is_set()) {
+      auto descriptor = AllocateDescriptor{};
       auto call_descriptor = Linkage::GetStubCallDescriptor(
-          graph()->zone(), AllocateDescriptor{}, 0,
+          graph()->zone(), descriptor, descriptor.GetStackParameterCount(),
           CallDescriptor::kCanUseRoots, Operator::kNoThrow);
       allocate_operator_.set(common()->Call(call_descriptor));
     }
@@ -294,8 +295,9 @@ void MemoryOptimizer::VisitAllocateRaw(Node* node,
                        : __
                          AllocateInOldSpaceStubConstant();
     if (!allocate_operator_.is_set()) {
+      auto descriptor = AllocateDescriptor{};
       auto call_descriptor = Linkage::GetStubCallDescriptor(
-          graph()->zone(), AllocateDescriptor{}, 0,
+          graph()->zone(), descriptor, descriptor.GetStackParameterCount(),
           CallDescriptor::kCanUseRoots, Operator::kNoThrow);
       allocate_operator_.set(common()->Call(call_descriptor));
     }
--- a/src/compiler/pipeline.cc
+++ b/src/compiler/pipeline.cc
@@ -2443,12 +2443,7 @@ bool PipelineImpl::SelectInstructions(Linkage* linkage) {
     // due to register pressure when kRootRegister is not allocatable. Either
     // refactor these builtins or fix register allocation in these cases.
   } else if (Builtins::IsBuiltinId(data->info()->builtin_index()) &&
-             data->info()->builtin_index() != Builtins::kWasmArgumentsAdaptor &&
-             data->info()->builtin_index() != Builtins::kCopyFromTempArray &&
-             data->info()->builtin_index() != Builtins::kCopyWithinSortArray &&
-             data->info()->builtin_index() != Builtins::kBinaryInsertionSort &&
-             data->info()->builtin_index() != Builtins::kMergeAt &&
-             data->info()->builtin_index() != Builtins::kArrayTimSort) {
+             data->info()->builtin_index() != Builtins::kWasmArgumentsAdaptor) {
     // TODO(v8:6666): Extend support to user code. Ensure that
     // it is mutually exclusive with the Poisoning configuration above; and that
     // it cooperates with restricted allocatable registers above.
--- a/src/compiler/simplified-lowering.cc
+++ b/src/compiler/simplified-lowering.cc
@@ -3972,9 +3972,10 @@ Operator const* SimplifiedLowering::ToNumberOperator() {
   if (!to_number_operator_.is_set()) {
     Callable callable = Builtins::CallableFor(isolate(), Builtins::kToNumber);
     CallDescriptor::Flags flags = CallDescriptor::kNeedsFrameState;
-    auto call_descriptor =
-        Linkage::GetStubCallDescriptor(graph()->zone(), callable.descriptor(),
-                                       0, flags, Operator::kNoProperties);
+    auto call_descriptor = Linkage::GetStubCallDescriptor(
+        graph()->zone(), callable.descriptor(),
+        callable.descriptor().GetStackParameterCount(), flags,
+        Operator::kNoProperties);
     to_number_operator_.set(common()->Call(call_descriptor));
   }
   return to_number_operator_.get();
@@ -3985,9 +3986,10 @@ Operator const* SimplifiedLowering::ToNumberConvertBigIntOperator() {
     Callable callable =
         Builtins::CallableFor(isolate(), Builtins::kToNumberConvertBigInt);
     CallDescriptor::Flags flags = CallDescriptor::kNeedsFrameState;
-    auto call_descriptor =
-        Linkage::GetStubCallDescriptor(graph()->zone(), callable.descriptor(),
-                                       0, flags, Operator::kNoProperties);
+    auto call_descriptor = Linkage::GetStubCallDescriptor(
+        graph()->zone(), callable.descriptor(),
+        callable.descriptor().GetStackParameterCount(), flags,
+        Operator::kNoProperties);
     to_number_convert_big_int_operator_.set(common()->Call(call_descriptor));
   }
   return to_number_convert_big_int_operator_.get();
@@ -3997,9 +3999,10 @@ Operator const* SimplifiedLowering::ToNumericOperator() {
   if (!to_numeric_operator_.is_set()) {
     Callable callable = Builtins::CallableFor(isolate(), Builtins::kToNumeric);
     CallDescriptor::Flags flags = CallDescriptor::kNeedsFrameState;
-    auto call_descriptor =
-        Linkage::GetStubCallDescriptor(graph()->zone(), callable.descriptor(),
-                                       0, flags, Operator::kNoProperties);
+    auto call_descriptor = Linkage::GetStubCallDescriptor(
+        graph()->zone(), callable.descriptor(),
+        callable.descriptor().GetStackParameterCount(), flags,
+        Operator::kNoProperties);
     to_numeric_operator_.set(common()->Call(call_descriptor));
   }
   return to_numeric_operator_.get();
--- a/src/interface-descriptors.h
+++ b/src/interface-descriptors.h
@@ -307,17 +307,31 @@ class V8_EXPORT_PRIVATE CallInterfaceDescriptor {
   static inline CallDescriptors::Key key();
 
 #if defined(V8_TARGET_ARCH_IA32)
+// To support all possible cases, we must limit the number of register args for
+// TFS builtins on ia32 to 3. Out of the 6 allocatable registers, esi is taken
+// as the context register and ebx is the root register. One register must
+// remain available to store the jump/call target. Thus 3 registers remain for
+// arguments. The reason this applies to TFS builtins specifically is because
+// this becomes relevant for builtins used as targets of Torque function
+// pointers (which must have a register available to store the target).
+// TODO(jgruber): Ideally we should just decrement kMaxBuiltinRegisterParams but
+// that comes with its own set of complications. It's possible, but requires
+// refactoring the calling convention of other existing stubs.
 constexpr int kMaxBuiltinRegisterParams = 4;
+constexpr int kMaxTFSBuiltinRegisterParams = 3;
 #else
 constexpr int kMaxBuiltinRegisterParams = 5;
+constexpr int kMaxTFSBuiltinRegisterParams = kMaxBuiltinRegisterParams;
 #endif
+STATIC_ASSERT(kMaxTFSBuiltinRegisterParams <= kMaxBuiltinRegisterParams);
 
 #define DECLARE_DEFAULT_DESCRIPTOR(name, base)                                \
   DECLARE_DESCRIPTOR_WITH_BASE(name, base)                                    \
  protected:                                                                   \
   static const int kRegisterParams =                                          \
-      kParameterCount > kMaxBuiltinRegisterParams ? kMaxBuiltinRegisterParams \
-                                                  : kParameterCount;          \
+      kParameterCount > kMaxTFSBuiltinRegisterParams                          \
+          ? kMaxTFSBuiltinRegisterParams                                      \
+          : kParameterCount;                                                  \
   static const int kStackParams = kParameterCount - kRegisterParams;          \
   void InitializePlatformSpecific(CallInterfaceDescriptorData* data)          \
       override {                                                              \
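
For a concrete instance of the macro arithmetic above: a default
descriptor with five parameters now splits into three register
parameters and two stack parameters on ia32. A standalone sketch (the
constants mirror the hunk; the template stands in for V8's macro purely
for illustration):

  // Illustrative template standing in for DECLARE_DEFAULT_DESCRIPTOR.
  #if defined(V8_TARGET_ARCH_IA32)
  constexpr int kMaxTFSBuiltinRegisterParams = 3;
  #else
  constexpr int kMaxTFSBuiltinRegisterParams = 5;
  #endif

  template <int kParameterCount>
  struct DescriptorSplit {
    static constexpr int kRegisterParams =
        kParameterCount > kMaxTFSBuiltinRegisterParams
            ? kMaxTFSBuiltinRegisterParams
            : kParameterCount;
    static constexpr int kStackParams = kParameterCount - kRegisterParams;
  };

  // Every parameter is either in a register or on the stack; on ia32 a
  // 5-parameter descriptor yields kRegisterParams == 3, kStackParams == 2.
  static_assert(DescriptorSplit<5>::kRegisterParams +
                    DescriptorSplit<5>::kStackParams == 5,
                "all parameters accounted for");
  // Small descriptors stay entirely in registers on every architecture.
  static_assert(DescriptorSplit<2>::kStackParams == 0, "fits in registers");

  int main() { return 0; }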