Commit 440bda10 authored by jgruber, committed by Commit Bot

[ia32] Reduce TFS register argument count to 3

To support all possible cases, we must limit the number of register
args for TFS builtins on ia32 to 3. Out of the 6 allocatable
registers, esi is taken as the context register and ebx is the root
register. One register must remain available to store the jump/call
target. Thus 3 registers remain for arguments.

The reason this applies to TFS builtins specifically is because this
becomes relevant for builtins used as targets of Torque function
pointers (which must have a register available to store the target).

Bug: v8:6666
Change-Id: I17d9450cc29c983ddaffc2deb36f45c1c414e166
Reviewed-on: https://chromium-review.googlesource.com/1209287
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#55759}
parent 5c244ca5
This diff is collapsed.
...@@ -269,9 +269,10 @@ Operator const* GraphAssembler::ToNumberOperator() { ...@@ -269,9 +269,10 @@ Operator const* GraphAssembler::ToNumberOperator() {
Callable callable = Callable callable =
Builtins::CallableFor(jsgraph()->isolate(), Builtins::kToNumber); Builtins::CallableFor(jsgraph()->isolate(), Builtins::kToNumber);
CallDescriptor::Flags flags = CallDescriptor::kNoFlags; CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
auto call_descriptor = auto call_descriptor = Linkage::GetStubCallDescriptor(
Linkage::GetStubCallDescriptor(graph()->zone(), callable.descriptor(), graph()->zone(), callable.descriptor(),
0, flags, Operator::kEliminatable); callable.descriptor().GetStackParameterCount(), flags,
Operator::kEliminatable);
to_number_operator_.set(common()->Call(call_descriptor)); to_number_operator_.set(common()->Call(call_descriptor));
} }
return to_number_operator_.get(); return to_number_operator_.get();
......
...@@ -865,7 +865,8 @@ Reduction JSCallReducer::ReduceReflectGet(Node* node) { ...@@ -865,7 +865,8 @@ Reduction JSCallReducer::ReduceReflectGet(Node* node) {
Callable callable = Callable callable =
Builtins::CallableFor(isolate(), Builtins::kGetProperty); Builtins::CallableFor(isolate(), Builtins::kGetProperty);
auto call_descriptor = Linkage::GetStubCallDescriptor( auto call_descriptor = Linkage::GetStubCallDescriptor(
graph()->zone(), callable.descriptor(), 0, graph()->zone(), callable.descriptor(),
callable.descriptor().GetStackParameterCount(),
CallDescriptor::kNeedsFrameState, Operator::kNoProperties); CallDescriptor::kNeedsFrameState, Operator::kNoProperties);
Node* stub_code = jsgraph()->HeapConstant(callable.code()); Node* stub_code = jsgraph()->HeapConstant(callable.code());
vtrue = etrue = if_true = vtrue = etrue = if_true =
...@@ -2585,7 +2586,8 @@ Reduction JSCallReducer::ReduceArrayIndexOfIncludes( ...@@ -2585,7 +2586,8 @@ Reduction JSCallReducer::ReduceArrayIndexOfIncludes(
: GetCallableForArrayIncludes(receiver_map->elements_kind(), : GetCallableForArrayIncludes(receiver_map->elements_kind(),
isolate()); isolate());
CallDescriptor const* const desc = Linkage::GetStubCallDescriptor( CallDescriptor const* const desc = Linkage::GetStubCallDescriptor(
graph()->zone(), callable.descriptor(), 0, CallDescriptor::kNoFlags, graph()->zone(), callable.descriptor(),
callable.descriptor().GetStackParameterCount(), CallDescriptor::kNoFlags,
Operator::kEliminatable); Operator::kEliminatable);
// The stub expects the following arguments: the receiver array, its elements, // The stub expects the following arguments: the receiver array, its elements,
// the search_element, the array length, and the index to start searching // the search_element, the array length, and the index to start searching
...@@ -4835,7 +4837,8 @@ Reduction JSCallReducer::ReduceArrayPrototypeSlice(Node* node) { ...@@ -4835,7 +4837,8 @@ Reduction JSCallReducer::ReduceArrayPrototypeSlice(Node* node) {
Callable callable = Callable callable =
Builtins::CallableFor(isolate(), Builtins::kCloneFastJSArray); Builtins::CallableFor(isolate(), Builtins::kCloneFastJSArray);
auto call_descriptor = Linkage::GetStubCallDescriptor( auto call_descriptor = Linkage::GetStubCallDescriptor(
graph()->zone(), callable.descriptor(), 0, CallDescriptor::kNoFlags, graph()->zone(), callable.descriptor(),
callable.descriptor().GetStackParameterCount(), CallDescriptor::kNoFlags,
Operator::kNoThrow | Operator::kNoDeopt); Operator::kNoThrow | Operator::kNoDeopt);
// Calls to Builtins::kCloneFastJSArray produce COW arrays // Calls to Builtins::kCloneFastJSArray produce COW arrays
...@@ -6493,8 +6496,9 @@ Reduction JSCallReducer::ReduceCollectionIteratorPrototypeNext( ...@@ -6493,8 +6496,9 @@ Reduction JSCallReducer::ReduceCollectionIteratorPrototypeNext(
Callable const callable = Callable const callable =
Builtins::CallableFor(isolate(), Builtins::kOrderedHashTableHealIndex); Builtins::CallableFor(isolate(), Builtins::kOrderedHashTableHealIndex);
auto call_descriptor = Linkage::GetStubCallDescriptor( auto call_descriptor = Linkage::GetStubCallDescriptor(
graph()->zone(), callable.descriptor(), 0, CallDescriptor::kNoFlags, graph()->zone(), callable.descriptor(),
Operator::kEliminatable); callable.descriptor().GetStackParameterCount(),
CallDescriptor::kNoFlags, Operator::kEliminatable);
index = effect = index = effect =
graph()->NewNode(common()->Call(call_descriptor), graph()->NewNode(common()->Call(call_descriptor),
jsgraph()->HeapConstant(callable.code()), table, index, jsgraph()->HeapConstant(callable.code()), table, index,
......
...@@ -650,7 +650,8 @@ Reduction JSTypedLowering::ReduceJSAdd(Node* node) { ...@@ -650,7 +650,8 @@ Reduction JSTypedLowering::ReduceJSAdd(Node* node) {
Callable const callable = Callable const callable =
CodeFactory::StringAdd(isolate(), flags, NOT_TENURED); CodeFactory::StringAdd(isolate(), flags, NOT_TENURED);
auto call_descriptor = Linkage::GetStubCallDescriptor( auto call_descriptor = Linkage::GetStubCallDescriptor(
graph()->zone(), callable.descriptor(), 0, graph()->zone(), callable.descriptor(),
callable.descriptor().GetStackParameterCount(),
CallDescriptor::kNeedsFrameState, properties); CallDescriptor::kNeedsFrameState, properties);
DCHECK_EQ(1, OperatorProperties::GetFrameStateInputCount(node->op())); DCHECK_EQ(1, OperatorProperties::GetFrameStateInputCount(node->op()));
node->InsertInput(graph()->zone(), 0, node->InsertInput(graph()->zone(), 0,
...@@ -1079,7 +1080,8 @@ Reduction JSTypedLowering::ReduceJSToObject(Node* node) { ...@@ -1079,7 +1080,8 @@ Reduction JSTypedLowering::ReduceJSToObject(Node* node) {
// Convert {receiver} using the ToObjectStub. // Convert {receiver} using the ToObjectStub.
Callable callable = Builtins::CallableFor(isolate(), Builtins::kToObject); Callable callable = Builtins::CallableFor(isolate(), Builtins::kToObject);
auto call_descriptor = Linkage::GetStubCallDescriptor( auto call_descriptor = Linkage::GetStubCallDescriptor(
graph()->zone(), callable.descriptor(), 0, graph()->zone(), callable.descriptor(),
callable.descriptor().GetStackParameterCount(),
CallDescriptor::kNeedsFrameState, node->op()->properties()); CallDescriptor::kNeedsFrameState, node->op()->properties());
rfalse = efalse = if_false = rfalse = efalse = if_false =
graph()->NewNode(common()->Call(call_descriptor), graph()->NewNode(common()->Call(call_descriptor),
...@@ -1802,7 +1804,8 @@ Reduction JSTypedLowering::ReduceJSForInNext(Node* node) { ...@@ -1802,7 +1804,8 @@ Reduction JSTypedLowering::ReduceJSForInNext(Node* node) {
Callable const callable = Callable const callable =
Builtins::CallableFor(isolate(), Builtins::kForInFilter); Builtins::CallableFor(isolate(), Builtins::kForInFilter);
auto call_descriptor = Linkage::GetStubCallDescriptor( auto call_descriptor = Linkage::GetStubCallDescriptor(
graph()->zone(), callable.descriptor(), 0, graph()->zone(), callable.descriptor(),
callable.descriptor().GetStackParameterCount(),
CallDescriptor::kNeedsFrameState); CallDescriptor::kNeedsFrameState);
vfalse = efalse = if_false = vfalse = efalse = if_false =
graph()->NewNode(common()->Call(call_descriptor), graph()->NewNode(common()->Call(call_descriptor),
......
...@@ -339,6 +339,11 @@ CallDescriptor* Linkage::GetJSCallDescriptor(Zone* zone, bool is_osr, ...@@ -339,6 +339,11 @@ CallDescriptor* Linkage::GetJSCallDescriptor(Zone* zone, bool is_osr,
} }
// TODO(turbofan): cache call descriptors for code stub calls. // TODO(turbofan): cache call descriptors for code stub calls.
// TODO(jgruber): Clean up stack parameter count handling. The descriptor
// already knows the formal stack parameter count and ideally only additional
// stack parameters should be passed into this method. All call-sites should
// be audited for correctness (e.g. many used to assume a stack parameter count
// of 0).
CallDescriptor* Linkage::GetStubCallDescriptor( CallDescriptor* Linkage::GetStubCallDescriptor(
Zone* zone, const CallInterfaceDescriptor& descriptor, Zone* zone, const CallInterfaceDescriptor& descriptor,
int stack_parameter_count, CallDescriptor::Flags flags, int stack_parameter_count, CallDescriptor::Flags flags,
...@@ -350,6 +355,8 @@ CallDescriptor* Linkage::GetStubCallDescriptor( ...@@ -350,6 +355,8 @@ CallDescriptor* Linkage::GetStubCallDescriptor(
const size_t parameter_count = const size_t parameter_count =
static_cast<size_t>(js_parameter_count + context_count); static_cast<size_t>(js_parameter_count + context_count);
DCHECK_GE(stack_parameter_count, descriptor.GetStackParameterCount());
size_t return_count = descriptor.GetReturnCount(); size_t return_count = descriptor.GetReturnCount();
LocationSignature::Builder locations(zone, return_count, parameter_count); LocationSignature::Builder locations(zone, return_count, parameter_count);
......
...@@ -238,8 +238,9 @@ void MemoryOptimizer::VisitAllocateRaw(Node* node, ...@@ -238,8 +238,9 @@ void MemoryOptimizer::VisitAllocateRaw(Node* node,
: __ : __
AllocateInOldSpaceStubConstant(); AllocateInOldSpaceStubConstant();
if (!allocate_operator_.is_set()) { if (!allocate_operator_.is_set()) {
auto descriptor = AllocateDescriptor{};
auto call_descriptor = Linkage::GetStubCallDescriptor( auto call_descriptor = Linkage::GetStubCallDescriptor(
graph()->zone(), AllocateDescriptor{}, 0, graph()->zone(), descriptor, descriptor.GetStackParameterCount(),
CallDescriptor::kCanUseRoots, Operator::kNoThrow); CallDescriptor::kCanUseRoots, Operator::kNoThrow);
allocate_operator_.set(common()->Call(call_descriptor)); allocate_operator_.set(common()->Call(call_descriptor));
} }
...@@ -294,8 +295,9 @@ void MemoryOptimizer::VisitAllocateRaw(Node* node, ...@@ -294,8 +295,9 @@ void MemoryOptimizer::VisitAllocateRaw(Node* node,
: __ : __
AllocateInOldSpaceStubConstant(); AllocateInOldSpaceStubConstant();
if (!allocate_operator_.is_set()) { if (!allocate_operator_.is_set()) {
auto descriptor = AllocateDescriptor{};
auto call_descriptor = Linkage::GetStubCallDescriptor( auto call_descriptor = Linkage::GetStubCallDescriptor(
graph()->zone(), AllocateDescriptor{}, 0, graph()->zone(), descriptor, descriptor.GetStackParameterCount(),
CallDescriptor::kCanUseRoots, Operator::kNoThrow); CallDescriptor::kCanUseRoots, Operator::kNoThrow);
allocate_operator_.set(common()->Call(call_descriptor)); allocate_operator_.set(common()->Call(call_descriptor));
} }
......
...@@ -2443,12 +2443,7 @@ bool PipelineImpl::SelectInstructions(Linkage* linkage) { ...@@ -2443,12 +2443,7 @@ bool PipelineImpl::SelectInstructions(Linkage* linkage) {
// due to register pressure when kRootRegister is not allocatable. Either // due to register pressure when kRootRegister is not allocatable. Either
// refactor these builtins or fix register allocation in these cases. // refactor these builtins or fix register allocation in these cases.
} else if (Builtins::IsBuiltinId(data->info()->builtin_index()) && } else if (Builtins::IsBuiltinId(data->info()->builtin_index()) &&
data->info()->builtin_index() != Builtins::kWasmArgumentsAdaptor && data->info()->builtin_index() != Builtins::kWasmArgumentsAdaptor) {
data->info()->builtin_index() != Builtins::kCopyFromTempArray &&
data->info()->builtin_index() != Builtins::kCopyWithinSortArray &&
data->info()->builtin_index() != Builtins::kBinaryInsertionSort &&
data->info()->builtin_index() != Builtins::kMergeAt &&
data->info()->builtin_index() != Builtins::kArrayTimSort) {
// TODO(v8:6666): Extend support to user code. Ensure that // TODO(v8:6666): Extend support to user code. Ensure that
// it is mutually exclusive with the Poisoning configuration above; and that // it is mutually exclusive with the Poisoning configuration above; and that
// it cooperates with restricted allocatable registers above. // it cooperates with restricted allocatable registers above.
......
...@@ -3972,9 +3972,10 @@ Operator const* SimplifiedLowering::ToNumberOperator() { ...@@ -3972,9 +3972,10 @@ Operator const* SimplifiedLowering::ToNumberOperator() {
if (!to_number_operator_.is_set()) { if (!to_number_operator_.is_set()) {
Callable callable = Builtins::CallableFor(isolate(), Builtins::kToNumber); Callable callable = Builtins::CallableFor(isolate(), Builtins::kToNumber);
CallDescriptor::Flags flags = CallDescriptor::kNeedsFrameState; CallDescriptor::Flags flags = CallDescriptor::kNeedsFrameState;
auto call_descriptor = auto call_descriptor = Linkage::GetStubCallDescriptor(
Linkage::GetStubCallDescriptor(graph()->zone(), callable.descriptor(), graph()->zone(), callable.descriptor(),
0, flags, Operator::kNoProperties); callable.descriptor().GetStackParameterCount(), flags,
Operator::kNoProperties);
to_number_operator_.set(common()->Call(call_descriptor)); to_number_operator_.set(common()->Call(call_descriptor));
} }
return to_number_operator_.get(); return to_number_operator_.get();
...@@ -3985,9 +3986,10 @@ Operator const* SimplifiedLowering::ToNumberConvertBigIntOperator() { ...@@ -3985,9 +3986,10 @@ Operator const* SimplifiedLowering::ToNumberConvertBigIntOperator() {
Callable callable = Callable callable =
Builtins::CallableFor(isolate(), Builtins::kToNumberConvertBigInt); Builtins::CallableFor(isolate(), Builtins::kToNumberConvertBigInt);
CallDescriptor::Flags flags = CallDescriptor::kNeedsFrameState; CallDescriptor::Flags flags = CallDescriptor::kNeedsFrameState;
auto call_descriptor = auto call_descriptor = Linkage::GetStubCallDescriptor(
Linkage::GetStubCallDescriptor(graph()->zone(), callable.descriptor(), graph()->zone(), callable.descriptor(),
0, flags, Operator::kNoProperties); callable.descriptor().GetStackParameterCount(), flags,
Operator::kNoProperties);
to_number_convert_big_int_operator_.set(common()->Call(call_descriptor)); to_number_convert_big_int_operator_.set(common()->Call(call_descriptor));
} }
return to_number_convert_big_int_operator_.get(); return to_number_convert_big_int_operator_.get();
...@@ -3997,9 +3999,10 @@ Operator const* SimplifiedLowering::ToNumericOperator() { ...@@ -3997,9 +3999,10 @@ Operator const* SimplifiedLowering::ToNumericOperator() {
if (!to_numeric_operator_.is_set()) { if (!to_numeric_operator_.is_set()) {
Callable callable = Builtins::CallableFor(isolate(), Builtins::kToNumeric); Callable callable = Builtins::CallableFor(isolate(), Builtins::kToNumeric);
CallDescriptor::Flags flags = CallDescriptor::kNeedsFrameState; CallDescriptor::Flags flags = CallDescriptor::kNeedsFrameState;
auto call_descriptor = auto call_descriptor = Linkage::GetStubCallDescriptor(
Linkage::GetStubCallDescriptor(graph()->zone(), callable.descriptor(), graph()->zone(), callable.descriptor(),
0, flags, Operator::kNoProperties); callable.descriptor().GetStackParameterCount(), flags,
Operator::kNoProperties);
to_numeric_operator_.set(common()->Call(call_descriptor)); to_numeric_operator_.set(common()->Call(call_descriptor));
} }
return to_numeric_operator_.get(); return to_numeric_operator_.get();
......
...@@ -307,17 +307,31 @@ class V8_EXPORT_PRIVATE CallInterfaceDescriptor { ...@@ -307,17 +307,31 @@ class V8_EXPORT_PRIVATE CallInterfaceDescriptor {
static inline CallDescriptors::Key key(); static inline CallDescriptors::Key key();
#if defined(V8_TARGET_ARCH_IA32) #if defined(V8_TARGET_ARCH_IA32)
// To support all possible cases, we must limit the number of register args for
// TFS builtins on ia32 to 3. Out of the 6 allocatable registers, esi is taken
// as the context register and ebx is the root register. One register must
// remain available to store the jump/call target. Thus 3 registers remain for
// arguments. The reason this applies to TFS builtins specifically is because
// this becomes relevant for builtins used as targets of Torque function
// pointers (which must have a register available to store the target).
// TODO(jgruber): Ideally we should just decrement kMaxBuiltinRegisterParams but
// that comes with its own set of complications. It's possible, but requires
// refactoring the calling convention of other existing stubs.
constexpr int kMaxBuiltinRegisterParams = 4; constexpr int kMaxBuiltinRegisterParams = 4;
constexpr int kMaxTFSBuiltinRegisterParams = 3;
#else #else
constexpr int kMaxBuiltinRegisterParams = 5; constexpr int kMaxBuiltinRegisterParams = 5;
constexpr int kMaxTFSBuiltinRegisterParams = kMaxBuiltinRegisterParams;
#endif #endif
STATIC_ASSERT(kMaxTFSBuiltinRegisterParams <= kMaxBuiltinRegisterParams);
#define DECLARE_DEFAULT_DESCRIPTOR(name, base) \ #define DECLARE_DEFAULT_DESCRIPTOR(name, base) \
DECLARE_DESCRIPTOR_WITH_BASE(name, base) \ DECLARE_DESCRIPTOR_WITH_BASE(name, base) \
protected: \ protected: \
static const int kRegisterParams = \ static const int kRegisterParams = \
kParameterCount > kMaxBuiltinRegisterParams ? kMaxBuiltinRegisterParams \ kParameterCount > kMaxTFSBuiltinRegisterParams \
: kParameterCount; \ ? kMaxTFSBuiltinRegisterParams \
: kParameterCount; \
static const int kStackParams = kParameterCount - kRegisterParams; \ static const int kStackParams = kParameterCount - kRegisterParams; \
void InitializePlatformSpecific(CallInterfaceDescriptorData* data) \ void InitializePlatformSpecific(CallInterfaceDescriptorData* data) \
override { \ override { \
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment