Commit 02a891a8 authored by Igor Sheludko, committed by Commit Bot

[csa][builtins] Port AllocateIn[New,Old]Space builtins to CSA.

Bug: v8:5269
Change-Id: I78678aee42b2ae930b995cd194b4d20516e0d229
Reviewed-on: https://chromium-review.googlesource.com/1098929
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#53730}
parent 2dda64aa
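This CL deletes the hand-written Generate_AllocateInNewSpace / Generate_AllocateInOldSpace stubs from every architecture-specific builtins file below (arm, arm64, ia32, mips, mips64, ppc, s390, x64) and replaces them with a single pair of platform-independent CSA builtins plus a new Allocate interface descriptor. As a summary only, copied from the builtins-internal-gen.cc hunk further down, each port has this shape:

// Summary copy (not new code): the requested size arrives untagged in
// kAllocateSizeRegister, is Smi-tagged, and is forwarded to the runtime with
// no context -- exactly what each removed per-architecture assembly stub did.
TF_BUILTIN(AllocateInNewSpace, CodeStubAssembler) {
  TNode<Int32T> requested_size =
      UncheckedCast<Int32T>(Parameter(Descriptor::kRequestedSize));
  TailCallRuntime(Runtime::kAllocateInNewSpace, NoContextConstant(),
                  SmiFromInt32(requested_size));
}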
@@ -2150,31 +2150,6 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
          RelocInfo::CODE_TARGET);
}
// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r1 : requested object size (untagged)
// -- lr : return address
// -----------------------------------
__ SmiTag(r1);
__ Push(r1);
__ Move(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInNewSpace);
}
// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r1 : requested object size (untagged)
// -- lr : return address
// -----------------------------------
__ SmiTag(r1);
__ Move(r2, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
__ Push(r1, r2);
__ Move(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
// static
void Builtins::Generate_Abort(MacroAssembler* masm) {
  // ----------- S t a t e -------------
...
@@ -2540,33 +2540,6 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
          RelocInfo::CODE_TARGET);
}
// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
ASM_LOCATION("Builtins::Generate_AllocateInNewSpace");
// ----------- S t a t e -------------
// -- x1 : requested object size (untagged)
// -- lr : return address
// -----------------------------------
__ SmiTag(x1);
__ PushArgument(x1);
__ Move(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInNewSpace);
}
// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
ASM_LOCATION("Builtins::Generate_AllocateInOldSpace");
// ----------- S t a t e -------------
// -- x1 : requested object size (untagged)
// -- lr : return address
// -----------------------------------
__ SmiTag(x1);
__ Move(x2, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
__ Push(x1, x2);
__ Move(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
// static
void Builtins::Generate_Abort(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_Abort");
...
@@ -159,8 +159,8 @@ namespace internal {
  API(HandleApiCallAsConstructor)            \
                                             \
  /* Adapters for Turbofan into runtime */   \
-  ASM(AllocateInNewSpace)                   \
-  ASM(AllocateInOldSpace)                   \
+  TFC(AllocateInNewSpace, Allocate, 1)      \
+  TFC(AllocateInOldSpace, Allocate, 1)      \
                                             \
  /* TurboFan support builtins */            \
  TFS(CopyFastSmiOrObjectElements, kObject)  \
...
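The builtin-list change above switches the two allocation adapters from ASM (a hand-written Builtins::Generate_* per architecture) to TFC (a TurboFan builtin with CodeStub linkage). Reading the TFC arguments as (name, interface descriptor, result count) -- which is how they are used in this CL -- each entry ties the builtin to the new Allocate descriptor and one tagged result; the descriptor and the CSA bodies are added in the hunks below. An annotated copy (annotations editorial, not in the diff):

// TFC(<name>, <interface descriptor>, <result count>)
TFC(AllocateInNewSpace, Allocate, 1)  // CSA builtin, Allocate linkage, 1 result
TFC(AllocateInOldSpace, Allocate, 1)  // same convention, old-space target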
@@ -1088,6 +1088,23 @@ TF_BUILTIN(RunMicrotasks, InternalBuiltinsAssembler) {
  }
}
TF_BUILTIN(AllocateInNewSpace, CodeStubAssembler) {
TNode<Int32T> requested_size =
UncheckedCast<Int32T>(Parameter(Descriptor::kRequestedSize));
TailCallRuntime(Runtime::kAllocateInNewSpace, NoContextConstant(),
SmiFromInt32(requested_size));
}
TF_BUILTIN(AllocateInOldSpace, CodeStubAssembler) {
TNode<Int32T> requested_size =
UncheckedCast<Int32T>(Parameter(Descriptor::kRequestedSize));
int flags = AllocateTargetSpace::encode(OLD_SPACE);
TailCallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
SmiFromInt32(requested_size), SmiConstant(flags));
}
TF_BUILTIN(AbortJS, CodeStubAssembler) {
  Node* message = Parameter(Descriptor::kObject);
  Node* reason = SmiConstant(0);
...
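The replacement bodies above are small enough to annotate in full. A commented copy of the old-space port (comments are editorial, not part of the commit) shows how it mirrors the deleted assembly stubs: the descriptor delivers the size untagged, the runtime expects Smi arguments, and no context is needed.

TF_BUILTIN(AllocateInOldSpace, CodeStubAssembler) {
  // Untagged Int32 request, as declared by the Allocate descriptor added below.
  TNode<Int32T> requested_size =
      UncheckedCast<Int32T>(Parameter(Descriptor::kRequestedSize));
  // Same target-space encoding the removed assembly stubs pushed as a Smi.
  int flags = AllocateTargetSpace::encode(OLD_SPACE);
  // NoContextConstant() plays the role of the old stubs' "__ Move(cp, Smi::kZero)".
  TailCallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
                  SmiFromInt32(requested_size), SmiConstant(flags));
}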
@@ -2294,35 +2294,6 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
          RelocInfo::CODE_TARGET);
}
// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- edx : requested object size (untagged)
// -- esp[0] : return address
// -----------------------------------
__ SmiTag(edx);
__ PopReturnAddressTo(ecx);
__ Push(edx);
__ PushReturnAddressFrom(ecx);
__ Move(esi, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInNewSpace);
}
// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- edx : requested object size (untagged)
// -- esp[0] : return address
// -----------------------------------
__ SmiTag(edx);
__ PopReturnAddressTo(ecx);
__ Push(edx);
__ Push(Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
__ PushReturnAddressFrom(ecx);
__ Move(esi, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
// static
void Builtins::Generate_Abort(MacroAssembler* masm) {
  // ----------- S t a t e -------------
...
@@ -2204,31 +2204,6 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
          RelocInfo::CODE_TARGET);
}
// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a0 : requested object size (untagged)
// -- ra : return address
// -----------------------------------
__ SmiTag(a0);
__ Push(a0);
__ Move(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInNewSpace);
}
// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a0 : requested object size (untagged)
// -- ra : return address
// -----------------------------------
__ SmiTag(a0);
__ Move(a1, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
__ Push(a0, a1);
__ Move(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
// static
void Builtins::Generate_Abort(MacroAssembler* masm) {
  // ----------- S t a t e -------------
...
@@ -2219,31 +2219,6 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
          RelocInfo::CODE_TARGET);
}
// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a0 : requested object size (untagged)
// -- ra : return address
// -----------------------------------
__ SmiTag(a0);
__ Push(a0);
__ Move(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInNewSpace);
}
// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a0 : requested object size (untagged)
// -- ra : return address
// -----------------------------------
__ SmiTag(a0);
__ Move(a1, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
__ Push(a0, a1);
__ Move(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
// static
void Builtins::Generate_Abort(MacroAssembler* masm) {
  // ----------- S t a t e -------------
...
@@ -2223,31 +2223,6 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
          RelocInfo::CODE_TARGET);
}
// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r4 : requested object size (untagged)
// -- lr : return address
// -----------------------------------
__ SmiTag(r4);
__ Push(r4);
__ LoadSmiLiteral(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInNewSpace);
}
// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r4 : requested object size (untagged)
// -- lr : return address
// -----------------------------------
__ SmiTag(r4);
__ LoadSmiLiteral(r5, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
__ Push(r4, r5);
__ LoadSmiLiteral(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
// static
void Builtins::Generate_Abort(MacroAssembler* masm) {
  // ----------- S t a t e -------------
...
@@ -2229,31 +2229,6 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
          RelocInfo::CODE_TARGET);
}
// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r3 : requested object size (untagged)
// -- lr : return address
// -----------------------------------
__ SmiTag(r3);
__ Push(r3);
__ LoadSmiLiteral(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInNewSpace);
}
// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r3 : requested object size (untagged)
// -- lr : return address
// -----------------------------------
__ SmiTag(r3);
__ LoadSmiLiteral(r4, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
__ Push(r3, r4);
__ LoadSmiLiteral(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
// static
void Builtins::Generate_Abort(MacroAssembler* masm) {
  // ----------- S t a t e -------------
...
@@ -1732,35 +1732,6 @@ static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ PushReturnAddressFrom(rcx);
}
// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rdx : requested object size (untagged)
// -- rsp[0] : return address
// -----------------------------------
__ SmiTag(rdx, rdx);
__ PopReturnAddressTo(rcx);
__ Push(rdx);
__ PushReturnAddressFrom(rcx);
__ Move(rsi, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInNewSpace);
}
// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rdx : requested object size (untagged)
// -- rsp[0] : return address
// -----------------------------------
__ SmiTag(rdx, rdx);
__ PopReturnAddressTo(rcx);
__ Push(rdx);
__ Push(Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
__ PushReturnAddressFrom(rcx);
__ Move(rsi, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
// static
void Builtins::Generate_Abort(MacroAssembler* masm) {
  // ----------- S t a t e -------------
...
@@ -405,29 +405,19 @@ CallDescriptor* Linkage::GetStubCallDescriptor(
        descriptor.DebugName(isolate), descriptor.allocatable_registers());
  }

+// TODO(ishell): remove this once CallInterfaceDescriptors support
+// Linkage::kNoContext and Operator::kNoThrow flags.
 // static
-CallDescriptor* Linkage::GetAllocateCallDescriptor(Zone* zone) {
-  LocationSignature::Builder locations(zone, 1, 1);
-
-  locations.AddParam(regloc(kAllocateSizeRegister, MachineType::Int32()));
-
-  locations.AddReturn(regloc(kReturnRegister0, MachineType::AnyTagged()));
-
-  // The target for allocate calls is a code object.
-  MachineType target_type = MachineType::AnyTagged();
-  LinkageLocation target_loc =
-      LinkageLocation::ForAnyRegister(MachineType::AnyTagged());
-  return new (zone) CallDescriptor(     // --
-      CallDescriptor::kCallCodeObject,  // kind
-      target_type,                      // target MachineType
-      target_loc,                       // target location
-      locations.Build(),                // location_sig
-      0,                                // stack_parameter_count
-      Operator::kNoThrow,               // properties
-      kNoCalleeSaved,                   // callee-saved registers
-      kNoCalleeSaved,                   // callee-saved fp
-      CallDescriptor::kCanUseRoots,     // flags
-      "Allocate");
+CallDescriptor* Linkage::GetAllocateCallDescriptor(Isolate* isolate,
+                                                   Zone* zone) {
+  return GetStubCallDescriptor(isolate, zone,
+                               AllocateDescriptor(isolate),   // descriptor
+                               0,                    // stack_parameter_count
+                               CallDescriptor::kCanUseRoots,  // flags
+                               Operator::kNoThrow,            // properties
+                               MachineType::AnyTagged(),      // return_type
+                               1,                             // return_count
+                               Linkage::kNoContext);          // context_spec
 }

 // static
...
@@ -397,7 +397,8 @@ class V8_EXPORT_PRIVATE Linkage : public NON_EXPORTED_BASE(ZoneObject) {
      size_t return_count = 1,
      ContextSpecification context_spec = kPassContext);

-  static CallDescriptor* GetAllocateCallDescriptor(Zone* zone);
+  static CallDescriptor* GetAllocateCallDescriptor(Isolate* isolate,
+                                                   Zone* zone);

  static CallDescriptor* GetBytecodeDispatchCallDescriptor(
      Isolate* isolate, Zone* zone, const CallInterfaceDescriptor& descriptor,
      int stack_parameter_count);
...
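With the Allocate interface descriptor in place, Linkage no longer assembles a CallDescriptor by hand; it derives one from the descriptor via GetStubCallDescriptor, which is why the function now takes the Isolate. An annotated copy of the new construction (annotations editorial, not part of the commit):

return GetStubCallDescriptor(isolate, zone,
                             AllocateDescriptor(isolate),   // one untagged Int32 size parameter
                             0,                             // no stack parameters
                             CallDescriptor::kCanUseRoots,  // kept from the old hand-built descriptor
                             Operator::kNoThrow,            // allocation stubs never throw
                             MachineType::AnyTagged(),      // returns the allocated object
                             1,                             // single return value
                             Linkage::kNoContext);          // the allocate builtins take no context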
@@ -237,7 +237,7 @@ void MemoryOptimizer::VisitAllocateRaw(Node* node,
          AllocateInOldSpaceStubConstant();
      if (!allocate_operator_.is_set()) {
        auto call_descriptor =
-           Linkage::GetAllocateCallDescriptor(graph()->zone());
+           Linkage::GetAllocateCallDescriptor(isolate(), graph()->zone());
        allocate_operator_.set(common()->Call(call_descriptor));
      }
      Node* vfalse = __ Call(allocate_operator_.get(), target, size);
@@ -292,7 +292,7 @@ void MemoryOptimizer::VisitAllocateRaw(Node* node,
          AllocateInOldSpaceStubConstant();
      if (!allocate_operator_.is_set()) {
        auto call_descriptor =
-           Linkage::GetAllocateCallDescriptor(graph()->zone());
+           Linkage::GetAllocateCallDescriptor(isolate(), graph()->zone());
        allocate_operator_.set(common()->Call(call_descriptor));
      }
      __ Goto(&done, __ Call(allocate_operator_.get(), target, size));
...
@@ -60,6 +60,19 @@ void VoidDescriptor::InitializePlatformSpecific(
  data->InitializePlatformSpecific(0, nullptr);
}
void AllocateDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {kAllocateSizeRegister};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void AllocateDescriptor::InitializePlatformIndependent(
CallInterfaceDescriptorData* data) {
MachineType machine_types[] = {MachineType::Int32()};
data->InitializePlatformIndependent(arraysize(machine_types), 0,
machine_types);
}
void FastNewFunctionContextDescriptor::InitializePlatformIndependent(
    CallInterfaceDescriptorData* data) {
  MachineType machine_types[] = {MachineType::AnyTagged(),
...
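The two initializer halves above define the Allocate calling convention. An annotated copy (comments editorial): the platform-specific half pins the single parameter to kAllocateSizeRegister, presumably the same per-architecture register the deleted assembly stubs read the size from, and the platform-independent half declares it as an untagged Int32.

void AllocateDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  // One register parameter: the requested object size.
  Register registers[] = {kAllocateSizeRegister};
  data->InitializePlatformSpecific(arraysize(registers), registers);
}

void AllocateDescriptor::InitializePlatformIndependent(
    CallInterfaceDescriptorData* data) {
  // Untagged 32-bit size; the CSA builtins Smi-tag it before calling the runtime.
  MachineType machine_types[] = {MachineType::Int32()};
  data->InitializePlatformIndependent(arraysize(machine_types), 0,
                                      machine_types);
}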
@@ -18,6 +18,7 @@ namespace internal {
class PlatformInterfaceDescriptor;

#define INTERFACE_DESCRIPTOR_LIST(V) \
V(Allocate) \
V(Void) \
V(ContextOnly) \
V(Load) \
@@ -333,6 +334,14 @@ class V8_EXPORT_PRIVATE VoidDescriptor : public CallInterfaceDescriptor {
  DECLARE_DESCRIPTOR(VoidDescriptor, CallInterfaceDescriptor)
};
class AllocateDescriptor : public CallInterfaceDescriptor {
public:
// No context parameter
enum ParameterIndices { kRequestedSize, kParameterCount };
DECLARE_DESCRIPTOR_WITH_CUSTOM_FUNCTION_TYPE(AllocateDescriptor,
CallInterfaceDescriptor)
};
class ContextOnlyDescriptor : public CallInterfaceDescriptor {
 public:
  DECLARE_DESCRIPTOR(ContextOnlyDescriptor, CallInterfaceDescriptor)
...
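Inside the TF_BUILTINs added earlier, Descriptor resolves to this AllocateDescriptor, so the single argument is addressed through the kRequestedSize index declared here. Excerpt from the builtins-internal-gen.cc hunk above, repeated for cross-reference only:

TNode<Int32T> requested_size =
    UncheckedCast<Int32T>(Parameter(Descriptor::kRequestedSize));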