Commit 96e40682 authored by Hannes Payer, committed by Commit Bot

Change the allocator interface from Space to Generation.

Bug: v8:8945
Change-Id: I0e1b0d6751efdb468e603df21af4d36972b8b90b
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1505455
Commit-Queue: Hannes Payer <hpayer@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Reviewed-by: Yang Guo <yangguo@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#60090}
parent 99676c0a
......@@ -172,8 +172,8 @@ namespace internal {
API(HandleApiCallAsConstructor) \
\
/* Adapters for Turbofan into runtime */ \
TFC(AllocateInNewSpace, Allocate) \
TFC(AllocateInOldSpace, Allocate) \
TFC(AllocateInYoungGeneration, Allocate) \
TFC(AllocateInOldGeneration, Allocate) \
\
/* TurboFan support builtins */ \
TFS(CopyFastSmiOrObjectElements, kObject) \
......
......@@ -680,21 +680,20 @@ TF_BUILTIN(AdaptorWithBuiltinExitFrame, InternalBuiltinsAssembler) {
GenerateAdaptorWithExitFrameType<Descriptor>(Builtins::BUILTIN_EXIT);
}
TF_BUILTIN(AllocateInNewSpace, CodeStubAssembler) {
TF_BUILTIN(AllocateInYoungGeneration, CodeStubAssembler) {
TNode<IntPtrT> requested_size =
UncheckedCast<IntPtrT>(Parameter(Descriptor::kRequestedSize));
TailCallRuntime(Runtime::kAllocateInNewSpace, NoContextConstant(),
TailCallRuntime(Runtime::kAllocateInYoungGeneration, NoContextConstant(),
SmiFromIntPtr(requested_size));
}
TF_BUILTIN(AllocateInOldSpace, CodeStubAssembler) {
TF_BUILTIN(AllocateInOldGeneration, CodeStubAssembler) {
TNode<IntPtrT> requested_size =
UncheckedCast<IntPtrT>(Parameter(Descriptor::kRequestedSize));
int flags = AllocateTargetSpace::encode(OLD_SPACE);
TailCallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
SmiFromIntPtr(requested_size), SmiConstant(flags));
TailCallRuntime(Runtime::kAllocateInOldGeneration, NoContextConstant(),
SmiFromIntPtr(requested_size), SmiConstant(0));
}
TF_BUILTIN(Abort, CodeStubAssembler) {
......
......@@ -1108,11 +1108,16 @@ TNode<HeapObject> CodeStubAssembler::AllocateRaw(TNode<IntPtrT> size_in_bytes,
Label next(this);
GotoIf(IsRegularHeapObjectSize(size_in_bytes), &next);
TNode<Smi> runtime_flags = SmiConstant(
Smi::FromInt(AllocateDoubleAlignFlag::encode(needs_double_alignment) |
AllocateTargetSpace::encode(AllocationSpace::LO_SPACE)));
result = CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
SmiTag(size_in_bytes), runtime_flags);
if (FLAG_young_generation_large_objects) {
result = CallRuntime(Runtime::kAllocateInYoungGeneration,
NoContextConstant(), SmiTag(size_in_bytes));
} else {
TNode<Smi> alignment_flag = SmiConstant(Smi::FromInt(
AllocateDoubleAlignFlag::encode(needs_double_alignment)));
result =
CallRuntime(Runtime::kAllocateInOldGeneration, NoContextConstant(),
SmiTag(size_in_bytes), alignment_flag);
}
Goto(&out);
BIND(&next);
......@@ -1140,13 +1145,13 @@ TNode<HeapObject> CodeStubAssembler::AllocateRaw(TNode<IntPtrT> size_in_bytes,
{
if (flags & kPretenured) {
TNode<Smi> runtime_flags = SmiConstant(Smi::FromInt(
AllocateDoubleAlignFlag::encode(needs_double_alignment) |
AllocateTargetSpace::encode(AllocationSpace::OLD_SPACE)));
result = CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
SmiTag(size_in_bytes), runtime_flags);
AllocateDoubleAlignFlag::encode(needs_double_alignment)));
result =
CallRuntime(Runtime::kAllocateInOldGeneration, NoContextConstant(),
SmiTag(size_in_bytes), runtime_flags);
} else {
result = CallRuntime(Runtime::kAllocateInNewSpace, NoContextConstant(),
SmiTag(size_in_bytes));
result = CallRuntime(Runtime::kAllocateInYoungGeneration,
NoContextConstant(), SmiTag(size_in_bytes));
}
Goto(&out);
}
......
......@@ -102,8 +102,8 @@ namespace compiler {
V(FixedArrayMapConstant) \
V(FixedDoubleArrayMapConstant) \
V(ToNumberBuiltinConstant) \
V(AllocateInNewSpaceStubConstant) \
V(AllocateInOldSpaceStubConstant)
V(AllocateInYoungGenerationStubConstant) \
V(AllocateInOldGenerationStubConstant)
class GraphAssembler;
......
......@@ -125,11 +125,11 @@ void JSGraph::GetCachedNodes(NodeVector* nodes) {
#undef DO_CACHED_FIELD
}
DEFINE_GETTER(AllocateInNewSpaceStubConstant,
HeapConstant(BUILTIN_CODE(isolate(), AllocateInNewSpace)))
DEFINE_GETTER(AllocateInYoungGenerationStubConstant,
HeapConstant(BUILTIN_CODE(isolate(), AllocateInYoungGeneration)))
DEFINE_GETTER(AllocateInOldSpaceStubConstant,
HeapConstant(BUILTIN_CODE(isolate(), AllocateInOldSpace)))
DEFINE_GETTER(AllocateInOldGenerationStubConstant,
HeapConstant(BUILTIN_CODE(isolate(), AllocateInOldGeneration)))
DEFINE_GETTER(ArrayConstructorStubConstant,
HeapConstant(BUILTIN_CODE(isolate(), ArrayConstructorImpl)))
......
......@@ -80,31 +80,31 @@ class V8_EXPORT_PRIVATE JSGraph : public MachineGraph {
void GetCachedNodes(NodeVector* nodes);
// Cached global nodes.
#define CACHED_GLOBAL_LIST(V) \
V(AllocateInNewSpaceStubConstant) \
V(AllocateInOldSpaceStubConstant) \
V(ArrayConstructorStubConstant) \
V(BigIntMapConstant) \
V(BooleanMapConstant) \
V(ToNumberBuiltinConstant) \
V(EmptyFixedArrayConstant) \
V(EmptyStringConstant) \
V(FixedArrayMapConstant) \
V(PropertyArrayMapConstant) \
V(FixedDoubleArrayMapConstant) \
V(HeapNumberMapConstant) \
V(OptimizedOutConstant) \
V(StaleRegisterConstant) \
V(UndefinedConstant) \
V(TheHoleConstant) \
V(TrueConstant) \
V(FalseConstant) \
V(NullConstant) \
V(ZeroConstant) \
V(OneConstant) \
V(NaNConstant) \
V(MinusOneConstant) \
V(EmptyStateValues) \
#define CACHED_GLOBAL_LIST(V) \
V(AllocateInYoungGenerationStubConstant) \
V(AllocateInOldGenerationStubConstant) \
V(ArrayConstructorStubConstant) \
V(BigIntMapConstant) \
V(BooleanMapConstant) \
V(ToNumberBuiltinConstant) \
V(EmptyFixedArrayConstant) \
V(EmptyStringConstant) \
V(FixedArrayMapConstant) \
V(PropertyArrayMapConstant) \
V(FixedDoubleArrayMapConstant) \
V(HeapNumberMapConstant) \
V(OptimizedOutConstant) \
V(StaleRegisterConstant) \
V(UndefinedConstant) \
V(TheHoleConstant) \
V(TrueConstant) \
V(FalseConstant) \
V(NullConstant) \
V(ZeroConstant) \
V(OneConstant) \
V(NaNConstant) \
V(MinusOneConstant) \
V(EmptyStateValues) \
V(SingleDeadTypedStateValues)
// Cached global node accessor methods.
......
......@@ -1657,8 +1657,8 @@ void JSHeapBroker::SerializeShareableObjects() {
Builtins* const b = isolate()->builtins();
{
Builtins::Name builtins[] = {
Builtins::kAllocateInNewSpace,
Builtins::kAllocateInOldSpace,
Builtins::kAllocateInYoungGeneration,
Builtins::kAllocateInOldGeneration,
Builtins::kArgumentsAdaptorTrampoline,
Builtins::kArrayConstructorImpl,
Builtins::kCallFunctionForwardVarargs,
......
......@@ -175,7 +175,7 @@ bool Linkage::NeedsFrameStateInput(Runtime::FunctionId function) {
// not to call into arbitrary JavaScript, not to throw, and not to lazily
// deoptimize are whitelisted here and can be called without a FrameState.
case Runtime::kAbort:
case Runtime::kAllocateInTargetSpace:
case Runtime::kAllocateInOldGeneration:
case Runtime::kCreateIterResultObject:
case Runtime::kIncBlockCounter:
case Runtime::kIsFunction:
......
......@@ -274,10 +274,11 @@ void MemoryOptimizer::VisitAllocateRaw(Node* node,
__ Bind(&call_runtime);
{
Node* target =
pretenure == NOT_TENURED ? __ AllocateInNewSpaceStubConstant()
: __
AllocateInOldSpaceStubConstant();
Node* target = pretenure == NOT_TENURED
? __
AllocateInYoungGenerationStubConstant()
: __
AllocateInOldGenerationStubConstant();
if (!allocate_operator_.is_set()) {
auto descriptor = AllocateDescriptor{};
auto call_descriptor = Linkage::GetStubCallDescriptor(
......@@ -332,9 +333,9 @@ void MemoryOptimizer::VisitAllocateRaw(Node* node,
__ Bind(&call_runtime);
Node* target =
pretenure == NOT_TENURED ? __ AllocateInNewSpaceStubConstant()
pretenure == NOT_TENURED ? __ AllocateInYoungGenerationStubConstant()
: __
AllocateInOldSpaceStubConstant();
AllocateInOldGenerationStubConstant();
if (!allocate_operator_.is_set()) {
auto descriptor = AllocateDescriptor{};
auto call_descriptor = Linkage::GetStubCallDescriptor(
......
......@@ -300,8 +300,8 @@ bool IntrinsicHasNoSideEffect(Runtime::FunctionId id) {
V(CreateObjectLiteralWithoutAllocationSite) \
V(CreateRegExpLiteral) \
/* Called from builtins */ \
V(AllocateInNewSpace) \
V(AllocateInTargetSpace) \
V(AllocateInYoungGeneration) \
V(AllocateInOldGeneration) \
V(AllocateSeqOneByteString) \
V(AllocateSeqTwoByteString) \
V(ArrayIncludes_Slow) \
......@@ -770,8 +770,8 @@ DebugInfo::SideEffectState BuiltinGetSideEffectState(Builtins::Name id) {
case Builtins::kRegExpConstructor:
// Internal.
case Builtins::kStrictPoisonPillThrower:
case Builtins::kAllocateInNewSpace:
case Builtins::kAllocateInOldSpace:
case Builtins::kAllocateInYoungGeneration:
case Builtins::kAllocateInOldGeneration:
return DebugInfo::kHasNoSideEffect;
// Set builtins.
......
......@@ -208,15 +208,10 @@ HeapObject Factory::New(Handle<Map> map, PretenureFlag pretenure) {
}
Handle<HeapObject> Factory::NewFillerObject(int size, bool double_align,
AllocationSpace space) {
AllocationType type) {
AllocationAlignment alignment = double_align ? kDoubleAligned : kWordAligned;
Heap* heap = isolate()->heap();
HeapObject result = heap->AllocateRawWithRetryOrFail(
size, Heap::SelectType(space), alignment);
#ifdef DEBUG
MemoryChunk* chunk = MemoryChunk::FromHeapObject(result);
DCHECK(chunk->owner()->identity() == space);
#endif
HeapObject result = heap->AllocateRawWithRetryOrFail(size, type, alignment);
heap->CreateFillerObjectAt(result->address(), size, ClearRecordedSlots::kNo);
return Handle<HeapObject>(result, isolate());
}
......
......@@ -507,10 +507,10 @@ class V8_EXPORT_PRIVATE Factory {
Map InitializeMap(Map map, InstanceType type, int instance_size,
ElementsKind elements_kind, int inobject_properties);
// Allocate a block of memory in the given space (filled with a filler).
// Used as a fall-back for generated code when the space is full.
// Allocate a block of memory of the given AllocationType (filled with a
// filler). Used as a fall-back for generated code when the space is full.
Handle<HeapObject> NewFillerObject(int size, bool double_align,
AllocationSpace space);
AllocationType type);
Handle<JSObject> NewFunctionPrototype(Handle<JSFunction> function);
......
......@@ -276,17 +276,19 @@ RUNTIME_FUNCTION(Runtime_Interrupt) {
return isolate->stack_guard()->HandleInterrupts();
}
RUNTIME_FUNCTION(Runtime_AllocateInNewSpace) {
RUNTIME_FUNCTION(Runtime_AllocateInYoungGeneration) {
HandleScope scope(isolate);
DCHECK_EQ(1, args.length());
CONVERT_SMI_ARG_CHECKED(size, 0);
CHECK(IsAligned(size, kTaggedSize));
CHECK_GT(size, 0);
CHECK_LE(size, kMaxRegularHeapObjectSize);
return *isolate->factory()->NewFillerObject(size, false, NEW_SPACE);
CHECK(FLAG_young_generation_large_objects ||
size <= kMaxRegularHeapObjectSize);
return *isolate->factory()->NewFillerObject(size, false,
AllocationType::kYoung);
}
RUNTIME_FUNCTION(Runtime_AllocateInTargetSpace) {
RUNTIME_FUNCTION(Runtime_AllocateInOldGeneration) {
HandleScope scope(isolate);
DCHECK_EQ(2, args.length());
CONVERT_SMI_ARG_CHECKED(size, 0);
......@@ -294,12 +296,8 @@ RUNTIME_FUNCTION(Runtime_AllocateInTargetSpace) {
CHECK(IsAligned(size, kTaggedSize));
CHECK_GT(size, 0);
bool double_align = AllocateDoubleAlignFlag::decode(flags);
AllocationSpace space = AllocateTargetSpace::decode(flags);
CHECK(size <= kMaxRegularHeapObjectSize || space == LO_SPACE);
if (FLAG_young_generation_large_objects && space == LO_SPACE) {
space = NEW_LO_SPACE;
}
return *isolate->factory()->NewFillerObject(size, double_align, space);
return *isolate->factory()->NewFillerObject(size, double_align,
AllocationType::kOld);
}
RUNTIME_FUNCTION(Runtime_AllocateSeqOneByteString) {
......
......@@ -206,8 +206,8 @@ namespace internal {
#define FOR_EACH_INTRINSIC_INTERNAL(F, I) \
F(AccessCheck, 1, 1) \
F(AllocateInNewSpace, 1, 1) \
F(AllocateInTargetSpace, 2, 1) \
F(AllocateInYoungGeneration, 1, 1) \
F(AllocateInOldGeneration, 2, 1) \
F(AllocateSeqOneByteString, 1, 1) \
F(AllocateSeqTwoByteString, 1, 1) \
F(AllowDynamicFunction, 1, 1) \
......@@ -762,7 +762,6 @@ V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream&, Runtime::FunctionId);
// Constants used by interface to runtime functions.
class AllocateDoubleAlignFlag : public BitField<bool, 0, 1> {};
class AllocateTargetSpace : public BitField<AllocationSpace, 1, 3> {};
class DeclareGlobalsEvalFlag : public BitField<bool, 0, 1> {};
class DeclareGlobalsNativeFlag : public BitField<bool, 1, 1> {};
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment