Commit df060b03 authored by Camillo Bruni, committed by Commit Bot

[CSA] Adding AllocateInNewSpace helper

This makes the assumption about new-space allocation in the CSA more explicit.
Additionally, AllocateInNewSpace asserts in debug builds that the allocation
will fit in new space.

Change-Id: Ica5e7e12656dcdaa2c739b3d300fdcbaeb2355a2
Reviewed-on: https://chromium-review.googlesource.com/448043
Reviewed-by: Camillo Bruni <cbruni@chromium.org>
Reviewed-by: Sathya Gunasekaran <gsathya@chromium.org>
Commit-Queue: Camillo Bruni <cbruni@chromium.org>
Cr-Commit-Position: refs/heads/master@{#43557}
parent 386e5a11
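
As a concrete illustration of the new contract, here is a hypothetical CSA fragment (not part of this change; the FixedArray sizing and map stores are stand-ins for a real builtin body): AllocateInNewSpace documents, and in debug builds verifies, that the request is a regular-sized new-space allocation, while Allocate(..., kPretenured) remains the way to ask for old-space memory.

  // Hypothetical builtin fragment, for illustration only (not from this commit).
  // New-space allocation, now with the size assertion added by this change:
  Node* young_array = AllocateInNewSpace(FixedArray::SizeFor(2));
  StoreMapNoWriteBarrier(young_array, Heap::kFixedArrayMapRootIndex);
  // Old-space (pretenured) allocation still goes through Allocate():
  Node* old_array = Allocate(FixedArray::SizeFor(2), kPretenured);
  StoreMapNoWriteBarrier(old_array, Heap::kFixedArrayMapRootIndex);
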
@@ -338,7 +338,7 @@ Node* ConstructorBuiltinsAssembler::EmitFastNewFunctionContext(
   Node* size = GetFixedArrayAllocationSize(length, FAST_ELEMENTS, mode);
   // Create a new closure from the given function info in new space
-  Node* function_context = Allocate(size);
+  Node* function_context = AllocateInNewSpace(size);
   Heap::RootListIndex context_type;
   switch (scope_type) {
@@ -690,7 +690,7 @@ Node* ConstructorBuiltinsAssembler::EmitFastCloneShallowObject(
   Node* size_in_words = WordShr(object_size, kPointerSizeLog2);
   GotoIfNot(WordEqual(instance_size, size_in_words), call_runtime);
-  Node* copy = Allocate(allocation_size);
+  Node* copy = AllocateInNewSpace(allocation_size);
   // Copy boilerplate elements.
   Variable offset(this, MachineType::PointerRepresentation());
...
@@ -794,6 +794,13 @@ Node* CodeStubAssembler::AllocateRawAligned(Node* size_in_bytes,
   return address.value();
 }
 
+Node* CodeStubAssembler::AllocateInNewSpace(Node* size_in_bytes,
+                                            AllocationFlags flags) {
+  DCHECK(flags == kNone || flags == kDoubleAlignment);
+  CSA_ASSERT(this, IsRegularHeapObjectSize(size_in_bytes));
+  return Allocate(size_in_bytes, flags);
+}
+
 Node* CodeStubAssembler::Allocate(Node* size_in_bytes, AllocationFlags flags) {
   Comment("Allocate");
   bool const new_space = !(flags & kPretenured);
@@ -822,6 +829,13 @@ Node* CodeStubAssembler::Allocate(Node* size_in_bytes, AllocationFlags flags) {
   return AllocateRawUnaligned(size_in_bytes, flags, top_address, limit_address);
 }
 
+Node* CodeStubAssembler::AllocateInNewSpace(int size_in_bytes,
+                                            AllocationFlags flags) {
+  CHECK(flags == kNone || flags == kDoubleAlignment);
+  DCHECK_LE(size_in_bytes, kMaxRegularHeapObjectSize);
+  return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
+}
+
 Node* CodeStubAssembler::Allocate(int size_in_bytes, AllocationFlags flags) {
   return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
 }
@@ -1560,7 +1574,7 @@ Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length,
   Bind(&if_sizeissmall);
   {
     // Just allocate the SeqOneByteString in new space.
-    Node* result = Allocate(size, flags);
+    Node* result = AllocateInNewSpace(size, flags);
     DCHECK(Heap::RootIsImmortalImmovable(Heap::kOneByteStringMapRootIndex));
     StoreMapNoWriteBarrier(result, Heap::kOneByteStringMapRootIndex);
     StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
@@ -1631,7 +1645,7 @@ Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length,
   Bind(&if_sizeissmall);
   {
     // Just allocate the SeqTwoByteString in new space.
-    Node* result = Allocate(size, flags);
+    Node* result = AllocateInNewSpace(size, flags);
     DCHECK(Heap::RootIsImmortalImmovable(Heap::kStringMapRootIndex));
     StoreMapNoWriteBarrier(result, Heap::kStringMapRootIndex);
     StoreObjectFieldNoWriteBarrier(
@@ -1853,7 +1867,7 @@ Node* CodeStubAssembler::AllocateNameDictionary(Node* at_least_space_for) {
       IntPtrAdd(WordShl(length, IntPtrConstant(kPointerSizeLog2)),
                 IntPtrConstant(NameDictionary::kHeaderSize));
-  Node* result = Allocate(store_size);
+  Node* result = AllocateInNewSpace(store_size);
   Comment("Initialize NameDictionary");
   // Initialize FixedArray fields.
   DCHECK(Heap::RootIsImmortalImmovable(Heap::kHashTableMapRootIndex));
@@ -1894,8 +1908,7 @@ Node* CodeStubAssembler::AllocateJSObjectFromMap(Node* map, Node* properties,
   CSA_ASSERT(this, IsMap(map));
   Node* size =
       IntPtrMul(LoadMapInstanceSize(map), IntPtrConstant(kPointerSize));
-  CSA_ASSERT(this, IsRegularHeapObjectSize(size));
-  Node* object = Allocate(size, flags);
+  Node* object = AllocateInNewSpace(size, flags);
   StoreMapNoWriteBarrier(object, map);
   InitializeJSObjectFromMap(object, map, size, properties, elements);
   return object;
@@ -1996,7 +2009,8 @@ Node* CodeStubAssembler::AllocateUninitializedJSArray(ElementsKind kind,
                                                        Node* length,
                                                        Node* allocation_site,
                                                        Node* size_in_bytes) {
-  Node* array = Allocate(size_in_bytes);
+  // Allocate space for the JSArray and the elements FixedArray in one go.
+  Node* array = AllocateInNewSpace(size_in_bytes);
   Comment("write JSArray headers");
   StoreMapNoWriteBarrier(array, array_map);
...
@@ -241,6 +241,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
   void GotoIfNumber(Node* value, Label* is_number);
 
   // Allocate an object of the given size.
+  Node* AllocateInNewSpace(Node* size, AllocationFlags flags = kNone);
+  Node* AllocateInNewSpace(int size, AllocationFlags flags = kNone);
   Node* Allocate(Node* size, AllocationFlags flags = kNone);
   Node* Allocate(int size, AllocationFlags flags = kNone);
   Node* InnerAllocate(Node* previous, int offset);
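
The size checks in the two new overloads both bound allocations by kMaxRegularHeapObjectSize: the int overload compares the constant directly via DCHECK_LE, while the Node* overload asserts IsRegularHeapObjectSize on the dynamic size. Below is a rough sketch of what that predicate amounts to; the helper already exists elsewhere in CodeStubAssembler and its exact wording may differ. Note that CSA_ASSERT is compiled out of release builds, which is why the commit message scopes the check to debug builds.

  // Sketch only: unsigned comparison of the requested size against the
  // largest regular (non-large-object-space) heap object size.
  Node* CodeStubAssembler::IsRegularHeapObjectSize(Node* size) {
    return UintPtrLessThanOrEqual(size,
                                  IntPtrConstant(kMaxRegularHeapObjectSize));
  }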