Commit d4c3fa8d authored by ishell, committed by Commit bot

[stubs] Cleanup storing of maps to objects.

1) Added CSA::StoreMap(), which triggers the map-specific write barrier,
   unlike StoreObjectField(), which triggers the full write barrier
   (see the sketch after this list).
2) Added CSA::StoreMapNoWriteBarrier(object, map_root_index).
3) StoreMapNoWriteBarrier() is now used for storing immortal immovable
   maps, since such stores require no write barrier even when the
   receiving object is in old space.
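
For context, the three store variants differ only in which write-barrier work they emit. Below is a minimal standalone C++ sketch of that distinction, not V8 code: `GenerationalBarrier`, `MarkingBarrier`, and the object layout are hypothetical stand-ins. It assumes only the two facts this patch relies on: maps always live in old space, so a map store can never create an old-to-new slot, and immortal immovable root maps never move or die, so storing them needs no barrier at all.

```cpp
#include <cstdio>

// Every heap object's first word is its map pointer.
struct HeapObject {
  void* map;
  void* payload;  // some other tagged field, for the generic store below
};

// Hypothetical stand-ins for the two halves of the full write barrier.
void GenerationalBarrier(HeapObject* host) {
  std::printf("record possible old-to-new slot in %p\n",
              static_cast<void*>(host));
}
void MarkingBarrier(HeapObject* host, void* value) {
  std::printf("mark %p stored into %p\n", value, static_cast<void*>(host));
}

// StoreObjectField analogue (kFullWriteBarrier): the stored value could be
// anything, so both halves of the barrier must run.
void StoreField(HeapObject* host, void** slot, void* value) {
  *slot = value;
  GenerationalBarrier(host);
  MarkingBarrier(host, value);
}

// StoreMap analogue (kMapWriteBarrier): maps always live in old space, so
// a map store can never create an old-to-new slot; only marking remains.
void StoreMap(HeapObject* host, void* map) {
  host->map = map;
  MarkingBarrier(host, map);
}

// StoreMapNoWriteBarrier analogue (kNoWriteBarrier): immortal immovable
// root maps are never moved or reclaimed, so no barrier is needed at all.
void StoreMapNoWriteBarrier(HeapObject* host, void* immortal_map) {
  host->map = immortal_map;
}

int main() {
  static int fixed_array_map = 0;  // stands in for a root-list map
  HeapObject o{nullptr, nullptr};
  StoreMapNoWriteBarrier(&o, &fixed_array_map);  // freshly allocated object
  StoreMap(&o, &fixed_array_map);                // later map transition
  StoreField(&o, &o.payload, &fixed_array_map);  // generic field store
  return 0;
}
```

This is also why, in the hunks below, each StoreMapNoWriteBarrier call site is paired with a DCHECK(Heap::RootIsImmortalImmovable(...)).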

BUG=

Review-Url: https://codereview.chromium.org/2544793002
Cr-Commit-Position: refs/heads/master@{#41435}
parent 60257069
@@ -1324,6 +1324,7 @@ Node* CodeStubAssembler::StoreHeapNumberValue(Node* object, Node* value) {
Node* CodeStubAssembler::StoreObjectField(
Node* object, int offset, Node* value) {
DCHECK_NE(HeapObject::kMapOffset, offset); // Use StoreMap instead.
return Store(object, IntPtrConstant(offset - kHeapObjectTag), value);
}
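
The DCHECK_NE above is what enforces the new discipline: the generic field store refuses HeapObject::kMapOffset, so all map stores are funneled through the specialized helpers. A small sketch of this guard pattern, with plain `assert()` standing in for DCHECK_NE and a hypothetical four-word object:

```cpp
#include <cassert>

struct Object {
  void* fields[4];  // field 0 is the map, as in HeapObject
};

const int kMapOffset = 0;

// Specialized entry point: in V8 this picks the map-specific barrier.
void StoreMap(Object* o, void* map) { o->fields[kMapOffset] = map; }

// Generic entry point: must assume a full barrier is needed, and now
// refuses the map slot so callers are forced onto the specialized path.
void StoreObjectField(Object* o, int offset, void* value) {
  assert(offset != kMapOffset && "use StoreMap instead");
  o->fields[offset] = value;
}
```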
@@ -1353,10 +1354,22 @@ Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
}
Node* CodeStubAssembler::StoreMap(Node* object, Node* map) {
CSA_SLOW_ASSERT(this, IsMap(map));
return StoreWithMapWriteBarrier(
object, IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map);
}
Node* CodeStubAssembler::StoreMapNoWriteBarrier(
Node* object, Heap::RootListIndex map_root_index) {
return StoreMapNoWriteBarrier(object, LoadRoot(map_root_index));
}
Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
CSA_SLOW_ASSERT(this, IsMap(map));
return StoreNoWriteBarrier(
MachineRepresentation::kTagged, object,
IntPtrConstant(HeapNumber::kMapOffset - kHeapObjectTag), map);
IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map);
}
Node* CodeStubAssembler::StoreObjectFieldRoot(Node* object, int offset,
@@ -1475,8 +1488,7 @@ Node* CodeStubAssembler::AllocateHeapNumber(MutableMode mode) {
Heap::RootListIndex heap_map_index =
mode == IMMUTABLE ? Heap::kHeapNumberMapRootIndex
: Heap::kMutableHeapNumberMapRootIndex;
Node* map = LoadRoot(heap_map_index);
StoreMapNoWriteBarrier(result, map);
StoreMapNoWriteBarrier(result, heap_map_index);
return result;
}
@@ -1492,7 +1504,7 @@ Node* CodeStubAssembler::AllocateSeqOneByteString(int length,
Comment("AllocateSeqOneByteString");
Node* result = Allocate(SeqOneByteString::SizeFor(length), flags);
DCHECK(Heap::RootIsImmortalImmovable(Heap::kOneByteStringMapRootIndex));
StoreMapNoWriteBarrier(result, LoadRoot(Heap::kOneByteStringMapRootIndex));
StoreMapNoWriteBarrier(result, Heap::kOneByteStringMapRootIndex);
StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
SmiConstant(Smi::FromInt(length)));
StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldOffset,
@@ -1522,7 +1534,7 @@ Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length,
// Just allocate the SeqOneByteString in new space.
Node* result = Allocate(size, flags);
DCHECK(Heap::RootIsImmortalImmovable(Heap::kOneByteStringMapRootIndex));
StoreMapNoWriteBarrier(result, LoadRoot(Heap::kOneByteStringMapRootIndex));
StoreMapNoWriteBarrier(result, Heap::kOneByteStringMapRootIndex);
StoreObjectFieldNoWriteBarrier(
result, SeqOneByteString::kLengthOffset,
mode == SMI_PARAMETERS ? length : SmiFromWord(length));
@@ -1552,7 +1564,7 @@ Node* CodeStubAssembler::AllocateSeqTwoByteString(int length,
Comment("AllocateSeqTwoByteString");
Node* result = Allocate(SeqTwoByteString::SizeFor(length), flags);
DCHECK(Heap::RootIsImmortalImmovable(Heap::kStringMapRootIndex));
StoreMapNoWriteBarrier(result, LoadRoot(Heap::kStringMapRootIndex));
StoreMapNoWriteBarrier(result, Heap::kStringMapRootIndex);
StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
SmiConstant(Smi::FromInt(length)));
StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
@@ -1582,7 +1594,7 @@ Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length,
// Just allocate the SeqTwoByteString in new space.
Node* result = Allocate(size, flags);
DCHECK(Heap::RootIsImmortalImmovable(Heap::kStringMapRootIndex));
StoreMapNoWriteBarrier(result, LoadRoot(Heap::kStringMapRootIndex));
StoreMapNoWriteBarrier(result, Heap::kStringMapRootIndex);
StoreObjectFieldNoWriteBarrier(
result, SeqTwoByteString::kLengthOffset,
mode == SMI_PARAMETERS ? length : SmiFromWord(length));
@@ -1612,9 +1624,8 @@ Node* CodeStubAssembler::AllocateSlicedString(
Node* offset) {
CSA_ASSERT(this, TaggedIsSmi(length));
Node* result = Allocate(SlicedString::kSize);
Node* map = LoadRoot(map_root_index);
DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
StoreMapNoWriteBarrier(result, map);
StoreMapNoWriteBarrier(result, map_root_index);
StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length,
MachineRepresentation::kTagged);
StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldOffset,
@@ -1645,9 +1656,8 @@ Node* CodeStubAssembler::AllocateConsString(Heap::RootListIndex map_root_index,
AllocationFlags flags) {
CSA_ASSERT(this, TaggedIsSmi(length));
Node* result = Allocate(ConsString::kSize, flags);
Node* map = LoadRoot(map_root_index);
DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
StoreMapNoWriteBarrier(result, map);
StoreMapNoWriteBarrier(result, map_root_index);
StoreObjectFieldNoWriteBarrier(result, ConsString::kLengthOffset, length,
MachineRepresentation::kTagged);
StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldOffset,
@@ -1798,8 +1808,8 @@ Node* CodeStubAssembler::AllocateNameDictionary(Node* at_least_space_for) {
Node* result = Allocate(store_size);
Comment("Initialize NameDictionary");
// Initialize FixedArray fields.
StoreObjectFieldRoot(result, FixedArray::kMapOffset,
Heap::kHashTableMapRootIndex);
DCHECK(Heap::RootIsImmortalImmovable(Heap::kHashTableMapRootIndex));
StoreMapNoWriteBarrier(result, Heap::kHashTableMapRootIndex);
StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset,
SmiFromWord(length));
// Initialized HashTable fields.
@@ -1958,17 +1968,16 @@ Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
Node* capacity, Node* length,
Node* allocation_site,
ParameterMode capacity_mode) {
bool is_double = IsFastDoubleElementsKind(kind);
// Allocate both array and elements object, and initialize the JSArray.
Node *array, *elements;
std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
kind, array_map, length, allocation_site, capacity, capacity_mode);
// Setup elements object.
Heap* heap = isolate()->heap();
Handle<Map> elements_map(is_double ? heap->fixed_double_array_map()
: heap->fixed_array_map());
StoreMapNoWriteBarrier(elements, HeapConstant(elements_map));
Heap::RootListIndex elements_map_index =
IsFastDoubleElementsKind(kind) ? Heap::kFixedDoubleArrayMapRootIndex
: Heap::kFixedArrayMapRootIndex;
DCHECK(Heap::RootIsImmortalImmovable(elements_map_index));
StoreMapNoWriteBarrier(elements, elements_map_index);
StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset,
TagParameter(capacity, capacity_mode));
@@ -1991,15 +2000,11 @@ Node* CodeStubAssembler::AllocateFixedArray(ElementsKind kind,
// Allocate both array and elements object, and initialize the JSArray.
Node* array = Allocate(total_size, flags);
Heap* heap = isolate()->heap();
Handle<Map> map(IsFastDoubleElementsKind(kind)
? heap->fixed_double_array_map()
: heap->fixed_array_map());
if (flags & kPretenured) {
StoreObjectField(array, JSObject::kMapOffset, HeapConstant(map));
} else {
StoreMapNoWriteBarrier(array, HeapConstant(map));
}
Heap::RootListIndex map_index = IsFastDoubleElementsKind(kind)
? Heap::kFixedDoubleArrayMapRootIndex
: Heap::kFixedArrayMapRootIndex;
DCHECK(Heap::RootIsImmortalImmovable(map_index));
StoreMapNoWriteBarrier(array, map_index);
StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset,
TagParameter(capacity_node, mode));
return array;
@@ -5982,7 +5987,7 @@ void CodeStubAssembler::TransitionElementsKind(Node* object, Node* map,
Bind(&done);
}
StoreObjectField(object, JSObject::kMapOffset, map);
StoreMap(object, map);
}
void CodeStubAssembler::TrapAllocationMemento(Node* object,
@@ -6122,9 +6127,7 @@ Node* CodeStubAssembler::CreateAllocationSiteInFeedbackVector(
Node* size = IntPtrConstant(AllocationSite::kSize);
Node* site = Allocate(size, CodeStubAssembler::kPretenured);
// Store the map
StoreObjectFieldRoot(site, AllocationSite::kMapOffset,
Heap::kAllocationSiteMapRootIndex);
StoreMap(site, LoadRoot(Heap::kAllocationSiteMapRootIndex));
Node* kind = SmiConstant(Smi::FromInt(GetInitialFastElementsKind()));
StoreObjectFieldNoWriteBarrier(site, AllocationSite::kTransitionInfoOffset,
kind);
@@ -6170,7 +6173,8 @@ Node* CodeStubAssembler::CreateWeakCellInFeedbackVector(Node* feedback_vector,
Node* cell = Allocate(size, CodeStubAssembler::kPretenured);
// Initialize the WeakCell.
StoreObjectFieldRoot(cell, WeakCell::kMapOffset, Heap::kWeakCellMapRootIndex);
DCHECK(Heap::RootIsImmortalImmovable(Heap::kWeakCellMapRootIndex));
StoreMapNoWriteBarrier(cell, Heap::kWeakCellMapRootIndex);
StoreObjectField(cell, WeakCell::kValueOffset, value);
StoreObjectFieldRoot(cell, WeakCell::kNextOffset,
Heap::kTheHoleValueRootIndex);
@@ -347,6 +347,9 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
Node* object, Node* offset, Node* value,
MachineRepresentation rep = MachineRepresentation::kTagged);
// Store the Map of a HeapObject.
Node* StoreMap(Node* object, Node* map);
Node* StoreMapNoWriteBarrier(Node* object,
Heap::RootListIndex map_root_index);
Node* StoreMapNoWriteBarrier(Node* object, Node* map);
Node* StoreObjectFieldRoot(Node* object, int offset,
Heap::RootListIndex root);
@@ -478,7 +478,7 @@ void StoreMapStub::GenerateAssembly(compiler::CodeAssemblerState* state) const {
Node* map = assembler.Parameter(Descriptor::kMap);
Node* value = assembler.Parameter(Descriptor::kValue);
assembler.StoreObjectField(receiver, JSObject::kMapOffset, map);
assembler.StoreMap(receiver, map);
assembler.Return(value);
}
@@ -521,7 +521,7 @@ void StoreTransitionStub::GenerateAssembly(
// And finally update the map.
assembler.Comment("Store map");
assembler.StoreObjectField(receiver, JSObject::kMapOffset, map);
assembler.StoreMap(receiver, map);
assembler.Return(value);
// Only store to tagged field never bails out.
@@ -2233,9 +2233,8 @@ compiler::Node* FastCloneShallowObjectStub::GenerateFastPath(
if (FLAG_allocation_site_pretenuring) {
Node* memento = assembler->InnerAllocate(copy, object_size);
assembler->StoreObjectFieldNoWriteBarrier(
memento, HeapObject::kMapOffset,
assembler->LoadRoot(Heap::kAllocationMementoMapRootIndex));
assembler->StoreMapNoWriteBarrier(memento,
Heap::kAllocationMementoMapRootIndex);
assembler->StoreObjectFieldNoWriteBarrier(
memento, AllocationMemento::kAllocationSiteOffset, allocation_site);
Node* memento_create_count = assembler->LoadObjectField(
@@ -2602,10 +2601,8 @@ compiler::Node* FastNewFunctionContextStub::Generate(
// Create a new closure from the given function info in new space
Node* function_context = assembler->Allocate(size);
Isolate* isolate = assembler->isolate();
assembler->StoreMapNoWriteBarrier(
function_context,
assembler->HeapConstant(isolate->factory()->function_context_map()));
assembler->StoreMapNoWriteBarrier(function_context,
Heap::kFunctionContextMapRootIndex);
assembler->StoreObjectFieldNoWriteBarrier(function_context,
Context::kLengthOffset,
assembler->SmiFromWord32(length));
@@ -2734,10 +2731,14 @@ compiler::Node* NonEmptyShallowClone(CodeStubAssembler* assembler,
kind, boilerplate_map, length, allocation_site, capacity, param_mode);
assembler->Comment("copy elements header");
for (int offset = 0; offset < FixedArrayBase::kHeaderSize;
offset += kPointerSize) {
Node* value = assembler->LoadObjectField(boilerplate_elements, offset);
assembler->StoreObjectField(elements, offset, value);
// Header consists of map and length.
STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize);
assembler->StoreMap(elements, assembler->LoadMap(boilerplate_elements));
{
int offset = FixedArrayBase::kLengthOffset;
assembler->StoreObjectFieldNoWriteBarrier(
elements, offset,
assembler->LoadObjectField(boilerplate_elements, offset));
}
if (assembler->Is64()) {
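The hunk above replaces a loop that copied the elements header word by word with one explicit store per header field, so each field gets the cheapest correct barrier: the map barrier for the map word, and none for the Smi length. The STATIC_ASSERT pins the layout assumption that makes the unrolling safe. A hedged standalone sketch of that assert-then-unroll pattern, with C++11 `static_assert` standing in for V8's STATIC_ASSERT and a hypothetical header struct:

```cpp
#include <cstddef>

struct FixedArrayHeader {
  void* map;              // gets the map-specific barrier in V8
  std::ptrdiff_t length;  // a Smi in V8, so no barrier is needed
};

// Fails to compile if the header ever grows beyond map + length,
// forcing the explicit copy below to be revisited.
static_assert(sizeof(FixedArrayHeader) == 2 * sizeof(void*),
              "header consists of map and length");

void CopyHeader(FixedArrayHeader* dst, const FixedArrayHeader* src) {
  dst->map = src->map;        // StoreMap(..., LoadMap(...)) in the patch
  dst->length = src->length;  // StoreObjectFieldNoWriteBarrier in the patch
}
```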
@@ -317,6 +317,12 @@ Node* CodeAssembler::Store(Node* base, Node* offset, Node* value) {
value, kFullWriteBarrier);
}
Node* CodeAssembler::StoreWithMapWriteBarrier(Node* base, Node* offset,
Node* value) {
return raw_assembler()->Store(MachineRepresentation::kTagged, base, offset,
value, kMapWriteBarrier);
}
Node* CodeAssembler::StoreNoWriteBarrier(MachineRepresentation rep, Node* base,
Node* value) {
return raw_assembler()->Store(rep, base, value, kNoWriteBarrier);
@@ -273,6 +273,7 @@ class V8_EXPORT_PRIVATE CodeAssembler {
// Store value to raw memory location.
Node* Store(Node* base, Node* value);
Node* Store(Node* base, Node* offset, Node* value);
Node* StoreWithMapWriteBarrier(Node* base, Node* offset, Node* value);
Node* StoreNoWriteBarrier(MachineRepresentation rep, Node* base, Node* value);
Node* StoreNoWriteBarrier(MachineRepresentation rep, Node* base, Node* offset,
Node* value);
@@ -590,7 +590,7 @@ void AccessorAssemblerImpl::HandleStoreICProtoHandler(
descriptors, value_index_in_descriptor, 0, INTPTR_PARAMETERS);
GotoIf(WordNotEqual(p->value, constant), miss);
StoreObjectField(p->receiver, JSObject::kMapOffset, transition);
StoreMap(p->receiver, transition);
Return(p->value);
}
}
@@ -709,7 +709,7 @@ void AccessorAssemblerImpl::HandleStoreFieldAndReturn(
StoreNamedField(holder, offset, true, representation, prepared_value,
transition_to_field);
if (transition_to_field) {
StoreObjectField(holder, JSObject::kMapOffset, transition);
StoreMap(holder, transition);
}
Return(value);
}
@@ -719,7 +719,7 @@ void AccessorAssemblerImpl::HandleStoreFieldAndReturn(
StoreNamedField(holder, offset, false, representation, prepared_value,
transition_to_field);
if (transition_to_field) {
StoreObjectField(holder, JSObject::kMapOffset, transition);
StoreMap(holder, transition);
}
Return(value);
}
@@ -149,7 +149,7 @@ void KeyedStoreGenericAssembler::TryRewriteElements(
GrowElementsCapacity(receiver, elements, from_kind, to_kind, capacity,
capacity, INTPTR_PARAMETERS, bailout);
}
StoreObjectField(receiver, JSObject::kMapOffset, var_target_map.value());
StoreMap(receiver, var_target_map.value());
}
}
@@ -166,7 +166,7 @@ void KeyedStoreGenericAssembler::TryChangeToHoleyMapHelper(
}
Node* holey_map =
LoadContextElement(native_context, Context::ArrayMapIndex(holey_kind));
StoreObjectField(receiver, JSObject::kMapOffset, holey_map);
StoreMap(receiver, holey_map);
Goto(done);
}
@@ -2624,7 +2624,7 @@ void Interpreter::DoForInNext(InterpreterAssembler* assembler) {
// Check if we can use the for-in fast path potentially using the enum cache.
Label if_fast(assembler), if_slow(assembler, Label::kDeferred);
Node* receiver_map = __ LoadObjectField(receiver, HeapObject::kMapOffset);
Node* receiver_map = __ LoadMap(receiver);
__ Branch(__ WordEqual(receiver_map, cache_type), &if_fast, &if_slow);
__ Bind(&if_fast);
{