Commit ee485e2c authored by Igor Sheludko, committed by Commit Bot

[ptr-compr] Introduce MapWordSlot

Introduce a dedicated slot type for an object's map word, which will require
different handling in the compressed-pointers case.

Bug: v8:8518
Change-Id: I99cb103bad57b134ecb8d7dd7018cf16ed3d8f25
Reviewed-on: https://chromium-review.googlesource.com/c/1365272
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/master@{#58073}
parent b04632d5
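For context: this CL only adds the MapWordSlot alias (it is still a FullObjectSlot, per the SlotTraits hunks below); the different handling the message anticipates comes with pointer compression, where ordinary tagged slots can shrink to 32-bit offsets from a heap base while the map word must still be able to carry a full-width forwarding pointer during evacuation. A minimal self-contained sketch of that distinction follows, assuming a 64-bit build; all names (kHeapBase, CompressedSlot, FullMapWordSlot) are invented for illustration and are not part of this CL.

#include <cinttypes>
#include <cstdint>
#include <cstdio>

using Address = std::uintptr_t;
constexpr Address kHeapBase = 0x100000000;  // assumed 4GB-aligned heap base

// An ordinary tagged slot: storable as 32 bits, decompressed by adding the
// heap base on load.
struct CompressedSlot {
  std::uint32_t raw;
  Address Load() const { return kHeapBase + raw; }
};

// A map-word slot: kept full-width so it can hold either a map pointer or a
// raw forwarding address during GC, hence the dedicated type.
struct FullMapWordSlot {
  Address raw;
  Address Load() const { return raw; }
};

int main() {
  CompressedSlot field{0x4000};
  FullMapWordSlot map_word{kHeapBase + 0x8000};
  std::printf("field -> %" PRIxPTR ", map word -> %" PRIxPTR "\n",
              field.Load(), map_word.Load());
}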
@@ -602,6 +602,7 @@ struct SlotTraits;
 template <>
 struct SlotTraits<SlotLocation::kOffHeap> {
   using TObjectSlot = FullObjectSlot;
+  using TMapWordSlot = FullObjectSlot;
   using TMaybeObjectSlot = FullMaybeObjectSlot;
   using THeapObjectSlot = FullHeapObjectSlot;
 };
@@ -611,6 +612,7 @@ struct SlotTraits<SlotLocation::kOffHeap> {
 template <>
 struct SlotTraits<SlotLocation::kOnHeap> {
   using TObjectSlot = FullObjectSlot;
+  using TMapWordSlot = FullObjectSlot;
   using TMaybeObjectSlot = FullMaybeObjectSlot;
   using THeapObjectSlot = FullHeapObjectSlot;
 };
@@ -619,6 +621,10 @@ struct SlotTraits<SlotLocation::kOnHeap> {
 // holding ObjectPtr value (smi or strong heap object).
 using ObjectSlot = SlotTraits<SlotLocation::kOnHeap>::TObjectSlot;
+// A MapWordSlot instance describes a kTaggedSize-sized on-heap field ("slot")
+// holding a HeapObjectPtr (strong heap object) value or a forwarding pointer.
+using MapWordSlot = SlotTraits<SlotLocation::kOnHeap>::TMapWordSlot;
+
 // A MaybeObjectSlot instance describes a kTaggedSize-sized on-heap field
 // ("slot") holding MaybeObject (smi or weak heap object or strong heap object).
 using MaybeObjectSlot = SlotTraits<SlotLocation::kOnHeap>::TMaybeObjectSlot;
@@ -508,12 +508,13 @@ AllocationMemento* Heap::FindAllocationMemento(Map map, HeapObject* object) {
     return nullptr;
   }
   HeapObject* candidate = HeapObject::FromAddress(memento_address);
-  Map candidate_map = candidate->map();
+  MapWordSlot candidate_map_slot = candidate->map_slot();
   // This fast check may peek at an uninitialized word. However, the slow check
   // below (memento_address == top) ensures that this is safe. Mark the word as
   // initialized to silence MemorySanitizer warnings.
-  MSAN_MEMORY_IS_INITIALIZED(&candidate_map, sizeof(candidate_map));
-  if (candidate_map != ReadOnlyRoots(this).allocation_memento_map()) {
+  MSAN_MEMORY_IS_INITIALIZED(candidate_map_slot.address(), kTaggedSize);
+  if (!candidate_map_slot.contains_value(
+          ReadOnlyRoots(this).allocation_memento_map().ptr())) {
     return nullptr;
   }
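The rewritten check relies on contains_value, which compares the raw word stored in the slot against a full pointer value using a relaxed atomic load, without materializing an Object; that is what keeps this fast path safe on a possibly uninitialized map word. A minimal sketch of those semantics, with invented names (SlotSketch, location), not the real slot classes:

#include <atomic>
#include <cstdint>

using Address = std::uintptr_t;

// The slot wraps the location of a tagged word in the heap; modeled here
// directly as a pointer to an atomic word.
struct SlotSketch {
  std::atomic<Address>* location;
  bool contains_value(Address raw_value) const {
    // One relaxed load and a raw compare: nothing is decompressed or
    // dereferenced, so a word holding garbage or a forwarding pointer is
    // merely unequal, never unsafe to inspect.
    return location->load(std::memory_order_relaxed) == raw_value;
  }
};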
@@ -2485,7 +2485,8 @@ HeapObject* Heap::CreateFillerObjectAt(Address addr, int size,
   // At this point, we may be deserializing the heap from a snapshot, and
   // none of the maps have been created yet and are nullptr.
-  DCHECK((filler->map().is_null() && !deserialization_complete_) ||
+  DCHECK((filler->map_slot().contains_value(kNullAddress) &&
+          !deserialization_complete_) ||
          filler->map()->IsMap());
   return filler;
 }
@@ -1188,8 +1188,7 @@ class EvacuateVisitorBase : public HeapObjectVisitor {
     if (mode != MigrationMode::kFast)
       base->ExecuteMigrationObservers(dest, src, dst, size);
-    base::Relaxed_Store(reinterpret_cast<base::AtomicWord*>(src_addr),
-                        static_cast<base::AtomicWord>(dst_addr));
+    src->set_map_word(MapWord::FromForwardingAddress(dst));
   }
   EvacuateVisitorBase(Heap* heap, LocalAllocator* local_allocator,
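Replacing the raw base::Relaxed_Store with set_map_word(MapWord::FromForwardingAddress(dst)) funnels every map-word write through one API, which is the point of having a MapWordSlot to specialize later. Roughly, V8 stores a forwarding pointer as the untagged destination address, so its low bit looks like a Smi's and it can be told apart from a real (heap-object-tagged) map pointer. A simplified, self-contained sketch of that encoding, with an invented MapWordSketch class:

#include <cstdint>

using Address = std::uintptr_t;
constexpr Address kHeapObjectTag = 1;  // heap pointers carry a low tag bit
constexpr Address kSmiTagMask = 1;     // Smis have a zero low bit
constexpr Address kSmiTag = 0;

class MapWordSketch {
 public:
  // Store the plain (untagged) destination address over the map word.
  static MapWordSketch FromForwardingAddress(Address tagged_object_ptr) {
    return MapWordSketch(tagged_object_ptr - kHeapObjectTag);
  }
  // A zero low bit means "Smi-like", i.e. a forwarding address rather than
  // a tagged map pointer.
  bool IsForwardingAddress() const {
    return (value_ & kSmiTagMask) == kSmiTag;
  }
  // Re-tag to recover the destination object pointer.
  Address ToForwardingAddress() const { return value_ + kHeapObjectTag; }

 private:
  explicit MapWordSketch(Address value) : value_(value) {}
  Address value_;
};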
@@ -75,9 +75,10 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit(Map map,
 }
 template <typename ResultType, typename ConcreteVisitor>
-void HeapVisitor<ResultType, ConcreteVisitor>::VisitMapPointer(HeapObject* host,
-                                                               ObjectSlot map) {
-  static_cast<ConcreteVisitor*>(this)->VisitPointer(host, map);
+void HeapVisitor<ResultType, ConcreteVisitor>::VisitMapPointer(
+    HeapObject* host, MapWordSlot map_slot) {
+  DCHECK(!host->map_word().IsForwardingAddress());
+  static_cast<ConcreteVisitor*>(this)->VisitPointer(host, ObjectSlot(map_slot));
 }
 #define VISIT(TypeName, Type) \
@@ -103,7 +103,7 @@ class HeapVisitor : public ObjectVisitor {
   // Guard predicate for visiting the object's map pointer separately.
   V8_INLINE bool ShouldVisitMapPointer() { return true; }
   // A callback for visiting the map pointer in the object header.
-  V8_INLINE void VisitMapPointer(HeapObject* host, ObjectSlot map);
+  V8_INLINE void VisitMapPointer(HeapObject* host, MapWordSlot map_slot);
   // If this predicate returns false, then the heap visitor will fail
   // in default Visit implementation for subclasses of JSObject.
   V8_INLINE bool AllowDefaultJSObjectVisit() { return true; }
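The two hooks above cooperate: the generic Visit path consults ShouldVisitMapPointer() and, when it returns true, hands the header slot to VisitMapPointer, whose default implementation forwards to VisitPointer. A stripped-down CRTP sketch of that shape (all types invented for illustration, not the real V8 classes):

struct HeapObjectSketch {};
struct MapWordSlotSketch {
  HeapObjectSketch* location;
};

template <typename ConcreteVisitor>
struct HeapVisitorSketch {
  void Visit(HeapObjectSketch* object, MapWordSlotSketch map_slot) {
    auto* self = static_cast<ConcreteVisitor*>(this);
    // Guard predicate: subclasses can opt out of visiting the header.
    if (self->ShouldVisitMapPointer()) {
      self->VisitMapPointer(object, map_slot);
    }
    // ... then visit the object's body slots ...
  }
  bool ShouldVisitMapPointer() { return true; }
  void VisitMapPointer(HeapObjectSketch*, MapWordSlotSketch) {}
};

// A concrete visitor only overrides what it cares about.
struct CountingVisitor : HeapVisitorSketch<CountingVisitor> {
  int map_pointers = 0;
  void VisitMapPointer(HeapObjectSketch*, MapWordSlotSketch) {
    ++map_pointers;
  }
};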
@@ -814,29 +814,25 @@ void HeapObject::set_map_after_allocation(Map value, WriteBarrierMode mode) {
   }
 }
-ObjectSlot HeapObject::map_slot() {
-  return ObjectSlot(FIELD_ADDR(this, kMapOffset));
+MapWordSlot HeapObject::map_slot() const {
+  return MapWordSlot(FIELD_ADDR(this, kMapOffset));
 }
 MapWord HeapObject::map_word() const {
-  return MapWord(RELAXED_READ_FIELD(this, kMapOffset).ptr());
+  return MapWord(map_slot().Relaxed_Load().ptr());
 }
 void HeapObject::set_map_word(MapWord map_word) {
-  RELAXED_WRITE_FIELD(this, kMapOffset,
-                      reinterpret_cast<Object*>(map_word.value_));
+  map_slot().Relaxed_Store(ObjectPtr(map_word.value_));
 }
 MapWord HeapObject::synchronized_map_word() const {
-  return MapWord(
-      reinterpret_cast<Address>(ACQUIRE_READ_FIELD(this, kMapOffset)));
+  return MapWord(map_slot().Acquire_Load().ptr());
 }
 void HeapObject::synchronized_set_map_word(MapWord map_word) {
-  RELEASE_WRITE_FIELD(
-      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
+  map_slot().Release_Store(ObjectPtr(map_word.value_));
 }
 int HeapObject::Size() const { return SizeFromMap(map()); }
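The relaxed accessors above serve the hot path, while the synchronized_ (acquire/release) variants exist because the map word is how a freshly initialized object is published to concurrent readers such as the marker. A generic sketch of that pairing in plain C++ atomics (names invented):

#include <atomic>
#include <cstdint>

std::atomic<std::uintptr_t> map_word_slot{0};

// Mutator: initialize the object body, then publish it by release-storing
// the map word, mirroring synchronized_set_map_word().
void Publish(std::uintptr_t map_ptr) {
  // ... write the object's other fields first ...
  map_word_slot.store(map_ptr, std::memory_order_release);
}

// Concurrent reader: acquire-load the map word, mirroring
// synchronized_map_word(); a non-zero result guarantees the body writes made
// before the release store are visible.
std::uintptr_t ReadMapWord() {
  return map_word_slot.load(std::memory_order_acquire);
}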
@@ -1348,8 +1344,10 @@ int FreeSpace::Size() { return size(); }
 FreeSpace* FreeSpace::next() {
 #ifdef DEBUG
   Heap* heap = Heap::FromWritableHeapObject(this);
-  DCHECK_IMPLIES(map() != heap->isolate()->root(RootIndex::kFreeSpaceMap),
-                 !heap->deserialization_complete() && map().is_null());
+  Object* free_space_map = heap->isolate()->root(RootIndex::kFreeSpaceMap);
+  DCHECK_IMPLIES(!map_slot().contains_value(free_space_map->ptr()),
+                 !heap->deserialization_complete() &&
+                     map_slot().contains_value(kNullAddress));
 #endif
   DCHECK_LE(kNextOffset + kPointerSize, relaxed_read_size());
   return reinterpret_cast<FreeSpace*>(Memory<Address>(address() + kNextOffset));
@@ -1359,8 +1357,10 @@ FreeSpace* FreeSpace::next() {
 void FreeSpace::set_next(FreeSpace* next) {
 #ifdef DEBUG
   Heap* heap = Heap::FromWritableHeapObject(this);
-  DCHECK_IMPLIES(map() != heap->isolate()->root(RootIndex::kFreeSpaceMap),
-                 !heap->deserialization_complete() && map().is_null());
+  Object* free_space_map = heap->isolate()->root(RootIndex::kFreeSpaceMap);
+  DCHECK_IMPLIES(!map_slot().contains_value(free_space_map->ptr()),
+                 !heap->deserialization_complete() &&
+                     map_slot().contains_value(kNullAddress));
 #endif
   DCHECK_LE(kNextOffset + kPointerSize, relaxed_read_size());
   base::Relaxed_Store(
@@ -1040,7 +1040,7 @@ class HeapObject: public Object {
   inline Map map() const;
   inline void set_map(Map value);
-  inline ObjectSlot map_slot();
+  inline MapWordSlot map_slot() const;
   // The no-write-barrier version. This is OK if the object is white and in
   // new space, or if the value is an immortal immutable object, like the maps
@@ -135,17 +135,16 @@ void HeapObjectPtr::set_map_after_allocation(Map value, WriteBarrierMode mode) {
   reinterpret_cast<HeapObject*>(ptr())->set_map_after_allocation(value, mode);
 }
-ObjectSlot HeapObjectPtr::map_slot() {
-  return ObjectSlot(FIELD_ADDR(this, kMapOffset));
+MapWordSlot HeapObjectPtr::map_slot() const {
+  return MapWordSlot(FIELD_ADDR(this, kMapOffset));
 }
 MapWord HeapObjectPtr::map_word() const {
-  return MapWord(RELAXED_READ_FIELD(this, kMapOffset).ptr());
+  return MapWord(map_slot().Relaxed_Load().ptr());
 }
 void HeapObjectPtr::set_map_word(MapWord map_word) {
-  RELAXED_WRITE_FIELD(this, kMapOffset,
-                      reinterpret_cast<Object*>(map_word.value_));
+  map_slot().Relaxed_Store(ObjectPtr(map_word.value_));
 }
 void HeapObjectPtr::synchronized_set_map(Map value) {
@@ -163,8 +162,7 @@ void HeapObjectPtr::synchronized_set_map(Map value) {
 }
 void HeapObjectPtr::synchronized_set_map_word(MapWord map_word) {
-  RELEASE_WRITE_FIELD(this, kMapOffset,
-                      reinterpret_cast<Object*>(map_word.value_));
+  map_slot().Release_Store(ObjectPtr(map_word.value_));
 }
 WriteBarrierMode HeapObjectPtr::GetWriteBarrierMode(
@@ -159,7 +159,7 @@ class HeapObjectPtr : public ObjectPtr {
   // of primitive (non-JS) objects like strings, heap numbers etc.
   inline void set_map_no_write_barrier(Map value);
-  inline ObjectSlot map_slot();
+  inline MapWordSlot map_slot() const;
   inline MapWord map_word() const;
   inline void set_map_word(MapWord map_word);
@@ -232,7 +232,7 @@ void RunLoadImmIndex(MachineType rep, TestAlignment t) {
   for (int i = 0; i < kNumElems; i++) {
     BufferedRawMachineAssemblerTester<CType> m;
     void* base_pointer = &buffer[0] - offset;
-#if V8_POINTER_COMPRESSION
+#ifdef V8_COMPRESS_POINTERS
     if (rep.IsTagged()) {
       // When pointer compression is enabled then we need to access only
       // the lower 32-bit of the tagged value while the buffer contains
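The test comment above notes that with pointer compression only the lower 32 bits of a tagged value are loaded while the buffer holds full 64-bit values; independent of the V8 test harness, the extraction it describes is just this (little-endian assumption, invented helper name):

#include <cstdint>
#include <cstring>

std::uint32_t LowerHalf(std::uint64_t full_tagged_value) {
  std::uint32_t lower;
  // The first four bytes of a 64-bit word are its low half on little-endian
  // targets, which is what the compressed load observes.
  std::memcpy(&lower, &full_tagged_value, sizeof(lower));
  return lower;
}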