Commit 279bd3e1 authored by Leszek Swirski, committed by Commit Bot

[runtime] Compress the off-heap string table

Rather than an Object array, use a Tagged_t array to store the
elements of the off-heap string table. This matches the old on-heap
string table's behaviour, and recovers the memory regressions
introduced by that work.
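
For illustration, a minimal sketch of the compression scheme this
relies on (simplified helper names; V8's real CompressTagged and
DecompressTaggedAny also handle smis and sign extension):

  // Each entry keeps only the low 32 bits of the tagged value; the
  // isolate root (the cage base) supplies the high bits on load.
  Tagged_t Compress(Address tagged) {
    return static_cast<Tagged_t>(tagged);  // truncate to 32 bits
  }
  Address Decompress(Address isolate_root, Tagged_t value) {
    return isolate_root + static_cast<Address>(value);
  }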

To be able to do this, this change also introduces a new slot type,
OffHeapObjectSlot. This is because CompressedObjectSlot assumes that
the slot is on-heap, and that it can mask the slot location to
recover the isolate root. OffHeapObjectSlot doesn't define an
operator*, and instead provides a `load(const Isolate*)` method.
The other slots also gain this method so that they can use it in
slot-templated functions. Also, the RootVisitor gains an
OffHeapObjectSlot overload, which is UNREACHABLE by default and only
needs to be defined by visitors that can access the string table.
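
A visitor that walks the string table then looks roughly like this
(hypothetical class name; the verifier, cleaner and serializer
visitors in this change follow the same shape):

  class ExampleStringTableVisitor : public RootVisitor {
   public:
    explicit ExampleStringTableVisitor(Isolate* isolate)
        : isolate_(isolate) {}
    void VisitRootPointers(Root root, const char* description,
                           FullObjectSlot start,
                           FullObjectSlot end) override {
      UNREACHABLE();  // the string table only hands out off-heap slots
    }
    void VisitRootPointers(Root root, const char* description,
                           OffHeapObjectSlot start,
                           OffHeapObjectSlot end) override {
      for (OffHeapObjectSlot p = start; p < end; ++p) {
        // No operator*: decompression needs the isolate root explicitly.
        Object o = p.load(isolate_);
        // ... inspect o ...
      }
    }
   private:
    Isolate* isolate_;
  };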

As a drive-by, fix some non-atomic accesses to the off-heap string
table, also using the new slot.
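
Concretely, reads of shared table slots that previously went through
plain loads now use the slot's relaxed-atomic accessors; a before/after
sketch with a hypothetical slot variable:

  Object before = *slot;                      // old: plain, racy read
  Object after = slot.Relaxed_Load(isolate);  // new: relaxed atomic load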

Bug: chromium:1109553
Bug: chromium:1115116
Bug: chromium:1115559
Bug: chromium:1115683
Change-Id: I819ed7bf820e9ef98ad5d5f9d0d592efbb6f5aa6
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2352489
Commit-Queue: Leszek Swirski <leszeks@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#69381}
parent 5dedee92
@@ -682,9 +682,11 @@ class CompressedObjectSlot;
class CompressedMaybeObjectSlot;
class CompressedMapWordSlot;
class CompressedHeapObjectSlot;
class OffHeapCompressedObjectSlot;
class FullObjectSlot;
class FullMaybeObjectSlot;
class FullHeapObjectSlot;
class OffHeapFullObjectSlot;
class OldSpace;
class ReadOnlySpace;
class RelocInfo;
@@ -701,46 +703,39 @@ class Struct;
class Symbol;
class Variable;
enum class SlotLocation { kOnHeap, kOffHeap };
template <SlotLocation slot_location>
struct SlotTraits;
// Off-heap slots are always full-pointer slots.
template <>
struct SlotTraits<SlotLocation::kOffHeap> {
using TObjectSlot = FullObjectSlot;
using TMaybeObjectSlot = FullMaybeObjectSlot;
using THeapObjectSlot = FullHeapObjectSlot;
};
// On-heap slots are either full-pointer slots or compressed slots depending
// on whether the pointer compression is enabled or not.
template <>
struct SlotTraits<SlotLocation::kOnHeap> {
// Slots are either full-pointer slots or compressed slots depending on whether
// pointer compression is enabled or not.
struct SlotTraits {
#ifdef V8_COMPRESS_POINTERS
using TObjectSlot = CompressedObjectSlot;
using TMaybeObjectSlot = CompressedMaybeObjectSlot;
using THeapObjectSlot = CompressedHeapObjectSlot;
using TOffHeapObjectSlot = OffHeapCompressedObjectSlot;
#else
using TObjectSlot = FullObjectSlot;
using TMaybeObjectSlot = FullMaybeObjectSlot;
using THeapObjectSlot = FullHeapObjectSlot;
using TOffHeapObjectSlot = OffHeapFullObjectSlot;
#endif
};
// An ObjectSlot instance describes a kTaggedSize-sized on-heap field ("slot")
// holding Object value (smi or strong heap object).
using ObjectSlot = SlotTraits<SlotLocation::kOnHeap>::TObjectSlot;
// holding an Object value (smi or strong heap object).
using ObjectSlot = SlotTraits::TObjectSlot;
// A MaybeObjectSlot instance describes a kTaggedSize-sized on-heap field
// ("slot") holding MaybeObject (smi or weak heap object or strong heap object).
using MaybeObjectSlot = SlotTraits<SlotLocation::kOnHeap>::TMaybeObjectSlot;
using MaybeObjectSlot = SlotTraits::TMaybeObjectSlot;
// A HeapObjectSlot instance describes a kTaggedSize-sized field ("slot")
// holding a weak or strong pointer to a heap object (think:
// HeapObjectReference).
using HeapObjectSlot = SlotTraits<SlotLocation::kOnHeap>::THeapObjectSlot;
using HeapObjectSlot = SlotTraits::THeapObjectSlot;
// An OffHeapObjectSlot instance describes a kTaggedSize-sized field ("slot")
// holding an Object value (smi or strong heap object), whose slot location is
// off-heap.
using OffHeapObjectSlot = SlotTraits::TOffHeapObjectSlot;
using WeakSlotCallback = bool (*)(FullObjectSlot pointer);
@@ -14,14 +14,18 @@
namespace v8 {
namespace internal {
inline const Isolate* GetIsolateForPtrCompr(HeapObject object) {
inline const Isolate* GetIsolateForPtrComprFromOnHeapAddress(Address address) {
#ifdef V8_COMPRESS_POINTERS
return Isolate::FromRoot(GetIsolateRoot(object.ptr()));
return Isolate::FromRoot(GetIsolateRoot(address));
#else
return nullptr;
#endif // V8_COMPRESS_POINTERS
}
inline const Isolate* GetIsolateForPtrCompr(HeapObject object) {
return GetIsolateForPtrComprFromOnHeapAddress(object.ptr());
}
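
// Sketch (approximate body of GetIsolateRoot, used above): the pointer-
// compression cage is aligned to its own size, so any on-heap address can
// be rounded down to recover the isolate root. An off-heap slot address
// has no such relation to the root, which is why the new off-heap slot
// API takes the Isolate explicitly.
//
//   inline Address GetIsolateRoot(Address on_heap_addr) {
//     return RoundDown<kPtrComprIsolateRootAlignment>(on_heap_addr);
//   }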
inline const Isolate* GetIsolateForPtrCompr(const Isolate* isolate) {
#ifdef V8_COMPRESS_POINTERS
return isolate;
@@ -23,6 +23,7 @@
#include "src/common/globals.h"
#include "src/debug/debug.h"
#include "src/deoptimizer/deoptimizer.h"
#include "src/execution/isolate-utils-inl.h"
#include "src/execution/microtask-queue.h"
#include "src/execution/runtime-profiler.h"
#include "src/execution/v8threads.h"
@@ -1828,22 +1829,33 @@ void Heap::CopyRange(HeapObject dst_object, const TSlot dst_slot,
// Helper class for verifying the string table.
class StringTableVerifier : public RootVisitor {
public:
explicit StringTableVerifier(Isolate* isolate) : isolate_(isolate) {}
void VisitRootPointers(Root root, const char* description,
FullObjectSlot start, FullObjectSlot end) override {
UNREACHABLE();
}
void VisitRootPointers(Root root, const char* description,
OffHeapObjectSlot start,
OffHeapObjectSlot end) override {
// Visit all HeapObject pointers in [start, end).
for (FullObjectSlot p = start; p < end; ++p) {
DCHECK(!HasWeakHeapObjectTag(*p));
if ((*p).IsHeapObject()) {
HeapObject object = HeapObject::cast(*p);
for (OffHeapObjectSlot p = start; p < end; ++p) {
Object o = p.load(isolate_);
DCHECK(!HasWeakHeapObjectTag(o));
if (o.IsHeapObject()) {
HeapObject object = HeapObject::cast(o);
// Check that the string is actually internalized.
CHECK(object.IsInternalizedString());
}
}
}
private:
Isolate* isolate_;
};
static void VerifyStringTable(Isolate* isolate) {
StringTableVerifier verifier;
StringTableVerifier verifier(isolate);
isolate->string_table()->IterateElements(&verifier);
}
#endif // VERIFY_HEAP
@@ -3068,6 +3080,14 @@ class LeftTrimmerVerifierRootVisitor : public RootVisitor {
}
}
void VisitRootPointers(Root root, const char* description,
OffHeapObjectSlot start,
OffHeapObjectSlot end) override {
DCHECK_EQ(root, Root::kStringTable);
// We can skip iterating the string table; it doesn't point to any fixed
// arrays.
}
private:
FixedArrayBase to_check_;
@@ -5942,6 +5962,11 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
FullObjectSlot start, FullObjectSlot end) override {
MarkPointersImpl(start, end);
}
void VisitRootPointers(Root root, const char* description,
OffHeapObjectSlot start,
OffHeapObjectSlot end) override {
MarkPointersImpl(start, end);
}
void TransitiveClosure() {
while (!marking_stack_.empty()) {
@@ -5959,8 +5984,9 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
template <typename TSlot>
V8_INLINE void MarkPointersImpl(TSlot start, TSlot end) {
// Treat weak references as strong.
Isolate* isolate = filter_->heap_->isolate();
for (TSlot p = start; p < end; ++p) {
typename TSlot::TObject object = *p;
typename TSlot::TObject object = p.load(isolate);
HeapObject heap_object;
if (object.GetHeapObject(&heap_object)) {
MarkHeapObject(heap_object);
@@ -6393,6 +6419,13 @@ void VerifyPointersVisitor::VisitRootPointers(Root root,
VerifyPointersImpl(start, end);
}
void VerifyPointersVisitor::VisitRootPointers(Root root,
const char* description,
OffHeapObjectSlot start,
OffHeapObjectSlot end) {
VerifyPointersImpl(start, end);
}
void VerifyPointersVisitor::VerifyHeapObjectImpl(HeapObject heap_object) {
CHECK(IsValidHeapObject(heap_, heap_object));
CHECK(heap_object.map().IsMap());
@@ -6400,8 +6433,9 @@ void VerifyPointersVisitor::VerifyHeapObjectImpl(HeapObject heap_object) {
template <typename TSlot>
void VerifyPointersVisitor::VerifyPointersImpl(TSlot start, TSlot end) {
Isolate* isolate = heap_->isolate();
for (TSlot slot = start; slot < end; ++slot) {
typename TSlot::TObject object = *slot;
typename TSlot::TObject object = slot.load(isolate);
HeapObject heap_object;
if (object.GetHeapObject(&heap_object)) {
VerifyHeapObjectImpl(heap_object);
@@ -2460,6 +2460,9 @@ class VerifyPointersVisitor : public ObjectVisitor, public RootVisitor {
void VisitRootPointers(Root root, const char* description,
FullObjectSlot start, FullObjectSlot end) override;
void VisitRootPointers(Root root, const char* description,
OffHeapObjectSlot start,
OffHeapObjectSlot end) override;
protected:
V8_INLINE void VerifyHeapObjectImpl(HeapObject heap_object);
@@ -11,6 +11,8 @@
#include "src/deoptimizer/deoptimizer.h"
#include "src/execution/execution.h"
#include "src/execution/frames-inl.h"
#include "src/execution/isolate-utils-inl.h"
#include "src/execution/isolate-utils.h"
#include "src/execution/vm-state-inl.h"
#include "src/handles/global-handles.h"
#include "src/heap/array-buffer-sweeper.h"
@@ -1058,12 +1060,19 @@ class InternalizedStringTableCleaner : public RootVisitor {
void VisitRootPointers(Root root, const char* description,
FullObjectSlot start, FullObjectSlot end) override {
UNREACHABLE();
}
void VisitRootPointers(Root root, const char* description,
OffHeapObjectSlot start,
OffHeapObjectSlot end) override {
DCHECK_EQ(root, Root::kStringTable);
// Visit all HeapObject pointers in [start, end).
MarkCompactCollector::NonAtomicMarkingState* marking_state =
heap_->mark_compact_collector()->non_atomic_marking_state();
for (FullObjectSlot p = start; p < end; ++p) {
Object o = *p;
Isolate* isolate = heap_->isolate();
for (OffHeapObjectSlot p = start; p < end; ++p) {
Object o = p.load(isolate);
if (o.IsHeapObject()) {
HeapObject heap_object = HeapObject::cast(o);
DCHECK(!Heap::InYoungGeneration(heap_object));
@@ -2627,6 +2636,12 @@ MaybeObject MakeSlotValue<MaybeObjectSlot, HeapObjectReferenceType::WEAK>(
return HeapObjectReference::Weak(heap_object);
}
template <>
Object MakeSlotValue<OffHeapObjectSlot, HeapObjectReferenceType::STRONG>(
HeapObject heap_object) {
return heap_object;
}
#ifdef V8_COMPRESS_POINTERS
template <>
Object MakeSlotValue<FullObjectSlot, HeapObjectReferenceType::STRONG>(
@@ -2650,12 +2665,13 @@ template <AccessMode access_mode, HeapObjectReferenceType reference_type,
static inline SlotCallbackResult UpdateSlot(TSlot slot,
typename TSlot::TObject old,
HeapObject heap_obj) {
static_assert(
std::is_same<TSlot, FullObjectSlot>::value ||
static_assert(std::is_same<TSlot, FullObjectSlot>::value ||
std::is_same<TSlot, ObjectSlot>::value ||
std::is_same<TSlot, FullMaybeObjectSlot>::value ||
std::is_same<TSlot, MaybeObjectSlot>::value,
"Only [Full]ObjectSlot and [Full]MaybeObjectSlot are expected here");
std::is_same<TSlot, MaybeObjectSlot>::value ||
std::is_same<TSlot, OffHeapObjectSlot>::value,
"Only [Full|OffHeap]ObjectSlot and [Full]MaybeObjectSlot are "
"expected here");
MapWord map_word = heap_obj.map_word();
if (map_word.IsForwardingAddress()) {
DCHECK_IMPLIES(!Heap::InFromPage(heap_obj),
@@ -2679,8 +2695,9 @@ static inline SlotCallbackResult UpdateSlot(TSlot slot,
}
template <AccessMode access_mode, typename TSlot>
static inline SlotCallbackResult UpdateSlot(TSlot slot) {
typename TSlot::TObject obj = slot.Relaxed_Load();
static inline SlotCallbackResult UpdateSlot(const Isolate* isolate,
TSlot slot) {
typename TSlot::TObject obj = slot.Relaxed_Load(isolate);
HeapObject heap_obj;
if (TSlot::kCanBeWeak && obj->GetHeapObjectIfWeak(&heap_obj)) {
UpdateSlot<access_mode, HeapObjectReferenceType::WEAK>(slot, obj, heap_obj);
@@ -2692,8 +2709,9 @@ static inline SlotCallbackResult UpdateSlot(TSlot slot) {
}
template <AccessMode access_mode, typename TSlot>
static inline SlotCallbackResult UpdateStrongSlot(TSlot slot) {
typename TSlot::TObject obj = slot.Relaxed_Load();
static inline SlotCallbackResult UpdateStrongSlot(const Isolate* isolate,
TSlot slot) {
typename TSlot::TObject obj = slot.Relaxed_Load(isolate);
DCHECK(!HAS_WEAK_HEAP_OBJECT_TAG(obj.ptr()));
HeapObject heap_obj;
if (obj.GetHeapObject(&heap_obj)) {
@@ -2709,37 +2727,48 @@ static inline SlotCallbackResult UpdateStrongSlot(TSlot slot) {
// It does not expect to encounter pointers to dead objects.
class PointersUpdatingVisitor : public ObjectVisitor, public RootVisitor {
public:
explicit PointersUpdatingVisitor(const Isolate* isolate)
: isolate_(isolate) {}
void VisitPointer(HeapObject host, ObjectSlot p) override {
UpdateStrongSlotInternal(p);
UpdateStrongSlotInternal(isolate_, p);
}
void VisitPointer(HeapObject host, MaybeObjectSlot p) override {
UpdateSlotInternal(p);
UpdateSlotInternal(isolate_, p);
}
void VisitPointers(HeapObject host, ObjectSlot start,
ObjectSlot end) override {
for (ObjectSlot p = start; p < end; ++p) {
UpdateStrongSlotInternal(p);
UpdateStrongSlotInternal(isolate_, p);
}
}
void VisitPointers(HeapObject host, MaybeObjectSlot start,
MaybeObjectSlot end) final {
for (MaybeObjectSlot p = start; p < end; ++p) {
UpdateSlotInternal(p);
UpdateSlotInternal(isolate_, p);
}
}
void VisitRootPointer(Root root, const char* description,
FullObjectSlot p) override {
UpdateRootSlotInternal(p);
UpdateRootSlotInternal(isolate_, p);
}
void VisitRootPointers(Root root, const char* description,
FullObjectSlot start, FullObjectSlot end) override {
for (FullObjectSlot p = start; p < end; ++p) {
UpdateRootSlotInternal(p);
UpdateRootSlotInternal(isolate_, p);
}
}
void VisitRootPointers(Root root, const char* description,
OffHeapObjectSlot start,
OffHeapObjectSlot end) override {
for (OffHeapObjectSlot p = start; p < end; ++p) {
UpdateRootSlotInternal(isolate_, p);
}
}
@@ -2754,22 +2783,32 @@ class PointersUpdatingVisitor : public ObjectVisitor, public RootVisitor {
}
private:
static inline SlotCallbackResult UpdateRootSlotInternal(FullObjectSlot slot) {
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(slot);
static inline SlotCallbackResult UpdateRootSlotInternal(
const Isolate* isolate, FullObjectSlot slot) {
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);
}
static inline SlotCallbackResult UpdateRootSlotInternal(
const Isolate* isolate, OffHeapObjectSlot slot) {
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);
}
static inline SlotCallbackResult UpdateStrongMaybeObjectSlotInternal(
MaybeObjectSlot slot) {
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(slot);
const Isolate* isolate, MaybeObjectSlot slot) {
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);
}
static inline SlotCallbackResult UpdateStrongSlotInternal(ObjectSlot slot) {
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(slot);
static inline SlotCallbackResult UpdateStrongSlotInternal(
const Isolate* isolate, ObjectSlot slot) {
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);
}
static inline SlotCallbackResult UpdateSlotInternal(MaybeObjectSlot slot) {
return UpdateSlot<AccessMode::NON_ATOMIC>(slot);
static inline SlotCallbackResult UpdateSlotInternal(const Isolate* isolate,
MaybeObjectSlot slot) {
return UpdateSlot<AccessMode::NON_ATOMIC>(isolate, slot);
}
const Isolate* isolate_;
};
static String UpdateReferenceInExternalStringTableEntry(Heap* heap,
@@ -3481,7 +3520,8 @@ class ToSpaceUpdatingItem : public UpdatingItem {
void ProcessVisitAll() {
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
"ToSpaceUpdatingItem::ProcessVisitAll");
PointersUpdatingVisitor visitor;
PointersUpdatingVisitor visitor(
GetIsolateForPtrComprFromOnHeapAddress(start_));
for (Address cur = start_; cur < end_;) {
HeapObject object = HeapObject::FromAddress(cur);
Map map = object.map();
@@ -3496,7 +3536,8 @@ class ToSpaceUpdatingItem : public UpdatingItem {
"ToSpaceUpdatingItem::ProcessVisitLive");
// For young generation evacuations we want to visit grey objects; for
// full MC, we need to visit black objects.
PointersUpdatingVisitor visitor;
PointersUpdatingVisitor visitor(
GetIsolateForPtrComprFromOnHeapAddress(start_));
for (auto object_and_size : LiveObjectRange<kAllLiveObjects>(
chunk_, marking_state_->bitmap(chunk_))) {
object_and_size.first.IterateBodyFast(&visitor);
@@ -3642,11 +3683,12 @@ class RememberedSetUpdatingItem : public UpdatingItem {
if ((updating_mode_ == RememberedSetUpdatingMode::ALL) &&
(chunk_->slot_set<OLD_TO_OLD, AccessMode::NON_ATOMIC>() != nullptr)) {
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(chunk_);
const Isolate* isolate = heap_->isolate();
RememberedSet<OLD_TO_OLD>::Iterate(
chunk_,
[&filter](MaybeObjectSlot slot) {
[&filter, isolate](MaybeObjectSlot slot) {
if (!filter.IsValid(slot.address())) return REMOVE_SLOT;
return UpdateSlot<AccessMode::NON_ATOMIC>(slot);
return UpdateSlot<AccessMode::NON_ATOMIC>(isolate, slot);
},
SlotSet::FREE_EMPTY_BUCKETS);
chunk_->ReleaseSlotSet<OLD_TO_OLD>();
@@ -3677,13 +3719,15 @@ class RememberedSetUpdatingItem : public UpdatingItem {
(chunk_->typed_slot_set<OLD_TO_OLD, AccessMode::NON_ATOMIC>() !=
nullptr)) {
CHECK_NE(chunk_->owner(), heap_->map_space());
RememberedSet<OLD_TO_OLD>::IterateTyped(
chunk_, [=](SlotType slot_type, Address slot) {
RememberedSet<OLD_TO_OLD>::IterateTyped(chunk_, [=](SlotType slot_type,
Address slot) {
// Using UpdateStrongSlot is OK here, because there are no weak
// typed slots.
const Isolate* isolate = heap_->isolate();
return UpdateTypedSlotHelper::UpdateTypedSlot(
heap_, slot_type, slot,
UpdateStrongSlot<AccessMode::NON_ATOMIC, FullMaybeObjectSlot>);
heap_, slot_type, slot, [isolate](FullMaybeObjectSlot slot) {
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);
});
});
}
}
@@ -3810,7 +3854,7 @@ class EphemeronTableUpdatingItem : public UpdatingItem {
void MarkCompactCollector::UpdatePointersAfterEvacuation() {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS);
PointersUpdatingVisitor updating_visitor;
PointersUpdatingVisitor updating_visitor(isolate());
{
TRACE_GC(heap()->tracer(),
@@ -4359,7 +4403,7 @@ void MinorMarkCompactCollector::UpdatePointersAfterEvacuation() {
TRACE_GC(heap()->tracer(),
GCTracer::Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS);
PointersUpdatingVisitor updating_visitor;
PointersUpdatingVisitor updating_visitor(isolate());
ItemParallelJob updating_job(isolate()->cancelable_task_manager(),
&page_parallel_job_semaphore_);
@@ -33,6 +33,11 @@ Object CompressedObjectSlot::operator*() const {
return Object(DecompressTaggedAny(address(), value));
}
Object CompressedObjectSlot::load(const Isolate* isolate) const {
Tagged_t value = *location();
return Object(DecompressTaggedAny(isolate, value));
}
void CompressedObjectSlot::store(Object value) const {
*location() = CompressTagged(value.ptr());
}
@@ -47,6 +52,11 @@ Object CompressedObjectSlot::Relaxed_Load() const {
return Object(DecompressTaggedAny(address(), value));
}
Object CompressedObjectSlot::Relaxed_Load(const Isolate* isolate) const {
AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location());
return Object(DecompressTaggedAny(isolate, value));
}
void CompressedObjectSlot::Relaxed_Store(Object value) const {
Tagged_t ptr = CompressTagged(value.ptr());
AsAtomicTagged::Relaxed_Store(location(), ptr);
@@ -75,6 +85,11 @@ MaybeObject CompressedMaybeObjectSlot::operator*() const {
return MaybeObject(DecompressTaggedAny(address(), value));
}
MaybeObject CompressedMaybeObjectSlot::load(const Isolate* isolate) const {
Tagged_t value = *location();
return MaybeObject(DecompressTaggedAny(isolate, value));
}
void CompressedMaybeObjectSlot::store(MaybeObject value) const {
*location() = CompressTagged(value.ptr());
}
@@ -84,6 +99,12 @@ MaybeObject CompressedMaybeObjectSlot::Relaxed_Load() const {
return MaybeObject(DecompressTaggedAny(address(), value));
}
MaybeObject CompressedMaybeObjectSlot::Relaxed_Load(
const Isolate* isolate) const {
AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location());
return MaybeObject(DecompressTaggedAny(isolate, value));
}
void CompressedMaybeObjectSlot::Relaxed_Store(MaybeObject value) const {
Tagged_t ptr = CompressTagged(value.ptr());
AsAtomicTagged::Relaxed_Store(location(), ptr);
@@ -105,6 +126,12 @@ HeapObjectReference CompressedHeapObjectSlot::operator*() const {
return HeapObjectReference(DecompressTaggedPointer(address(), value));
}
HeapObjectReference CompressedHeapObjectSlot::load(
const Isolate* isolate) const {
Tagged_t value = *location();
return HeapObjectReference(DecompressTaggedPointer(isolate, value));
}
void CompressedHeapObjectSlot::store(HeapObjectReference value) const {
*location() = CompressTagged(value.ptr());
}
@@ -119,6 +146,36 @@ void CompressedHeapObjectSlot::StoreHeapObject(HeapObject value) const {
*location() = CompressTagged(value.ptr());
}
//
// OffHeapCompressedObjectSlot implementation.
//
Object OffHeapCompressedObjectSlot::load(const Isolate* isolate) const {
Tagged_t value = *location();
return Object(DecompressTaggedAny(isolate, value));
}
void OffHeapCompressedObjectSlot::store(Object value) const {
*location() = CompressTagged(value.ptr());
}
Object OffHeapCompressedObjectSlot::Relaxed_Load(const Isolate* isolate) const {
AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location());
return Object(DecompressTaggedAny(isolate, value));
}
void OffHeapCompressedObjectSlot::Relaxed_Store(Object value) const {
Tagged_t ptr = CompressTagged(value.ptr());
AsAtomicTagged::Relaxed_Store(location(), ptr);
}
void OffHeapCompressedObjectSlot::Release_CompareAndSwap(Object old,
Object target) const {
Tagged_t old_ptr = CompressTagged(old.ptr());
Tagged_t target_ptr = CompressTagged(target.ptr());
AsAtomicTagged::Release_CompareAndSwap(location(), old_ptr, target_ptr);
}
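
// Usage sketch (hypothetical helper and backing store): an off-heap
// Tagged_t array can now be read and written through these slots, with
// the isolate passed explicitly for decompression.
inline Object ExampleOffHeapRoundTrip(const Isolate* isolate, Object value) {
  static Tagged_t backing[1] = {0};
  OffHeapCompressedObjectSlot slot(&backing[0]);
  slot.Relaxed_Store(value);          // compress, then relaxed atomic store
  return slot.Relaxed_Load(isolate);  // relaxed atomic load, then decompress
}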
} // namespace internal
} // namespace v8
@@ -38,11 +38,15 @@ class CompressedObjectSlot : public SlotBase<CompressedObjectSlot, Tagged_t> {
// raw value without decompression.
inline bool contains_value(Address raw_value) const;
// TODO(leszeks): Consider deprecating the operator* load, and always pass the
// Isolate.
inline Object operator*() const;
inline Object load(const Isolate* isolate) const;
inline void store(Object value) const;
inline Object Acquire_Load() const;
inline Object Relaxed_Load() const;
inline Object Relaxed_Load(const Isolate* isolate) const;
inline void Relaxed_Store(Object value) const;
inline void Release_Store(Object value) const;
inline Object Release_CompareAndSwap(Object old, Object target) const;
@@ -73,9 +77,11 @@ class CompressedMaybeObjectSlot
: SlotBase(slot.address()) {}
inline MaybeObject operator*() const;
inline MaybeObject load(const Isolate* isolate) const;
inline void store(MaybeObject value) const;
inline MaybeObject Relaxed_Load() const;
inline MaybeObject Relaxed_Load(const Isolate* isolate) const;
inline void Relaxed_Store(MaybeObject value) const;
inline void Release_CompareAndSwap(MaybeObject old, MaybeObject target) const;
};
@@ -99,6 +105,7 @@ class CompressedHeapObjectSlot
: SlotBase(slot.address()) {}
inline HeapObjectReference operator*() const;
inline HeapObjectReference load(const Isolate* isolate) const;
inline void store(HeapObjectReference value) const;
inline HeapObject ToHeapObject() const;
@@ -106,6 +113,32 @@ class CompressedHeapObjectSlot
inline void StoreHeapObject(HeapObject value) const;
};
// An OffHeapCompressedObjectSlot instance describes a kTaggedSize-sized field
// ("slot") holding a compressed tagged pointer (smi or heap object).
// Unlike CompressedObjectSlot, it does not assume that the slot is on the heap,
// and so does not provide an operator* with implicit Isolate* calculation.
// Its address() is the address of the slot.
// The slot's contents can be read and written using load() and store().
class OffHeapCompressedObjectSlot
: public SlotBase<OffHeapCompressedObjectSlot, Tagged_t> {
public:
using TObject = Object;
using THeapObjectSlot = OffHeapCompressedObjectSlot;
static constexpr bool kCanBeWeak = false;
OffHeapCompressedObjectSlot() : SlotBase(kNullAddress) {}
explicit OffHeapCompressedObjectSlot(const uint32_t* ptr)
: SlotBase(reinterpret_cast<Address>(ptr)) {}
inline Object load(const Isolate* isolate) const;
inline void store(Object value) const;
inline Object Relaxed_Load(const Isolate* isolate) const;
inline void Relaxed_Store(Object value) const;
inline void Release_CompareAndSwap(Object old, Object target) const;
};
} // namespace internal
} // namespace v8
@@ -5,14 +5,14 @@
#ifndef V8_OBJECTS_SLOTS_INL_H_
#define V8_OBJECTS_SLOTS_INL_H_
#include "src/objects/slots.h"
#include "src/base/atomic-utils.h"
#include "src/common/globals.h"
#include "src/common/ptr-compr-inl.h"
#include "src/objects/compressed-slots.h"
#include "src/objects/heap-object.h"
#include "src/objects/maybe-object.h"
#include "src/objects/objects.h"
#include "src/objects/slots.h"
#include "src/utils/memcopy.h"
namespace v8 {
@@ -29,7 +29,9 @@ bool FullObjectSlot::contains_value(Address raw_value) const {
return base::AsAtomicPointer::Relaxed_Load(location()) == raw_value;
}
const Object FullObjectSlot::operator*() const { return Object(*location()); }
Object FullObjectSlot::operator*() const { return Object(*location()); }
Object FullObjectSlot::load(const Isolate* isolate) const { return **this; }
void FullObjectSlot::store(Object value) const { *location() = value.ptr(); }
@@ -41,6 +43,10 @@ Object FullObjectSlot::Relaxed_Load() const {
return Object(base::AsAtomicPointer::Relaxed_Load(location()));
}
Object FullObjectSlot::Relaxed_Load(const Isolate* isolate) const {
return Relaxed_Load();
}
void FullObjectSlot::Relaxed_Store(Object value) const {
base::AsAtomicPointer::Relaxed_Store(location(), value.ptr());
}
@@ -65,10 +71,14 @@ Object FullObjectSlot::Release_CompareAndSwap(Object old, Object target) const {
// FullMaybeObjectSlot implementation.
//
const MaybeObject FullMaybeObjectSlot::operator*() const {
MaybeObject FullMaybeObjectSlot::operator*() const {
return MaybeObject(*location());
}
MaybeObject FullMaybeObjectSlot::load(const Isolate* isolate) const {
return **this;
}
void FullMaybeObjectSlot::store(MaybeObject value) const {
*location() = value.ptr();
}
@@ -77,6 +87,10 @@ MaybeObject FullMaybeObjectSlot::Relaxed_Load() const {
return MaybeObject(base::AsAtomicPointer::Relaxed_Load(location()));
}
MaybeObject FullMaybeObjectSlot::Relaxed_Load(const Isolate* isolate) const {
return Relaxed_Load();
}
void FullMaybeObjectSlot::Relaxed_Store(MaybeObject value) const {
base::AsAtomicPointer::Relaxed_Store(location(), value->ptr());
}
@@ -91,10 +105,14 @@ void FullMaybeObjectSlot::Release_CompareAndSwap(MaybeObject old,
// FullHeapObjectSlot implementation.
//
const HeapObjectReference FullHeapObjectSlot::operator*() const {
HeapObjectReference FullHeapObjectSlot::operator*() const {
return HeapObjectReference(*location());
}
HeapObjectReference FullHeapObjectSlot::load(const Isolate* isolate) const {
return **this;
}
void FullHeapObjectSlot::store(HeapObjectReference value) const {
*location() = value.ptr();
}
@@ -121,17 +139,23 @@ inline void CopyTagged(Address dst, const Address src, size_t num_tagged) {
}
// Sets |counter| number of kTaggedSize-sized values starting at |start| slot.
inline void MemsetTagged(ObjectSlot start, Object value, size_t counter) {
inline void MemsetTagged(Tagged_t* start, Object value, size_t counter) {
#ifdef V8_COMPRESS_POINTERS
Tagged_t raw_value = CompressTagged(value.ptr());
STATIC_ASSERT(kTaggedSize == kInt32Size);
MemsetInt32(reinterpret_cast<int32_t*>(start.location()), raw_value, counter);
MemsetUint32(start, raw_value, counter);
#else
Address raw_value = value.ptr();
MemsetPointer(start.location(), raw_value, counter);
MemsetPointer(start, raw_value, counter);
#endif
}
// Sets |counter| number of kTaggedSize-sized values starting at |start| slot.
template <typename T>
inline void MemsetTagged(SlotBase<T, Tagged_t> start, Object value,
size_t counter) {
MemsetTagged(start.location(), value, counter);
}
// Sets |counter| number of kSystemPointerSize-sized values starting at |start|
// slot.
inline void MemsetPointer(FullObjectSlot start, Object value, size_t counter) {
@@ -109,11 +109,13 @@ class FullObjectSlot : public SlotBase<FullObjectSlot, Address> {
// raw value.
inline bool contains_value(Address raw_value) const;
inline const Object operator*() const;
inline Object operator*() const;
inline Object load(const Isolate* isolate) const;
inline void store(Object value) const;
inline Object Acquire_Load() const;
inline Object Relaxed_Load() const;
inline Object Relaxed_Load(const Isolate* isolate) const;
inline void Relaxed_Store(Object value) const;
inline void Release_Store(Object value) const;
inline Object Relaxed_CompareAndSwap(Object old, Object target) const;
@@ -143,10 +145,12 @@ class FullMaybeObjectSlot
explicit FullMaybeObjectSlot(SlotBase<T, TData, kSlotDataAlignment> slot)
: SlotBase(slot.address()) {}
inline const MaybeObject operator*() const;
inline MaybeObject operator*() const;
inline MaybeObject load(const Isolate* isolate) const;
inline void store(MaybeObject value) const;
inline MaybeObject Relaxed_Load() const;
inline MaybeObject Relaxed_Load(const Isolate* isolate) const;
inline void Relaxed_Store(MaybeObject value) const;
inline void Release_CompareAndSwap(MaybeObject old, MaybeObject target) const;
};
@@ -168,7 +172,8 @@ class FullHeapObjectSlot : public SlotBase<FullHeapObjectSlot, Address> {
explicit FullHeapObjectSlot(SlotBase<T, TData, kSlotDataAlignment> slot)
: SlotBase(slot.address()) {}
inline const HeapObjectReference operator*() const;
inline HeapObjectReference operator*() const;
inline HeapObjectReference load(const Isolate* isolate) const;
inline void store(HeapObjectReference value) const;
inline HeapObject ToHeapObject() const;
@@ -254,6 +259,19 @@ class UnalignedSlot : public SlotBase<UnalignedSlot<T>, T, 1> {
}
};
// An off-heap uncompressed object slot can be the same as an on-heap one, with
// a few methods deleted.
class OffHeapFullObjectSlot : public FullObjectSlot {
public:
OffHeapFullObjectSlot() : FullObjectSlot() {}
explicit OffHeapFullObjectSlot(const Address* ptr) : FullObjectSlot(ptr) {}
inline Object operator*() const = delete;
using FullObjectSlot::Relaxed_Load;
inline Object Relaxed_Load() const = delete;
};
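
// Usage sketch (hypothetical helper): deleting operator* and the plain
// Relaxed_Load() forces call sites to name the isolate, which keeps the
// compressed and uncompressed off-heap slot types interchangeable.
inline Object ExampleOffHeapFullLoad(const Isolate* isolate,
                                     OffHeapFullObjectSlot slot) {
  // Object o = *slot;        // would not compile: operator* is deleted
  return slot.load(isolate);  // explicit load works in both build modes
}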
} // namespace internal
} // namespace v8
@@ -7,6 +7,8 @@
#include "src/base/macros.h"
#include "src/common/assert-scope.h"
#include "src/common/globals.h"
#include "src/common/ptr-compr-inl.h"
#include "src/objects/internal-index.h"
#include "src/objects/object-list-macros.h"
#include "src/objects/slots-inl.h"
#include "src/objects/slots.h"
@@ -84,12 +86,19 @@ bool KeyIsMatch(StringTableKey* key, String string) {
class StringTable::Data {
public:
static std::unique_ptr<Data> New(int capacity);
static void Resize(std::unique_ptr<Data>& data, int capacity);
static void Resize(const Isolate* isolate, std::unique_ptr<Data>& data,
int capacity);
Object Get(InternalIndex index) const { return elements_[index.as_uint32()]; }
OffHeapObjectSlot slot(InternalIndex index) const {
return OffHeapObjectSlot(&elements_[index.as_uint32()]);
}
Object Get(const Isolate* isolate, InternalIndex index) const {
return slot(index).Relaxed_Load(isolate);
}
void Set(InternalIndex index, String entry) {
elements_[index.as_uint32()] = entry;
slot(index).Relaxed_Store(entry);
}
void ElementAdded() {
@@ -122,12 +131,14 @@ class StringTable::Data {
int number_of_deleted_elements() const { return number_of_deleted_elements_; }
template <typename StringTableKey>
InternalIndex FindEntry(StringTableKey* key, uint32_t hash) const;
InternalIndex FindEntry(const Isolate* isolate, StringTableKey* key,
uint32_t hash) const;
InternalIndex FindInsertionEntry(uint32_t hash) const;
InternalIndex FindInsertionEntry(const Isolate* isolate, uint32_t hash) const;
template <typename StringTableKey>
InternalIndex FindEntryOrInsertionEntry(StringTableKey* key,
InternalIndex FindEntryOrInsertionEntry(const Isolate* isolate,
StringTableKey* key,
uint32_t hash) const;
// Helper method for StringTable::TryStringToIndexOrLookupExisting.
@@ -141,7 +152,7 @@ class StringTable::Data {
Data* PreviousData() { return previous_data_.get(); }
void DropPreviousData() { previous_data_.reset(); }
void Print() const;
void Print(const Isolate* isolate) const;
size_t GetCurrentMemoryUsage() const;
private:
@@ -162,8 +173,7 @@ class StringTable::Data {
int number_of_elements_;
int number_of_deleted_elements_;
int capacity_;
// TODO(leszeks): Consider compressing these pointers.
Object elements_[1];
Tagged_t elements_[1];
};
void* StringTable::Data::operator new(size_t size, int capacity) {
@@ -172,23 +182,23 @@ void* StringTable::Data::operator new(size_t size, int capacity) {
// Make sure that the elements_ array is at the end of Data, with no padding,
// so that subsequent elements can be accessed as offsets from elements_.
STATIC_ASSERT(offsetof(StringTable::Data, elements_) ==
sizeof(StringTable::Data) - sizeof(Object));
sizeof(StringTable::Data) - sizeof(Tagged_t));
// Make sure that elements_ is aligned when StringTable::Data is aligned.
STATIC_ASSERT(
(alignof(StringTable::Data) + offsetof(StringTable::Data, elements_)) %
kSystemPointerSize ==
kTaggedSize ==
0);
// Subtract 1 from capacity, as the member elements_ already supplies the
// storage for the first element.
return AlignedAlloc(size + (capacity - 1) * sizeof(Object),
return AlignedAlloc(size + (capacity - 1) * sizeof(Tagged_t),
alignof(StringTable::Data));
}
void StringTable::Data::operator delete(void* table) { AlignedFree(table); }
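
// Sketch of the same allocation trick in isolation (hypothetical names): a
// header struct whose trailing one-element array is over-allocated, so the
// remaining entries live contiguously after the fixed-size fields.
struct ExampleTable {
  int capacity_;
  uint32_t entries_[1];  // entry 0 is part of the struct; the rest follow

  static ExampleTable* New(int capacity) {
    // One entry is already included in sizeof(ExampleTable).
    void* memory =
        AlignedAlloc(sizeof(ExampleTable) + (capacity - 1) * sizeof(uint32_t),
                     alignof(ExampleTable));
    return new (memory) ExampleTable{capacity};
  }
};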
size_t StringTable::Data::GetCurrentMemoryUsage() const {
size_t usage = sizeof(*this) + (capacity_ - 1) * sizeof(Object);
size_t usage = sizeof(*this) + (capacity_ - 1) * sizeof(Tagged_t);
if (previous_data_) {
usage += previous_data_->GetCurrentMemoryUsage();
}
@@ -200,15 +210,16 @@ StringTable::Data::Data(int capacity)
number_of_elements_(0),
number_of_deleted_elements_(0),
capacity_(capacity) {
FullObjectSlot first_slot(&elements_[0]);
MemsetPointer(first_slot, empty_element(), capacity);
OffHeapObjectSlot first_slot = slot(InternalIndex(0));
MemsetTagged(first_slot, empty_element(), capacity);
}
std::unique_ptr<StringTable::Data> StringTable::Data::New(int capacity) {
return std::unique_ptr<Data>(new (capacity) Data(capacity));
}
void StringTable::Data::Resize(std::unique_ptr<Data>& data, int capacity) {
void StringTable::Data::Resize(const Isolate* isolate,
std::unique_ptr<Data>& data, int capacity) {
std::unique_ptr<Data> new_data(new (capacity) Data(capacity));
DCHECK_LT(data->number_of_elements(), new_data->capacity());
@@ -218,12 +229,12 @@ void StringTable::Data::Resize(std::unique_ptr<Data>& data, int capacity) {
// Rehash the elements.
for (InternalIndex i : InternalIndex::Range(data->capacity())) {
Object element = data->Get(i);
Object element = data->Get(isolate, i);
if (element == empty_element() || element == deleted_element()) continue;
String string = String::cast(element);
uint32_t hash = string.Hash();
InternalIndex insertion_index = new_data->FindInsertionEntry(hash);
new_data->elements_[insertion_index.as_uint32()] = element;
InternalIndex insertion_index = new_data->FindInsertionEntry(isolate, hash);
new_data->Set(insertion_index, string);
}
new_data->number_of_elements_ = data->number_of_elements();
@@ -236,14 +247,17 @@ void StringTable::Data::Resize(std::unique_ptr<Data>& data, int capacity) {
}
template <typename StringTableKey>
InternalIndex StringTable::Data::FindEntry(StringTableKey* key,
InternalIndex StringTable::Data::FindEntry(const Isolate* isolate,
StringTableKey* key,
uint32_t hash) const {
uint32_t count = 1;
// EnsureCapacity will guarantee the hash table is never full.
DCHECK_LT(number_of_elements_, capacity_);
for (InternalIndex entry = FirstProbe(hash, capacity_);;
entry = NextProbe(entry, count++, capacity_)) {
Object element = Get(entry);
// TODO(leszeks): Consider delaying the decompression until after the
// comparisons against empty/deleted.
Object element = Get(isolate, entry);
if (element == empty_element()) return InternalIndex::NotFound();
if (element == deleted_element()) continue;
String string = String::cast(element);
@@ -251,13 +265,16 @@ InternalIndex StringTable::Data::FindEntry(StringTableKey* key,
}
}
InternalIndex StringTable::Data::FindInsertionEntry(uint32_t hash) const {
InternalIndex StringTable::Data::FindInsertionEntry(const Isolate* isolate,
uint32_t hash) const {
uint32_t count = 1;
// EnsureCapacity will guarantee the hash table is never full.
DCHECK_LT(number_of_elements_, capacity_);
for (InternalIndex entry = FirstProbe(hash, capacity_);;
entry = NextProbe(entry, count++, capacity_)) {
Object element = Get(entry);
// TODO(leszeks): Consider delaying the decompression until after the
// comparisons against empty/deleted.
Object element = Get(isolate, entry);
if (element == empty_element() || element == deleted_element())
return entry;
}
@@ -265,14 +282,16 @@ InternalIndex StringTable::Data::FindInsertionEntry(uint32_t hash) const {
template <typename StringTableKey>
InternalIndex StringTable::Data::FindEntryOrInsertionEntry(
StringTableKey* key, uint32_t hash) const {
const Isolate* isolate, StringTableKey* key, uint32_t hash) const {
InternalIndex insertion_entry = InternalIndex::NotFound();
uint32_t count = 1;
// EnsureCapacity will guarantee the hash table is never full.
DCHECK_LT(number_of_elements_, capacity_);
for (InternalIndex entry = FirstProbe(hash, capacity_);;
entry = NextProbe(entry, count++, capacity_)) {
Object element = Get(entry);
// TODO(leszeks): Consider delaying the decompression until after the
// comparisons against empty/deleted.
Object element = Get(isolate, entry);
if (element == empty_element()) {
// Empty entry, it's our insertion entry if there was no previous Hole.
if (insertion_entry.is_not_found()) return entry;
@@ -292,16 +311,16 @@ InternalIndex StringTable::Data::FindEntryOrInsertionEntry(
}
void StringTable::Data::IterateElements(RootVisitor* visitor) {
FullObjectSlot first_slot(&elements_[0]);
FullObjectSlot end_slot(&elements_[capacity_]);
OffHeapObjectSlot first_slot = slot(InternalIndex(0));
OffHeapObjectSlot end_slot = slot(InternalIndex(capacity_));
visitor->VisitRootPointers(Root::kStringTable, nullptr, first_slot, end_slot);
}
void StringTable::Data::Print() const {
void StringTable::Data::Print(const Isolate* isolate) const {
OFStream os(stdout);
os << "StringTable {" << std::endl;
for (int i = 0; i < capacity_; ++i) {
os << " " << i << ": " << Brief(elements_[i]) << std::endl;
for (InternalIndex i : InternalIndex::Range(capacity_)) {
os << " " << i.as_uint32() << ": " << Brief(Get(isolate, i)) << std::endl;
}
os << "}" << std::endl;
}
@@ -439,9 +458,9 @@ Handle<String> StringTable::LookupKey(Isolate* isolate, StringTableKey* key) {
// because the new table won't delete its corresponding entry until the
// string is dead, in which case it will die in this table too and worst
// case we'll have a false miss.
InternalIndex entry = data->FindEntry(key, key->hash());
InternalIndex entry = data->FindEntry(isolate, key, key->hash());
if (entry.is_found()) {
return handle(String::cast(data->Get(entry)), isolate);
return handle(String::cast(data->Get(isolate, entry)), isolate);
}
// No entry found, so adding new string.
@@ -455,15 +474,16 @@ Handle<String> StringTable::LookupKey(Isolate* isolate, StringTableKey* key) {
{
base::MutexGuard table_write_guard(&write_mutex_);
EnsureCapacity(1);
EnsureCapacity(isolate, 1);
// Reload the data pointer in case EnsureCapacity changed it.
StringTable::Data* data = data_.get();
// Check one last time if the key is present in the table, in case it was
// added after the check.
InternalIndex entry = data->FindEntryOrInsertionEntry(key, key->hash());
InternalIndex entry =
data->FindEntryOrInsertionEntry(isolate, key, key->hash());
Object element = data->Get(entry);
Object element = data->Get(isolate, entry);
if (element == empty_element()) {
// This entry is empty, so write it and register that we added an
// element.
@@ -496,7 +516,8 @@ template Handle<String> StringTable::LookupKey(Isolate* isolate,
template Handle<String> StringTable::LookupKey(Isolate* isolate,
StringTableInsertionKey* key);
void StringTable::EnsureCapacity(int additional_elements) {
void StringTable::EnsureCapacity(const Isolate* isolate,
int additional_elements) {
// This call is only allowed while the write mutex is held.
write_mutex_.AssertHeld();
@@ -522,7 +543,7 @@ void StringTable::EnsureCapacity(int additional_elements) {
}
if (new_capacity != -1) {
Data::Resize(data_, new_capacity);
Data::Resize(isolate, data_, new_capacity);
}
}
@@ -569,14 +590,14 @@ Address StringTable::Data::TryStringToIndexOrLookupExisting(Isolate* isolate,
StringTable::Data* string_table_data = isolate->string_table()->data_.get();
InternalIndex entry = string_table_data->FindEntry(&key, key.hash());
InternalIndex entry = string_table_data->FindEntry(isolate, &key, key.hash());
if (entry.is_not_found()) {
// A string that's not an array index, and not in the string table,
// cannot have been used as a property name before.
return Smi::FromInt(ResultSentinel::kNotFound).ptr();
}
String internalized = String::cast(string_table_data->Get(entry));
String internalized = String::cast(string_table_data->Get(isolate, entry));
if (FLAG_thin_strings) {
string.MakeThin(isolate, internalized);
}
@@ -621,7 +642,7 @@ Address StringTable::TryStringToIndexOrLookupExisting(Isolate* isolate,
isolate, string, source, start);
}
void StringTable::Print() const { data_->Print(); }
void StringTable::Print(const Isolate* isolate) const { data_->Print(isolate); }
size_t StringTable::GetCurrentMemoryUsage() const {
return sizeof(*this) + data_->GetCurrentMemoryUsage();
......
......@@ -74,7 +74,7 @@ class V8_EXPORT_PRIVATE StringTable {
static Address TryStringToIndexOrLookupExisting(Isolate* isolate,
Address raw_string);
void Print() const;
void Print(const Isolate* isolate) const;
size_t GetCurrentMemoryUsage() const;
// The following methods must be called either while holding the write lock,
@@ -84,7 +84,7 @@ class V8_EXPORT_PRIVATE StringTable {
void NotifyElementsRemoved(int count);
private:
void EnsureCapacity(int additional_elements);
void EnsureCapacity(const Isolate* isolate, int additional_elements);
class Data;
std::unique_ptr<Data> data_;
@@ -72,6 +72,20 @@ class RootVisitor {
VisitRootPointers(root, description, p, p + 1);
}
// Visits a contiguous array of off-heap pointers in the half-open range
// [start, end). Any or all of the values may be modified on return.
virtual void VisitRootPointers(Root root, const char* description,
OffHeapObjectSlot start,
OffHeapObjectSlot end) {
// This should be implemented for any visitor that visits the string table.
// If we ever add new off-heap data-structures that we want to walk as roots
// using this function, we should make it generic, by
//
// 1) Making this function pure virtual, and
// 2) Implementing it for all visitors.
UNREACHABLE();
}
// Intended for serialization/deserialization checking: insert, or
// check for the presence of, a tag at this position in the stream.
// Also used for marking up GC roots in heap snapshots.
@@ -1468,6 +1468,17 @@ class RootsReferencesExtractor : public RootVisitor {
}
}
void VisitRootPointers(Root root, const char* description,
OffHeapObjectSlot start,
OffHeapObjectSlot end) override {
DCHECK_EQ(root, Root::kStringTable);
const Isolate* isolate = Isolate::FromHeap(explorer_->heap_);
for (OffHeapObjectSlot p = start; p < end; ++p) {
explorer_->SetGcSubrootReference(root, description, visiting_weak_roots_,
p.load(isolate));
}
}
private:
V8HeapExplorer* explorer_;
bool visiting_weak_roots_;
@@ -1766,22 +1777,38 @@ void V8HeapExplorer::TagObject(Object obj, const char* tag) {
class GlobalObjectsEnumerator : public RootVisitor {
public:
explicit GlobalObjectsEnumerator(Isolate* isolate) : isolate_(isolate) {}
void VisitRootPointers(Root root, const char* description,
FullObjectSlot start, FullObjectSlot end) override {
for (FullObjectSlot p = start; p < end; ++p) {
if (!(*p).IsNativeContext()) continue;
JSObject proxy = Context::cast(*p).global_proxy();
if (!proxy.IsJSGlobalProxy()) continue;
Object global = proxy.map().prototype();
if (!global.IsJSGlobalObject()) continue;
objects_.push_back(Handle<JSGlobalObject>(JSGlobalObject::cast(global),
proxy.GetIsolate()));
VisitRootPointersImpl(root, description, start, end);
}
void VisitRootPointers(Root root, const char* description,
OffHeapObjectSlot start,
OffHeapObjectSlot end) override {
VisitRootPointersImpl(root, description, start, end);
}
int count() const { return static_cast<int>(objects_.size()); }
Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
private:
template <typename TSlot>
void VisitRootPointersImpl(Root root, const char* description, TSlot start,
TSlot end) {
for (TSlot p = start; p < end; ++p) {
Object o = p.load(isolate_);
if (!o.IsNativeContext(isolate_)) continue;
JSObject proxy = Context::cast(o).global_proxy();
if (!proxy.IsJSGlobalProxy(isolate_)) continue;
Object global = proxy.map(isolate_).prototype(isolate_);
if (!global.IsJSGlobalObject(isolate_)) continue;
objects_.push_back(handle(JSGlobalObject::cast(global), isolate_));
}
}
Isolate* isolate_;
std::vector<Handle<JSGlobalObject>> objects_;
};
@@ -1790,7 +1817,7 @@ class GlobalObjectsEnumerator : public RootVisitor {
void V8HeapExplorer::TagGlobalObjects() {
Isolate* isolate = Isolate::FromHeap(heap_);
HandleScope scope(isolate);
GlobalObjectsEnumerator enumerator;
GlobalObjectsEnumerator enumerator(isolate);
isolate->global_handles()->IterateAllRoots(&enumerator);
std::vector<const char*> urls(enumerator.count());
for (int i = 0, l = enumerator.count(); i < l; ++i) {
@@ -213,9 +213,16 @@ void StartupSerializer::SerializeStringTable(StringTable* string_table) {
void VisitRootPointers(Root root, const char* description,
FullObjectSlot start, FullObjectSlot end) override {
UNREACHABLE();
}
void VisitRootPointers(Root root, const char* description,
OffHeapObjectSlot start,
OffHeapObjectSlot end) override {
DCHECK_EQ(root, Root::kStringTable);
for (FullObjectSlot current = start; current < end; ++current) {
Object obj = *current;
Isolate* isolate = serializer_->isolate();
for (OffHeapObjectSlot current = start; current < end; ++current) {
Object obj = current.load(isolate);
if (obj.IsHeapObject()) {
DCHECK(obj.IsInternalizedString());
serializer_->SerializeObject(HeapObject::cast(obj));
@@ -187,7 +187,7 @@ inline void CopyBytes(T* dst, const T* src, size_t num_bytes) {
CopyImpl<kMinComplexMemCopy>(dst, src, num_bytes);
}
inline void MemsetInt32(int32_t* dest, int32_t value, size_t counter) {
inline void MemsetUint32(uint32_t* dest, uint32_t value, size_t counter) {
#if V8_HOST_ARCH_IA32 || V8_HOST_ARCH_X64
#define STOS "stosl"
#endif