Commit e5a50904 authored by Michael Lippautz, committed by V8 LUCI CQ

api: Provide write barrier in TracedReferenceBase

TracedReferenceBase uses (traced) global handles to implement the
references. Provide a write barrier in the corresponding handle
methods. Doing so
- avoids bugs, since embedders no longer have to take care of write
  barrier management themselves;
- speeds up the barrier, as it is better integrated into the handle
  methods.

Drive-by: Write barriers are not needed on initializing stores.

Bug: v8:12165
Change-Id: Ie49cc3783aeed576fd46c957c473c61362fefbf2
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3247039
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#77593}
parent cced52a9
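Illustration (not part of the change itself): a minimal embedder-side sketch, using only the public v8::TracedReference API touched by this diff, of the behavior described in the commit message. Construction is an initializing store and needs no barrier; a later Reset() is an assigning store, and the barrier now fires inside the handle method instead of being the embedder's responsibility.

// Sketch only; assumes the usual embedder setup (isolate, context, handle scope).
void Example(v8::Isolate* isolate, v8::Local<v8::Object> obj) {
  // Initializing store: no write barrier is required, and none is emitted.
  v8::TracedReference<v8::Object> ref(isolate, obj);

  // Assigning store: the write barrier now runs inside Reset(), so the
  // embedder no longer has to manage it manually.
  ref.Reset(isolate, obj);
}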
@@ -26,13 +26,20 @@ namespace v8 {
class Value;
namespace internal {
class BasicTracedReferenceExtractor;
-}  // namespace internal
-namespace api_internal {
+enum class GlobalHandleDestructionMode { kWithDestructor, kWithoutDestructor };
+enum class GlobalHandleStoreMode {
+  kInitializingStore,
+  kAssigningStore,
+};
V8_EXPORT internal::Address* GlobalizeTracedReference(
    internal::Isolate* isolate, internal::Address* handle,
-    internal::Address* slot, bool has_destructor);
+    internal::Address* slot, GlobalHandleDestructionMode destruction_mode,
+    GlobalHandleStoreMode store_mode);
V8_EXPORT void MoveTracedGlobalReference(internal::Address** from,
                                         internal::Address** to);
V8_EXPORT void CopyTracedGlobalReference(const internal::Address* const* from,

@@ -41,7 +48,8 @@ V8_EXPORT void DisposeTracedGlobal(internal::Address* global_handle);
V8_EXPORT void SetFinalizationCallbackTraced(
    internal::Address* location, void* parameter,
    WeakCallbackInfo<void>::Callback callback);
-}  // namespace api_internal
+}  // namespace internal

/**
 * Deprecated. Use |TracedReference<T>| instead.
@@ -164,15 +172,15 @@ class BasicTracedReference : public TracedReferenceBase {
  }

 private:
-  enum DestructionMode { kWithDestructor, kWithoutDestructor };
  /**
   * An empty BasicTracedReference without storage cell.
   */
  BasicTracedReference() = default;
-  V8_INLINE static internal::Address* New(Isolate* isolate, T* that, void* slot,
-                                          DestructionMode destruction_mode);
+  V8_INLINE static internal::Address* New(
+      Isolate* isolate, T* that, void* slot,
+      internal::GlobalHandleDestructionMode destruction_mode,
+      internal::GlobalHandleStoreMode store_mode);
  friend class EmbedderHeapTracer;
  template <typename F>
@@ -215,8 +223,10 @@ class TracedGlobal : public BasicTracedReference<T> {
   */
  template <class S>
  TracedGlobal(Isolate* isolate, Local<S> that) : BasicTracedReference<T>() {
-    this->val_ = this->New(isolate, that.val_, &this->val_,
-                           BasicTracedReference<T>::kWithDestructor);
+    this->val_ =
+        this->New(isolate, that.val_, &this->val_,
+                  internal::GlobalHandleDestructionMode::kWithDestructor,
+                  internal::GlobalHandleStoreMode::kInitializingStore);
    static_assert(std::is_base_of<T, S>::value, "type check");
  }

@@ -338,8 +348,10 @@ class TracedReference : public BasicTracedReference<T> {
   */
  template <class S>
  TracedReference(Isolate* isolate, Local<S> that) : BasicTracedReference<T>() {
-    this->val_ = this->New(isolate, that.val_, &this->val_,
-                           BasicTracedReference<T>::kWithoutDestructor);
+    this->val_ =
+        this->New(isolate, that.val_, &this->val_,
+                  internal::GlobalHandleDestructionMode::kWithoutDestructor,
+                  internal::GlobalHandleStoreMode::kInitializingStore);
    static_assert(std::is_base_of<T, S>::value, "type check");
  }

@@ -420,18 +432,19 @@ class TracedReference : public BasicTracedReference<T> {
// --- Implementation ---
template <class T>
internal::Address* BasicTracedReference<T>::New(
-    Isolate* isolate, T* that, void* slot, DestructionMode destruction_mode) {
+    Isolate* isolate, T* that, void* slot,
+    internal::GlobalHandleDestructionMode destruction_mode,
+    internal::GlobalHandleStoreMode store_mode) {
  if (that == nullptr) return nullptr;
  internal::Address* p = reinterpret_cast<internal::Address*>(that);
-  return api_internal::GlobalizeTracedReference(
+  return internal::GlobalizeTracedReference(
      reinterpret_cast<internal::Isolate*>(isolate), p,
-      reinterpret_cast<internal::Address*>(slot),
-      destruction_mode == kWithDestructor);
+      reinterpret_cast<internal::Address*>(slot), destruction_mode, store_mode);
}

void TracedReferenceBase::Reset() {
  if (IsEmpty()) return;
-  api_internal::DisposeTracedGlobal(reinterpret_cast<internal::Address*>(val_));
+  internal::DisposeTracedGlobal(reinterpret_cast<internal::Address*>(val_));
  SetSlotThreadSafe(nullptr);
}
@@ -484,7 +497,8 @@ void TracedGlobal<T>::Reset(Isolate* isolate, const Local<S>& other) {
  Reset();
  if (other.IsEmpty()) return;
  this->val_ = this->New(isolate, other.val_, &this->val_,
-                         BasicTracedReference<T>::kWithDestructor);
+                         internal::GlobalHandleDestructionMode::kWithDestructor,
+                         internal::GlobalHandleStoreMode::kAssigningStore);
}

template <class T>

@@ -506,7 +520,7 @@ TracedGlobal<T>& TracedGlobal<T>::operator=(const TracedGlobal<S>& rhs) {
template <class T>
TracedGlobal<T>& TracedGlobal<T>::operator=(TracedGlobal&& rhs) {
  if (this != &rhs) {
-    api_internal::MoveTracedGlobalReference(
+    internal::MoveTracedGlobalReference(
        reinterpret_cast<internal::Address**>(&rhs.val_),
        reinterpret_cast<internal::Address**>(&this->val_));
  }

@@ -518,7 +532,7 @@ TracedGlobal<T>& TracedGlobal<T>::operator=(const TracedGlobal& rhs) {
  if (this != &rhs) {
    this->Reset();
    if (rhs.val_ != nullptr) {
-      api_internal::CopyTracedGlobalReference(
+      internal::CopyTracedGlobalReference(
          reinterpret_cast<const internal::Address* const*>(&rhs.val_),
          reinterpret_cast<internal::Address**>(&this->val_));
    }

@@ -534,7 +548,8 @@ void TracedReference<T>::Reset(Isolate* isolate, const Local<S>& other) {
  if (other.IsEmpty()) return;
  this->SetSlotThreadSafe(
      this->New(isolate, other.val_, &this->val_,
-                BasicTracedReference<T>::kWithoutDestructor));
+                internal::GlobalHandleDestructionMode::kWithoutDestructor,
+                internal::GlobalHandleStoreMode::kAssigningStore));
}

template <class T>
@@ -557,7 +572,7 @@ TracedReference<T>& TracedReference<T>::operator=(
template <class T>
TracedReference<T>& TracedReference<T>::operator=(TracedReference&& rhs) {
  if (this != &rhs) {
-    api_internal::MoveTracedGlobalReference(
+    internal::MoveTracedGlobalReference(
        reinterpret_cast<internal::Address**>(&rhs.val_),
        reinterpret_cast<internal::Address**>(&this->val_));
  }

@@ -569,7 +584,7 @@ TracedReference<T>& TracedReference<T>::operator=(const TracedReference& rhs) {
  if (this != &rhs) {
    this->Reset();
    if (rhs.val_ != nullptr) {
-      api_internal::CopyTracedGlobalReference(
+      internal::CopyTracedGlobalReference(
          reinterpret_cast<const internal::Address* const*>(&rhs.val_),
          reinterpret_cast<internal::Address**>(&this->val_));
    }

@@ -596,7 +611,7 @@ uint16_t TracedReferenceBase::WrapperClassId() const {
template <class T>
void TracedGlobal<T>::SetFinalizationCallback(
    void* parameter, typename WeakCallbackInfo<void>::Callback callback) {
-  api_internal::SetFinalizationCallbackTraced(
+  internal::SetFinalizationCallbackTraced(
      reinterpret_cast<internal::Address*>(this->val_), parameter, callback);
}
......
@@ -830,17 +830,19 @@ void ResourceConstraints::ConfigureDefaults(uint64_t physical_memory,
  }
}

-namespace api_internal {
-i::Address* GlobalizeTracedReference(i::Isolate* isolate, i::Address* obj,
-                                     internal::Address* slot,
-                                     bool has_destructor) {
+namespace internal {
+i::Address* GlobalizeTracedReference(
+    i::Isolate* isolate, i::Address* obj, internal::Address* slot,
+    GlobalHandleDestructionMode destruction_mode,
+    GlobalHandleStoreMode store_mode) {
  LOG_API(isolate, TracedGlobal, New);
#ifdef DEBUG
  Utils::ApiCheck((slot != nullptr), "v8::GlobalizeTracedReference",
                  "the address slot must be not null");
#endif
-  i::Handle<i::Object> result =
-      isolate->global_handles()->CreateTraced(*obj, slot, has_destructor);
+  i::Handle<i::Object> result = isolate->global_handles()->CreateTraced(
+      *obj, slot, destruction_mode, store_mode);
#ifdef VERIFY_HEAP
  if (i::FLAG_verify_heap) {
    i::Object(*obj).ObjectVerify(isolate);

@@ -849,6 +851,30 @@ i::Address* GlobalizeTracedReference(i::Isolate* isolate, i::Address* obj,
  return result.location();
}
void MoveTracedGlobalReference(internal::Address** from,
internal::Address** to) {
GlobalHandles::MoveTracedGlobal(from, to);
}
void CopyTracedGlobalReference(const internal::Address* const* from,
internal::Address** to) {
GlobalHandles::CopyTracedGlobal(from, to);
}
void DisposeTracedGlobal(internal::Address* location) {
GlobalHandles::DestroyTraced(location);
}
void SetFinalizationCallbackTraced(internal::Address* location, void* parameter,
WeakCallbackInfo<void>::Callback callback) {
GlobalHandles::SetFinalizationCallbackForTraced(location, parameter,
callback);
}
} // namespace internal
namespace api_internal {
i::Address* GlobalizeReference(i::Isolate* isolate, i::Address* obj) {
  LOG_API(isolate, Persistent, New);
  i::Handle<i::Object> result = isolate->global_handles()->Create(*obj);
@@ -900,26 +926,6 @@ Value* Eternalize(Isolate* v8_isolate, Value* value) {
      isolate->eternal_handles()->Get(index).location());
}

-void MoveTracedGlobalReference(internal::Address** from,
-                               internal::Address** to) {
-  i::GlobalHandles::MoveTracedGlobal(from, to);
-}
-
-void CopyTracedGlobalReference(const internal::Address* const* from,
-                               internal::Address** to) {
-  i::GlobalHandles::CopyTracedGlobal(from, to);
-}
-
-void DisposeTracedGlobal(internal::Address* location) {
-  i::GlobalHandles::DestroyTraced(location);
-}
-
-void SetFinalizationCallbackTraced(internal::Address* location, void* parameter,
-                                   WeakCallbackInfo<void>::Callback callback) {
-  i::GlobalHandles::SetFinalizationCallbackForTraced(location, parameter,
-                                                     callback);
-}
-
void FromJustIsNothing() {
  Utils::ApiCheck(false, "v8::FromJust", "Maybe value is Nothing.");
}
......
@@ -16,6 +16,7 @@
#include "src/heap/embedder-tracing.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap-write-barrier-inl.h"
+#include "src/heap/heap-write-barrier.h"
#include "src/init/v8.h"
#include "src/logging/counters.h"
#include "src/objects/objects-inl.h"

@@ -948,16 +949,17 @@ Handle<Object> GlobalHandles::Create(Address value) {
  return Create(Object(value));
}

-Handle<Object> GlobalHandles::CreateTraced(Object value, Address* slot,
-                                           bool has_destructor) {
+Handle<Object> GlobalHandles::CreateTraced(
+    Object value, Address* slot, GlobalHandleDestructionMode destruction_mode,
+    GlobalHandleStoreMode store_mode) {
  return CreateTraced(
-      value, slot, has_destructor,
+      value, slot, destruction_mode, store_mode,
      on_stack_nodes_->IsOnStack(reinterpret_cast<uintptr_t>(slot)));
}

-Handle<Object> GlobalHandles::CreateTraced(Object value, Address* slot,
-                                           bool has_destructor,
-                                           bool is_on_stack) {
+Handle<Object> GlobalHandles::CreateTraced(
+    Object value, Address* slot, GlobalHandleDestructionMode destruction_mode,
+    GlobalHandleStoreMode store_mode, bool is_on_stack) {
  GlobalHandles::TracedNode* result;
  if (is_on_stack) {
    result = on_stack_nodes_->Acquire(value, reinterpret_cast<uintptr_t>(slot));
@@ -967,15 +969,21 @@ Handle<Object> GlobalHandles::CreateTraced(Object value, Address* slot,
      traced_young_nodes_.push_back(result);
      result->set_in_young_list(true);
    }
+    if (store_mode != GlobalHandleStoreMode::kInitializingStore) {
+      WriteBarrier::MarkingFromGlobalHandle(value);
+    }
  }
+  const bool has_destructor =
+      destruction_mode == GlobalHandleDestructionMode::kWithDestructor;
  result->set_has_destructor(has_destructor);
  result->set_parameter(has_destructor ? slot : nullptr);
  return result->handle();
}

-Handle<Object> GlobalHandles::CreateTraced(Address value, Address* slot,
-                                           bool has_destructor) {
-  return CreateTraced(Object(value), slot, has_destructor);
+Handle<Object> GlobalHandles::CreateTraced(
+    Address value, Address* slot, GlobalHandleDestructionMode destruction_mode,
+    GlobalHandleStoreMode store_mode) {
+  return CreateTraced(Object(value), slot, destruction_mode, store_mode);
}

Handle<Object> GlobalHandles::CopyGlobal(Address* location) {
@@ -1012,7 +1020,10 @@ void GlobalHandles::CopyTracedGlobal(const Address* const* from, Address** to) {
  GlobalHandles* global_handles =
      GlobalHandles::From(const_cast<TracedNode*>(node));
  Handle<Object> o = global_handles->CreateTraced(
-      node->object(), reinterpret_cast<Address*>(to), node->has_destructor());
+      node->object(), reinterpret_cast<Address*>(to),
+      node->has_destructor() ? GlobalHandleDestructionMode::kWithDestructor
+                             : GlobalHandleDestructionMode::kWithoutDestructor,
+      GlobalHandleStoreMode::kAssigningStore);
  SetSlotThreadSafe(to, o.location());
  TracedNode::Verify(global_handles, from);
  TracedNode::Verify(global_handles, to);

@@ -1082,7 +1093,10 @@ void GlobalHandles::MoveTracedGlobal(Address** from, Address** to) {
  DCHECK(global_handles);
  Handle<Object> o = global_handles->CreateTraced(
      from_node->object(), reinterpret_cast<Address*>(to),
-      from_node->has_destructor(), to_on_stack);
+      from_node->has_destructor()
+          ? GlobalHandleDestructionMode::kWithDestructor
+          : GlobalHandleDestructionMode::kWithoutDestructor,
+      GlobalHandleStoreMode::kAssigningStore, to_on_stack);
  SetSlotThreadSafe(to, o.location());
  to_node = TracedNode::FromLocation(*to);
  DCHECK(to_node->markbit());

@@ -1095,6 +1109,9 @@ void GlobalHandles::MoveTracedGlobal(Address** from, Address** to) {
      global_handles->traced_young_nodes_.push_back(to_node);
      to_node->set_in_young_list(true);
    }
+    if (!to_on_stack) {
+      WriteBarrier::MarkingFromGlobalHandle(to_node->object());
+    }
  }
  DestroyTraced(*from);
  SetSlotThreadSafe(from, nullptr);

@@ -1110,6 +1127,7 @@ void GlobalHandles::MoveTracedGlobal(Address** from, Address** to) {
    if (to_node->has_destructor()) {
      to_node->set_parameter(to);
    }
+    WriteBarrier::MarkingFromGlobalHandle(to_node->object());
    SetSlotThreadSafe(from, nullptr);
  }
  TracedNode::Verify(global_handles, to);
......
@@ -13,6 +13,7 @@
#include "include/v8-callbacks.h"
#include "include/v8-persistent-handle.h"
#include "include/v8-profiler.h"
+#include "include/v8-traced-handle.h"
#include "src/handles/handles.h"
#include "src/heap/heap.h"
#include "src/objects/heap-object.h"

@@ -104,11 +105,16 @@ class V8_EXPORT_PRIVATE GlobalHandles final {
  template <typename T>
  inline Handle<T> Create(T value);

-  Handle<Object> CreateTraced(Object value, Address* slot, bool has_destructor,
+  Handle<Object> CreateTraced(Object value, Address* slot,
+                              GlobalHandleDestructionMode destruction_mode,
+                              GlobalHandleStoreMode store_mode,
                              bool is_on_stack);
-  Handle<Object> CreateTraced(Object value, Address* slot, bool has_destructor);
+  Handle<Object> CreateTraced(Object value, Address* slot,
+                              GlobalHandleDestructionMode destruction_mode,
+                              GlobalHandleStoreMode store_mode);
  Handle<Object> CreateTraced(Address value, Address* slot,
-                              bool has_destructor);
+                              GlobalHandleDestructionMode destruction_mode,
+                              GlobalHandleStoreMode store_mode);

  void RecordStats(HeapStats* stats);
......
@@ -262,6 +262,19 @@ void WriteBarrier::Marking(DescriptorArray descriptor_array,
  MarkingSlow(*heap, descriptor_array, number_of_own_descriptors);
}

+// static
+void WriteBarrier::MarkingFromGlobalHandle(Object value) {
+  if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
+  if (!value.IsHeapObject()) return;
+  HeapObject heap_value = HeapObject::cast(value);
+  // Value may be in read only space but the chunk should never be marked
+  // as marking which would result in a bail out.
+  auto heap = GetHeapIfMarking(heap_value);
+  if (!heap) return;
+  MarkingSlowFromGlobalHandle(*heap, heap_value);
+}
+
}  // namespace internal
}  // namespace v8
......
@@ -40,6 +40,11 @@ void WriteBarrier::MarkingSlow(Heap* heap, HeapObject host, HeapObjectSlot slot,
  marking_barrier->Write(host, slot, value);
}

+// static
+void WriteBarrier::MarkingSlowFromGlobalHandle(Heap* heap, HeapObject value) {
+  heap->marking_barrier()->WriteWithoutHost(value);
+}
+
void WriteBarrier::MarkingSlow(Heap* heap, Code host, RelocInfo* reloc_info,
                               HeapObject value) {
  MarkingBarrier* marking_barrier = current_marking_barrier
......
@@ -55,6 +55,8 @@ class V8_EXPORT_PRIVATE WriteBarrier {
  static inline void Marking(DescriptorArray, int number_of_own_descriptors);
  // It is invoked from generated code and has to take raw addresses.
  static int MarkingFromCode(Address raw_host, Address raw_slot);
+  // Invoked from global handles where no host object is available.
+  static inline void MarkingFromGlobalHandle(Object value);
  static void SetForThread(MarkingBarrier*);
  static void ClearForThread(MarkingBarrier*);

@@ -62,6 +64,8 @@ class V8_EXPORT_PRIVATE WriteBarrier {
  static MarkingBarrier* CurrentMarkingBarrier(Heap* heap);

 private:
+  static inline base::Optional<Heap*> GetHeapIfMarking(HeapObject object);
  static void MarkingSlow(Heap* heap, HeapObject host, HeapObjectSlot,
                          HeapObject value);
  static void MarkingSlow(Heap* heap, Code host, RelocInfo*, HeapObject value);

@@ -69,7 +73,7 @@ class V8_EXPORT_PRIVATE WriteBarrier {
                          ArrayBufferExtension*);
  static void MarkingSlow(Heap* heap, DescriptorArray,
                          int number_of_own_descriptors);
-  static inline base::Optional<Heap*> GetHeapIfMarking(HeapObject object);
+  static void MarkingSlowFromGlobalHandle(Heap* heap, HeapObject value);
};

}  // namespace internal
......
@@ -48,6 +48,17 @@ void MarkingBarrier::Write(HeapObject host, HeapObjectSlot slot,
  }
}

+void MarkingBarrier::WriteWithoutHost(HeapObject value) {
+  DCHECK(is_main_thread_barrier_);
+  if (WhiteToGreyAndPush(value)) {
+    incremental_marking_->RestartIfNotMarking();
+    if (V8_UNLIKELY(FLAG_track_retaining_path)) {
+      heap_->AddRetainingRoot(Root::kWriteBarrier, value);
+    }
+  }
+}
+
void MarkingBarrier::Write(Code host, RelocInfo* reloc_info, HeapObject value) {
  DCHECK(IsCurrentMarkingBarrier());
  if (MarkValue(host, value)) {
......
@@ -36,6 +36,9 @@ class MarkingBarrier {
  void Write(Code host, RelocInfo*, HeapObject value);
  void Write(JSArrayBuffer host, ArrayBufferExtension*);
  void Write(DescriptorArray, int number_of_own_descriptors);
+  // Only usable when there's no valid JS host object for this write, e.g., when
+  // value is held alive from a global handle.
+  void WriteWithoutHost(HeapObject value);

  // Returns true if the slot needs to be recorded.
  inline bool MarkValue(HeapObject host, HeapObject value);
......
@@ -463,6 +463,10 @@ TEST(TracedGlobalInStdUnorderedMap) {
}

TEST(TracedGlobalToUnmodifiedJSObjectDiesOnMarkSweep) {
+  // When stressing incremental marking, a write barrier may keep the object
+  // alive.
+  if (FLAG_stress_incremental_marking) return;
+
  CcTest::InitializeVM();
  TracedGlobalTest(
      CcTest::isolate(), ConstructJSObject,
......
@@ -3,14 +3,17 @@
// found in the LICENSE file.

#include "include/v8-cppgc.h"
+#include "include/v8-traced-handle.h"
+#include "src/api/api-inl.h"
#include "src/heap/cppgc/visitor.h"
+#include "test/unittests/heap/heap-utils.h"
#include "test/unittests/test-utils.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace v8 {
namespace internal {

-using TracedReferenceTest = TestWithIsolate;
+using TracedReferenceTest = TestWithHeapInternals;

TEST_F(TracedReferenceTest, ResetFromLocal) {
  v8::Local<v8::Context> context = v8::Context::New(v8_isolate());

@@ -200,5 +203,148 @@ TEST_F(TracedReferenceTest, TracedReferenceTrace) {
  }
}
TEST_F(TracedReferenceTest, NoWriteBarrierOnConstruction) {
if (!FLAG_incremental_marking) return;
v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
v8::Context::Scope context_scope(context);
{
v8::HandleScope handles(v8_isolate());
v8::Local<v8::Object> local =
v8::Local<v8::Object>::New(v8_isolate(), v8::Object::New(v8_isolate()));
SimulateIncrementalMarking();
MarkCompactCollector::MarkingState state;
ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
std::make_unique<v8::TracedReference<v8::Object>>(v8_isolate(), local);
EXPECT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
}
}
TEST_F(TracedReferenceTest, WriteBarrierOnHeapReset) {
if (!FLAG_incremental_marking) return;
v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
v8::Context::Scope context_scope(context);
{
v8::HandleScope handles(v8_isolate());
v8::Local<v8::Object> local =
v8::Local<v8::Object>::New(v8_isolate(), v8::Object::New(v8_isolate()));
auto ref = std::make_unique<v8::TracedReference<v8::Object>>();
SimulateIncrementalMarking();
MarkCompactCollector::MarkingState state;
ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
ref->Reset(v8_isolate(), local);
EXPECT_TRUE(state.IsGrey(HeapObject::cast(*Utils::OpenHandle(*local))));
}
}
TEST_F(TracedReferenceTest, NoWriteBarrierOnStackReset) {
if (!FLAG_incremental_marking) return;
isolate()->global_handles()->SetStackStart(base::Stack::GetStackStart());
v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
v8::Context::Scope context_scope(context);
{
v8::HandleScope handles(v8_isolate());
v8::Local<v8::Object> local =
v8::Local<v8::Object>::New(v8_isolate(), v8::Object::New(v8_isolate()));
v8::TracedReference<v8::Object> ref;
SimulateIncrementalMarking();
MarkCompactCollector::MarkingState state;
ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
ref.Reset(v8_isolate(), local);
EXPECT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
}
}
TEST_F(TracedReferenceTest, WriteBarrierOnHeapCopy) {
if (!FLAG_incremental_marking) return;
v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
v8::Context::Scope context_scope(context);
{
v8::HandleScope handles(v8_isolate());
v8::Local<v8::Object> local =
v8::Local<v8::Object>::New(v8_isolate(), v8::Object::New(v8_isolate()));
auto ref_from =
std::make_unique<v8::TracedReference<v8::Object>>(v8_isolate(), local);
auto ref_to = std::make_unique<v8::TracedReference<v8::Object>>();
SimulateIncrementalMarking();
MarkCompactCollector::MarkingState state;
ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
*ref_to = *ref_from;
EXPECT_TRUE(!ref_from->IsEmpty());
EXPECT_TRUE(state.IsGrey(HeapObject::cast(*Utils::OpenHandle(*local))));
}
}
TEST_F(TracedReferenceTest, NoWriteBarrierOnStackCopy) {
if (!FLAG_incremental_marking) return;
isolate()->global_handles()->SetStackStart(base::Stack::GetStackStart());
v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
v8::Context::Scope context_scope(context);
{
v8::HandleScope handles(v8_isolate());
v8::Local<v8::Object> local =
v8::Local<v8::Object>::New(v8_isolate(), v8::Object::New(v8_isolate()));
auto ref_from =
std::make_unique<v8::TracedReference<v8::Object>>(v8_isolate(), local);
v8::TracedReference<v8::Object> ref_to;
SimulateIncrementalMarking();
MarkCompactCollector::MarkingState state;
ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
ref_to = *ref_from;
EXPECT_TRUE(!ref_from->IsEmpty());
EXPECT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
}
}
TEST_F(TracedReferenceTest, WriteBarrierOnMove) {
if (!FLAG_incremental_marking) return;
v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
v8::Context::Scope context_scope(context);
{
v8::HandleScope handles(v8_isolate());
v8::Local<v8::Object> local =
v8::Local<v8::Object>::New(v8_isolate(), v8::Object::New(v8_isolate()));
auto ref_from =
std::make_unique<v8::TracedReference<v8::Object>>(v8_isolate(), local);
auto ref_to = std::make_unique<v8::TracedReference<v8::Object>>();
SimulateIncrementalMarking();
MarkCompactCollector::MarkingState state;
ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
*ref_to = std::move(*ref_from);
ASSERT_TRUE(ref_from->IsEmpty());
EXPECT_TRUE(state.IsGrey(HeapObject::cast(*Utils::OpenHandle(*local))));
}
}
TEST_F(TracedReferenceTest, NoWriteBarrierOnStackMove) {
if (!FLAG_incremental_marking) return;
isolate()->global_handles()->SetStackStart(base::Stack::GetStackStart());
v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
v8::Context::Scope context_scope(context);
{
v8::HandleScope handles(v8_isolate());
v8::Local<v8::Object> local =
v8::Local<v8::Object>::New(v8_isolate(), v8::Object::New(v8_isolate()));
auto ref_from =
std::make_unique<v8::TracedReference<v8::Object>>(v8_isolate(), local);
v8::TracedReference<v8::Object> ref_to;
SimulateIncrementalMarking();
MarkCompactCollector::MarkingState state;
ASSERT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
ref_to = std::move(*ref_from);
ASSERT_TRUE(ref_from->IsEmpty());
EXPECT_TRUE(state.IsWhite(HeapObject::cast(*Utils::OpenHandle(*local))));
}
}
}  // namespace internal
}  // namespace v8