Commit d10f61e1 authored by Omer Katz, committed by V8 LUCI CQ

cppgc-js, heap: Concurrently push references from v8 to Oilpan

Included in this CL:
(*) Introduce CppMarkingState that V8 should use to push references to
    Oilpan. CppMarkingState allocates its own Worklist::Locals to
    support concurrent updates from V8 (see the sketch after this list).
(*) Split Oilpan's MarkingStateBase so that a slim base class can be
    used by CppMarkingState (the remainder moves to BasicMarkingState).
(*) Remove MarkerFactory and split marking initialization. Marking
    worklists should already be initialized when V8 initializes
    visitors. For incremental marking, this requires splitting
    marking initialization and marking start.
(*) Drive-by: Mark JSObject::IsApiWrapper and
    JSObject::IsDroppableApiWrapper as const.
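
A rough usage sketch (illustration only, not part of the CL; the names
mirror the changes in the diff below): each marking task creates its
MarkingWorklists::Local with an optional CppMarkingState obtained from
the CppHeap, so wrapper objects discovered during (concurrent) marking
can be pushed to Oilpan directly:

    // Minimal sketch, assuming a Heap* heap_ and MarkingWorklists*
    // marking_worklists_ as in ConcurrentMarking::Run below.
    auto* cpp_heap = CppHeap::From(heap_->cpp_heap());
    MarkingWorklists::Local local_marking_worklists(
        marking_worklists_,
        cpp_heap ? cpp_heap->CreateCppMarkingState()
                 : MarkingWorklists::Local::kNoCppMarkingState);
    // PushWrapper() then either forwards into the CppMarkingState or
    // falls back to the V8-side wrapper worklist (marking-worklist-inl.h).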

Bug: v8:12407
Change-Id: I35cc816343da86f69a68306204675720e9b3913f
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3293410
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#78446}
parent b014d0ba
......@@ -1317,6 +1317,8 @@ filegroup(
"src/heap/concurrent-marking.h",
"src/heap/cppgc-js/cpp-heap.cc",
"src/heap/cppgc-js/cpp-heap.h",
"src/heap/cppgc-js/cpp-marking-state.h",
"src/heap/cppgc-js/cpp-marking-state-inl.h",
"src/heap/cppgc-js/cpp-snapshot.cc",
"src/heap/cppgc-js/cpp-snapshot.h",
"src/heap/cppgc-js/unified-heap-marking-state.h",
......@@ -1326,6 +1328,7 @@ filegroup(
"src/heap/cppgc-js/unified-heap-marking-visitor.h",
"src/heap/embedder-tracing.cc",
"src/heap/embedder-tracing.h",
"src/heap/embedder-tracing-inl.h",
"src/heap/factory-base.cc",
"src/heap/factory-base.h",
"src/heap/factory-base-inl.h",
......
......@@ -2931,10 +2931,13 @@ v8_header_set("v8_internal_headers") {
"src/heap/concurrent-allocator.h",
"src/heap/concurrent-marking.h",
"src/heap/cppgc-js/cpp-heap.h",
"src/heap/cppgc-js/cpp-marking-state-inl.h",
"src/heap/cppgc-js/cpp-marking-state.h",
"src/heap/cppgc-js/cpp-snapshot.h",
"src/heap/cppgc-js/unified-heap-marking-state.h",
"src/heap/cppgc-js/unified-heap-marking-verifier.h",
"src/heap/cppgc-js/unified-heap-marking-visitor.h",
"src/heap/embedder-tracing-inl.h",
"src/heap/embedder-tracing.h",
"src/heap/factory-base-inl.h",
"src/heap/factory-base.h",
......
......@@ -451,7 +451,11 @@ void ConcurrentMarking::Run(JobDelegate* delegate,
int kObjectsUntilInterrupCheck = 1000;
uint8_t task_id = delegate->GetTaskId() + 1;
TaskState* task_state = &task_state_[task_id];
MarkingWorklists::Local local_marking_worklists(marking_worklists_);
auto* cpp_heap = CppHeap::From(heap_->cpp_heap());
MarkingWorklists::Local local_marking_worklists(
marking_worklists_, cpp_heap
? cpp_heap->CreateCppMarkingState()
: MarkingWorklists::Local::kNoCppMarkingState);
WeakObjects::Local local_weak_objects(weak_objects_);
ConcurrentMarkingVisitor visitor(
task_id, &local_marking_worklists, &local_weak_objects, heap_,
......
......@@ -22,6 +22,7 @@
#include "src/handles/global-handles.h"
#include "src/handles/handles.h"
#include "src/heap/base/stack.h"
#include "src/heap/cppgc-js/cpp-marking-state.h"
#include "src/heap/cppgc-js/cpp-snapshot.h"
#include "src/heap/cppgc-js/unified-heap-marking-state.h"
#include "src/heap/cppgc-js/unified-heap-marking-verifier.h"
......@@ -172,13 +173,17 @@ UnifiedHeapConcurrentMarker::CreateConcurrentMarkingVisitor(
class UnifiedHeapMarker final : public cppgc::internal::MarkerBase {
public:
UnifiedHeapMarker(Key, Heap* v8_heap, cppgc::internal::HeapBase& cpp_heap,
UnifiedHeapMarker(Heap* v8_heap, cppgc::internal::HeapBase& cpp_heap,
cppgc::Platform* platform, MarkingConfig config);
~UnifiedHeapMarker() final = default;
void AddObject(void*);
cppgc::internal::MarkingWorklists& GetMarkingWorklists() {
return marking_worklists_;
}
protected:
cppgc::Visitor& visitor() final { return marking_visitor_; }
cppgc::internal::ConservativeTracingVisitor& conservative_visitor() final {
......@@ -194,11 +199,11 @@ class UnifiedHeapMarker final : public cppgc::internal::MarkerBase {
cppgc::internal::ConservativeMarkingVisitor conservative_marking_visitor_;
};
UnifiedHeapMarker::UnifiedHeapMarker(Key key, Heap* v8_heap,
UnifiedHeapMarker::UnifiedHeapMarker(Heap* v8_heap,
cppgc::internal::HeapBase& heap,
cppgc::Platform* platform,
MarkingConfig config)
: cppgc::internal::MarkerBase(key, heap, platform, config),
: cppgc::internal::MarkerBase(heap, platform, config),
unified_heap_marking_state_(v8_heap),
marking_visitor_(heap, mutator_marking_state_,
unified_heap_marking_state_),
......@@ -391,17 +396,6 @@ void CppHeap::DetachIsolate() {
no_gc_scope_++;
}
void CppHeap::RegisterV8References(
const std::vector<std::pair<void*, void*> >& embedder_fields) {
DCHECK(marker_);
for (auto& tuple : embedder_fields) {
// First field points to type.
// Second field points to object.
static_cast<UnifiedHeapMarker*>(marker_.get())->AddObject(tuple.second);
}
marking_done_ = false;
}
namespace {
bool IsMemoryReducingGC(CppHeap::GarbageCollectionFlags flags) {
......@@ -417,7 +411,8 @@ bool ShouldReduceMemory(CppHeap::GarbageCollectionFlags flags) {
}
} // namespace
void CppHeap::TracePrologue(GarbageCollectionFlags gc_flags) {
void CppHeap::InitializeTracing(GarbageCollectionFlags gc_flags) {
CHECK(!sweeper_.IsSweepingInProgress());
#if defined(CPPGC_YOUNG_GENERATION)
......@@ -445,10 +440,13 @@ void CppHeap::TracePrologue(GarbageCollectionFlags gc_flags) {
compactor_.InitializeIfShouldCompact(marking_config.marking_type,
marking_config.stack_state);
}
marker_ =
cppgc::internal::MarkerFactory::CreateAndStartMarking<UnifiedHeapMarker>(
isolate_ ? isolate_->heap() : nullptr, AsBase(), platform_.get(),
marking_config);
marker_ = std::make_unique<UnifiedHeapMarker>(
isolate_ ? isolate()->heap() : nullptr, AsBase(), platform_.get(),
marking_config);
}
void CppHeap::StartTracing() {
marker_->StartMarking();
marking_done_ = false;
}
......@@ -593,7 +591,10 @@ void CppHeap::CollectGarbageForTesting(
} else {
// Perform an atomic GC, with starting incremental/concurrent marking and
// immediately finalizing the garbage collection.
if (!IsMarking()) TracePrologue(GarbageCollectionFlagValues::kForced);
if (!IsMarking()) {
InitializeTracing(GarbageCollectionFlagValues::kForced);
StartTracing();
}
EnterFinalPause(stack_state);
AdvanceTracing(std::numeric_limits<double>::infinity());
TraceEpilogue();
......@@ -614,7 +615,8 @@ void CppHeap::StartIncrementalGarbageCollectionForTesting() {
DCHECK_NULL(isolate_);
if (IsMarking()) return;
force_incremental_marking_for_testing_ = true;
TracePrologue(GarbageCollectionFlagValues::kForced);
InitializeTracing(GarbageCollectionFlagValues::kForced);
StartTracing();
force_incremental_marking_for_testing_ = false;
}
......@@ -713,5 +715,12 @@ CppHeap::MetricRecorderAdapter* CppHeap::GetMetricRecorder() const {
void CppHeap::FinishSweepingIfRunning() { sweeper_.FinishIfRunning(); }
std::unique_ptr<CppMarkingState> CppHeap::CreateCppMarkingState() {
DCHECK(IsMarking());
return std::make_unique<CppMarkingState>(
*this, wrapper_descriptor_,
static_cast<UnifiedHeapMarker*>(marker())->GetMarkingWorklists());
}
} // namespace internal
} // namespace v8
......@@ -25,6 +25,8 @@ class Isolate;
namespace internal {
class CppMarkingState;
// A C++ heap implementation used with V8 to implement unified heap.
class V8_EXPORT_PRIVATE CppHeap final
: public cppgc::internal::HeapBase,
......@@ -113,9 +115,8 @@ class V8_EXPORT_PRIVATE CppHeap final
void FinishSweepingIfRunning();
void RegisterV8References(
const std::vector<std::pair<void*, void*>>& embedder_fields);
void TracePrologue(GarbageCollectionFlags);
void InitializeTracing(GarbageCollectionFlags);
void StartTracing();
bool AdvanceTracing(double max_duration);
bool IsTracingDone();
void TraceEpilogue();
......@@ -134,6 +135,8 @@ class V8_EXPORT_PRIVATE CppHeap final
Isolate* isolate() const { return isolate_; }
std::unique_ptr<CppMarkingState> CreateCppMarkingState();
private:
void FinalizeIncrementalGarbageCollectionIfNeeded(
cppgc::Heap::StackState) final {
......
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_CPPGC_JS_CPP_MARKING_STATE_INL_H_
#define V8_HEAP_CPPGC_JS_CPP_MARKING_STATE_INL_H_
#include "src/heap/cppgc-js/cpp-marking-state.h"
#include "src/heap/embedder-tracing-inl.h"
#include "src/objects/js-objects.h"
namespace v8 {
namespace internal {
void CppMarkingState::MarkAndPush(const JSObject& js_object) {
DCHECK(js_object.IsApiWrapper());
LocalEmbedderHeapTracer::WrapperInfo info;
if (LocalEmbedderHeapTracer::ExtractWrappableInfo(
isolate_, js_object, wrapper_descriptor_, &info)) {
marking_state_.MarkAndPush(
cppgc::internal::HeapObjectHeader::FromObject(info.second));
}
}
} // namespace internal
} // namespace v8
#endif // V8_HEAP_CPPGC_JS_CPP_MARKING_STATE_INL_H_
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_CPPGC_JS_CPP_MARKING_STATE_H_
#define V8_HEAP_CPPGC_JS_CPP_MARKING_STATE_H_
#include "src/heap/cppgc-js/cpp-heap.h"
#include "src/heap/cppgc/marking-state.h"
#include "src/heap/cppgc/marking-worklists.h"
namespace v8 {
namespace internal {
class JSObject;
class CppMarkingState {
public:
CppMarkingState(CppHeap& cpp_heap,
const WrapperDescriptor& wrapper_descriptor,
cppgc::internal::MarkingWorklists& marking_worklists)
: isolate_(reinterpret_cast<Isolate*>(cpp_heap.isolate())),
wrapper_descriptor_(wrapper_descriptor),
marking_state_(cpp_heap.AsBase(), marking_worklists) {}
CppMarkingState(const CppMarkingState&) = delete;
CppMarkingState& operator=(const CppMarkingState&) = delete;
void Publish() { marking_state_.Publish(); }
inline void MarkAndPush(const JSObject& js_object);
bool IsLocalEmpty() {
return marking_state_.marking_worklist().IsLocalEmpty();
}
private:
Isolate* const isolate_;
const WrapperDescriptor& wrapper_descriptor_;
cppgc::internal::MarkingStateBase marking_state_;
};
} // namespace internal
} // namespace v8
#endif // V8_HEAP_CPPGC_JS_CPP_MARKING_STATE_H_
......@@ -13,7 +13,7 @@ namespace v8 {
namespace internal {
UnifiedHeapMarkingVisitorBase::UnifiedHeapMarkingVisitorBase(
HeapBase& heap, cppgc::internal::MarkingStateBase& marking_state,
HeapBase& heap, cppgc::internal::BasicMarkingState& marking_state,
UnifiedHeapMarkingState& unified_heap_marking_state)
: JSVisitor(cppgc::internal::VisitorFactory::CreateKey()),
marking_state_(marking_state),
......
......@@ -17,7 +17,7 @@ class SourceLocation;
namespace internal {
class ConcurrentMarkingState;
class MarkingStateBase;
class BasicMarkingState;
class MutatorMarkingState;
} // namespace internal
} // namespace cppgc
......@@ -33,7 +33,7 @@ using cppgc::internal::MutatorMarkingState;
class V8_EXPORT_PRIVATE UnifiedHeapMarkingVisitorBase : public JSVisitor {
public:
UnifiedHeapMarkingVisitorBase(HeapBase&, cppgc::internal::MarkingStateBase&,
UnifiedHeapMarkingVisitorBase(HeapBase&, cppgc::internal::BasicMarkingState&,
UnifiedHeapMarkingState&);
~UnifiedHeapMarkingVisitorBase() override = default;
......@@ -51,7 +51,7 @@ class V8_EXPORT_PRIVATE UnifiedHeapMarkingVisitorBase : public JSVisitor {
// JS handling.
void Visit(const TracedReferenceBase& ref) final;
cppgc::internal::MarkingStateBase& marking_state_;
cppgc::internal::BasicMarkingState& marking_state_;
UnifiedHeapMarkingState& unified_heap_marking_state_;
};
......
......@@ -159,8 +159,8 @@ void Heap::StartGarbageCollection(Config config) {
const Marker::MarkingConfig marking_config{
config.collection_type, config.stack_state, config.marking_type,
config.is_forced_gc};
marker_ = MarkerFactory::CreateAndStartMarking<Marker>(
AsBase(), platform_.get(), marking_config);
marker_ = std::make_unique<Marker>(AsBase(), platform_.get(), marking_config);
marker_->StartMarking();
}
void Heap::FinalizeGarbageCollection(Config::StackState stack_state) {
......
......@@ -86,7 +86,7 @@ static constexpr size_t kDefaultDeadlineCheckInterval = 150u;
template <size_t kDeadlineCheckInterval = kDefaultDeadlineCheckInterval,
typename WorklistLocal, typename Callback>
bool DrainWorklistWithBytesAndTimeDeadline(MarkingStateBase& marking_state,
bool DrainWorklistWithBytesAndTimeDeadline(BasicMarkingState& marking_state,
size_t marked_bytes_deadline,
v8::base::TimeTicks time_deadline,
WorklistLocal& worklist_local,
......@@ -153,7 +153,7 @@ void MarkerBase::IncrementalMarkingTask::Run() {
}
}
MarkerBase::MarkerBase(Key, HeapBase& heap, cppgc::Platform* platform,
MarkerBase::MarkerBase(HeapBase& heap, cppgc::Platform* platform,
MarkingConfig config)
: heap_(heap),
config_(config),
......@@ -624,9 +624,8 @@ void MarkerBase::WaitForConcurrentMarkingForTesting() {
concurrent_marker_->JoinForTesting();
}
Marker::Marker(Key key, HeapBase& heap, cppgc::Platform* platform,
MarkingConfig config)
: MarkerBase(key, heap, platform, config),
Marker::Marker(HeapBase& heap, cppgc::Platform* platform, MarkingConfig config)
: MarkerBase(heap, platform, config),
marking_visitor_(heap, mutator_marking_state_),
conservative_marking_visitor_(heap, mutator_marking_state_,
marking_visitor_) {
......
......@@ -24,12 +24,10 @@ namespace cppgc {
namespace internal {
class HeapBase;
class MarkerFactory;
// Marking algorithm. Example for a valid call sequence creating the marking
// phase:
// 1. StartMarking() [Called implicitly when creating a Marker using
// MarkerFactory]
// 1. StartMarking()
// 2. AdvanceMarkingWithLimits() [Optional, depending on environment.]
// 3. EnterAtomicPause()
// 4. AdvanceMarkingWithLimits()
......@@ -87,6 +85,10 @@ class V8_EXPORT_PRIVATE MarkerBase {
// objects to be marked and merely updates marking states if needed.
void LeaveAtomicPause();
// Initialize marking according to the given config. This method will
// trigger incremental/concurrent marking if needed.
void StartMarking();
// Combines:
// - EnterAtomicPause()
// - AdvanceMarkingWithLimits()
......@@ -141,17 +143,7 @@ class V8_EXPORT_PRIVATE MarkerBase {
static constexpr v8::base::TimeDelta kMaximumIncrementalStepDuration =
v8::base::TimeDelta::FromMilliseconds(2);
class Key {
private:
Key() = default;
friend class MarkerFactory;
};
MarkerBase(Key, HeapBase&, cppgc::Platform*, MarkingConfig);
// Initialize marking according to the given config. This method will
// trigger incremental/concurrent marking if needed.
void StartMarking();
MarkerBase(HeapBase&, cppgc::Platform*, MarkingConfig);
virtual cppgc::Visitor& visitor() = 0;
virtual ConservativeTracingVisitor& conservative_visitor() = 0;
......@@ -195,27 +187,11 @@ class V8_EXPORT_PRIVATE MarkerBase {
bool main_marking_disabled_for_testing_{false};
bool visited_cross_thread_persistents_in_atomic_pause_{false};
friend class MarkerFactory;
};
class V8_EXPORT_PRIVATE MarkerFactory {
public:
template <typename T, typename... Args>
static std::unique_ptr<T> CreateAndStartMarking(Args&&... args) {
static_assert(std::is_base_of<MarkerBase, T>::value,
"MarkerFactory can only create subclasses of MarkerBase");
std::unique_ptr<T> marker =
std::make_unique<T>(MarkerBase::Key(), std::forward<Args>(args)...);
marker->StartMarking();
return marker;
}
};
class V8_EXPORT_PRIVATE Marker final : public MarkerBase {
public:
Marker(Key, HeapBase&, cppgc::Platform*,
MarkingConfig = MarkingConfig::Default());
Marker(HeapBase&, cppgc::Platform*, MarkingConfig = MarkingConfig::Default());
protected:
cppgc::Visitor& visitor() final { return marking_visitor_; }
......
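
A hedged sketch of the resulting marker lifecycle (an illustration of
the call sequence documented in marker.h above and of the unit-test
updates further below; heap_base, platform and marking_config are
placeholders, not code from this CL):

    // Construction no longer starts marking; StartMarking() is explicit
    // now that MarkerFactory::CreateAndStartMarking is gone.
    auto marker = std::make_unique<cppgc::internal::Marker>(
        heap_base, platform, marking_config);
    marker->StartMarking();  // kicks off incremental/concurrent marking
    // Later phases follow the sequence documented in marker.h:
    //   AdvanceMarkingWithLimits()   [optional incremental steps]
    //   EnterAtomicPause() followed by AdvanceMarkingWithLimits()
    //   LeaveAtomicPause()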
......@@ -23,8 +23,7 @@ namespace internal {
// C++ marking implementation.
class MarkingStateBase {
public:
inline MarkingStateBase(HeapBase& heap, MarkingWorklists&,
CompactionWorklists*);
inline MarkingStateBase(HeapBase&, MarkingWorklists&);
MarkingStateBase(const MarkingStateBase&) = delete;
MarkingStateBase& operator=(const MarkingStateBase&) = delete;
......@@ -34,6 +33,86 @@ class MarkingStateBase {
inline void PushMarked(HeapObjectHeader&, TraceDescriptor desc);
void Publish() { marking_worklist_.Publish(); }
MarkingWorklists::MarkingWorklist::Local& marking_worklist() {
return marking_worklist_;
}
MarkingWorklists::NotFullyConstructedWorklist&
not_fully_constructed_worklist() {
return not_fully_constructed_worklist_;
}
protected:
inline void MarkAndPush(HeapObjectHeader&, TraceDescriptor);
inline bool MarkNoPush(HeapObjectHeader&);
HeapBase& heap_;
MarkingWorklists::MarkingWorklist::Local marking_worklist_;
MarkingWorklists::NotFullyConstructedWorklist&
not_fully_constructed_worklist_;
};
MarkingStateBase::MarkingStateBase(HeapBase& heap,
MarkingWorklists& marking_worklists)
: heap_(heap),
marking_worklist_(marking_worklists.marking_worklist()),
not_fully_constructed_worklist_(
*marking_worklists.not_fully_constructed_worklist()) {}
void MarkingStateBase::MarkAndPush(const void* object, TraceDescriptor desc) {
DCHECK_NOT_NULL(object);
MarkAndPush(
HeapObjectHeader::FromObject(const_cast<void*>(desc.base_object_payload)),
desc);
}
void MarkingStateBase::MarkAndPush(HeapObjectHeader& header,
TraceDescriptor desc) {
DCHECK_NOT_NULL(desc.callback);
if (header.IsInConstruction<AccessMode::kAtomic>()) {
not_fully_constructed_worklist_.Push<AccessMode::kAtomic>(&header);
} else if (MarkNoPush(header)) {
PushMarked(header, desc);
}
}
bool MarkingStateBase::MarkNoPush(HeapObjectHeader& header) {
// A GC should only mark the objects that belong in its heap.
DCHECK_EQ(&heap_, &BasePage::FromPayload(&header)->heap());
// Never mark free space objects. This would e.g. hint to marking a promptly
// freed backing store.
DCHECK(!header.IsFree<AccessMode::kAtomic>());
return header.TryMarkAtomic();
}
void MarkingStateBase::MarkAndPush(HeapObjectHeader& header) {
MarkAndPush(
header,
{header.ObjectStart(),
GlobalGCInfoTable::GCInfoFromIndex(header.GetGCInfoIndex()).trace});
}
void MarkingStateBase::PushMarked(HeapObjectHeader& header,
TraceDescriptor desc) {
DCHECK(header.IsMarked<AccessMode::kAtomic>());
DCHECK(!header.IsInConstruction<AccessMode::kAtomic>());
DCHECK_NOT_NULL(desc.callback);
marking_worklist_.Push(desc);
}
class BasicMarkingState : public MarkingStateBase {
public:
inline BasicMarkingState(HeapBase& heap, MarkingWorklists&,
CompactionWorklists*);
BasicMarkingState(const BasicMarkingState&) = delete;
BasicMarkingState& operator=(const BasicMarkingState&) = delete;
inline void RegisterWeakReferenceIfNeeded(const void*, TraceDescriptor,
WeakCallback, const void*);
inline void RegisterWeakCallback(WeakCallback, const void*);
......@@ -58,7 +137,7 @@ class MarkingStateBase {
size_t marked_bytes() const { return marked_bytes_; }
void Publish() {
marking_worklist_.Publish();
MarkingStateBase::Publish();
previously_not_fully_constructed_worklist_.Publish();
weak_callback_worklist_.Publish();
write_barrier_worklist_.Publish();
......@@ -68,13 +147,6 @@ class MarkingStateBase {
if (IsCompactionEnabled()) movable_slots_worklist_->Publish();
}
MarkingWorklists::MarkingWorklist::Local& marking_worklist() {
return marking_worklist_;
}
MarkingWorklists::NotFullyConstructedWorklist&
not_fully_constructed_worklist() {
return not_fully_constructed_worklist_;
}
MarkingWorklists::PreviouslyNotFullyConstructedWorklist::Local&
previously_not_fully_constructed_worklist() {
return previously_not_fully_constructed_worklist_;
......@@ -121,21 +193,12 @@ class MarkingStateBase {
void set_in_atomic_pause() { in_atomic_pause_ = true; }
protected:
inline void MarkAndPush(HeapObjectHeader&, TraceDescriptor);
inline bool MarkNoPush(HeapObjectHeader&);
inline void RegisterWeakContainer(HeapObjectHeader&);
inline bool IsCompactionEnabled() const {
return movable_slots_worklist_.get();
}
HeapBase& heap_;
MarkingWorklists::MarkingWorklist::Local marking_worklist_;
MarkingWorklists::NotFullyConstructedWorklist&
not_fully_constructed_worklist_;
MarkingWorklists::PreviouslyNotFullyConstructedWorklist::Local
previously_not_fully_constructed_worklist_;
MarkingWorklists::WeakCallbackWorklist::Local weak_callback_worklist_;
......@@ -160,13 +223,10 @@ class MarkingStateBase {
bool in_atomic_pause_ = false;
};
MarkingStateBase::MarkingStateBase(HeapBase& heap,
MarkingWorklists& marking_worklists,
CompactionWorklists* compaction_worklists)
: heap_(heap),
marking_worklist_(marking_worklists.marking_worklist()),
not_fully_constructed_worklist_(
*marking_worklists.not_fully_constructed_worklist()),
BasicMarkingState::BasicMarkingState(HeapBase& heap,
MarkingWorklists& marking_worklists,
CompactionWorklists* compaction_worklists)
: MarkingStateBase(heap, marking_worklists),
previously_not_fully_constructed_worklist_(
marking_worklists.previously_not_fully_constructed_worklist()),
weak_callback_worklist_(marking_worklists.weak_callback_worklist()),
......@@ -187,53 +247,9 @@ MarkingStateBase::MarkingStateBase(HeapBase& heap,
}
}
void MarkingStateBase::MarkAndPush(const void* object, TraceDescriptor desc) {
DCHECK_NOT_NULL(object);
MarkAndPush(
HeapObjectHeader::FromObject(const_cast<void*>(desc.base_object_payload)),
desc);
}
void MarkingStateBase::MarkAndPush(HeapObjectHeader& header,
TraceDescriptor desc) {
DCHECK_NOT_NULL(desc.callback);
if (header.IsInConstruction<AccessMode::kAtomic>()) {
not_fully_constructed_worklist_.Push<AccessMode::kAtomic>(&header);
} else if (MarkNoPush(header)) {
PushMarked(header, desc);
}
}
bool MarkingStateBase::MarkNoPush(HeapObjectHeader& header) {
// A GC should only mark the objects that belong in its heap.
DCHECK_EQ(&heap_, &BasePage::FromPayload(&header)->heap());
// Never mark free space objects. This would e.g. hint to marking a promptly
// freed backing store.
DCHECK(!header.IsFree<AccessMode::kAtomic>());
return header.TryMarkAtomic();
}
void MarkingStateBase::MarkAndPush(HeapObjectHeader& header) {
MarkAndPush(
header,
{header.ObjectStart(),
GlobalGCInfoTable::GCInfoFromIndex(header.GetGCInfoIndex()).trace});
}
void MarkingStateBase::PushMarked(HeapObjectHeader& header,
TraceDescriptor desc) {
DCHECK(header.IsMarked<AccessMode::kAtomic>());
DCHECK(!header.IsInConstruction<AccessMode::kAtomic>());
DCHECK_NOT_NULL(desc.callback);
marking_worklist_.Push(desc);
}
void MarkingStateBase::RegisterWeakReferenceIfNeeded(const void* object,
TraceDescriptor desc,
WeakCallback weak_callback,
const void* parameter) {
void BasicMarkingState::RegisterWeakReferenceIfNeeded(
const void* object, TraceDescriptor desc, WeakCallback weak_callback,
const void* parameter) {
// Filter out already marked values. The write barrier for WeakMember
// ensures that any newly set value after this point is kept alive and does
// not require the callback.
......@@ -245,20 +261,20 @@ void MarkingStateBase::RegisterWeakReferenceIfNeeded(const void* object,
RegisterWeakCallback(weak_callback, parameter);
}
void MarkingStateBase::RegisterWeakCallback(WeakCallback callback,
const void* object) {
void BasicMarkingState::RegisterWeakCallback(WeakCallback callback,
const void* object) {
DCHECK_NOT_NULL(callback);
weak_callback_worklist_.Push({callback, object});
}
void MarkingStateBase::RegisterWeakContainer(HeapObjectHeader& header) {
void BasicMarkingState::RegisterWeakContainer(HeapObjectHeader& header) {
weak_containers_worklist_.Push<AccessMode::kAtomic>(&header);
}
void MarkingStateBase::ProcessWeakContainer(const void* object,
TraceDescriptor desc,
WeakCallback callback,
const void* data) {
void BasicMarkingState::ProcessWeakContainer(const void* object,
TraceDescriptor desc,
WeakCallback callback,
const void* data) {
DCHECK_NOT_NULL(object);
HeapObjectHeader& header =
......@@ -291,9 +307,9 @@ void MarkingStateBase::ProcessWeakContainer(const void* object,
}
}
void MarkingStateBase::ProcessEphemeron(const void* key, const void* value,
TraceDescriptor value_desc,
Visitor& visitor) {
void BasicMarkingState::ProcessEphemeron(const void* key, const void* value,
TraceDescriptor value_desc,
Visitor& visitor) {
// ProcessEphemeron is not expected to find new ephemerons recursively, which
// would break the main marking loop.
DCHECK(!in_ephemeron_processing_);
......@@ -325,7 +341,7 @@ void MarkingStateBase::ProcessEphemeron(const void* key, const void* value,
in_ephemeron_processing_ = false;
}
void MarkingStateBase::AccountMarkedBytes(const HeapObjectHeader& header) {
void BasicMarkingState::AccountMarkedBytes(const HeapObjectHeader& header) {
AccountMarkedBytes(
header.IsLargeObject<AccessMode::kAtomic>()
? reinterpret_cast<const LargePage*>(BasePage::FromPayload(&header))
......@@ -333,18 +349,18 @@ void MarkingStateBase::AccountMarkedBytes(const HeapObjectHeader& header) {
: header.AllocatedSize<AccessMode::kAtomic>());
}
void MarkingStateBase::AccountMarkedBytes(size_t marked_bytes) {
void BasicMarkingState::AccountMarkedBytes(size_t marked_bytes) {
marked_bytes_ += marked_bytes;
}
class MutatorMarkingState : public MarkingStateBase {
class MutatorMarkingState : public BasicMarkingState {
public:
MutatorMarkingState(HeapBase& heap, MarkingWorklists& marking_worklists,
CompactionWorklists* compaction_worklists)
: MarkingStateBase(heap, marking_worklists, compaction_worklists) {}
: BasicMarkingState(heap, marking_worklists, compaction_worklists) {}
inline bool MarkNoPush(HeapObjectHeader& header) {
return MutatorMarkingState::MarkingStateBase::MarkNoPush(header);
return MutatorMarkingState::BasicMarkingState::MarkNoPush(header);
}
inline void ReTraceMarkedWeakContainer(cppgc::Visitor&, HeapObjectHeader&);
......@@ -440,11 +456,11 @@ void MutatorMarkingState::RecentlyRetracedWeakContainers::Insert(
recently_retraced_cache_[last_used_index_] = header;
}
class ConcurrentMarkingState : public MarkingStateBase {
class ConcurrentMarkingState : public BasicMarkingState {
public:
ConcurrentMarkingState(HeapBase& heap, MarkingWorklists& marking_worklists,
CompactionWorklists* compaction_worklists)
: MarkingStateBase(heap, marking_worklists, compaction_worklists) {}
: BasicMarkingState(heap, marking_worklists, compaction_worklists) {}
~ConcurrentMarkingState() { DCHECK_EQ(last_marked_bytes_, marked_bytes_); }
......
......@@ -12,7 +12,7 @@ namespace cppgc {
namespace internal {
MarkingVisitorBase::MarkingVisitorBase(HeapBase& heap,
MarkingStateBase& marking_state)
BasicMarkingState& marking_state)
: marking_state_(marking_state) {}
void MarkingVisitorBase::Visit(const void* object, TraceDescriptor desc) {
......
......@@ -16,13 +16,13 @@ namespace internal {
class HeapBase;
class HeapObjectHeader;
class Marker;
class MarkingStateBase;
class BasicMarkingState;
class MutatorMarkingState;
class ConcurrentMarkingState;
class V8_EXPORT_PRIVATE MarkingVisitorBase : public VisitorBase {
public:
MarkingVisitorBase(HeapBase&, MarkingStateBase&);
MarkingVisitorBase(HeapBase&, BasicMarkingState&);
~MarkingVisitorBase() override = default;
protected:
......@@ -35,7 +35,7 @@ class V8_EXPORT_PRIVATE MarkingVisitorBase : public VisitorBase {
void RegisterWeakCallback(WeakCallback, const void*) final;
void HandleMovableReference(const void**) final;
MarkingStateBase& marking_state_;
BasicMarkingState& marking_state_;
};
class V8_EXPORT_PRIVATE MutatorMarkingVisitor : public MarkingVisitorBase {
......
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_EMBEDDER_TRACING_INL_H_
#define V8_HEAP_EMBEDDER_TRACING_INL_H_
#include "src/heap/embedder-tracing.h"
#include "src/objects/embedder-data-slot.h"
#include "src/objects/js-objects-inl.h"
namespace v8 {
namespace internal {
// static
bool LocalEmbedderHeapTracer::ExtractWrappableInfo(
Isolate* isolate, JSObject js_object,
const WrapperDescriptor& wrapper_descriptor, WrapperInfo* info) {
DCHECK(js_object.IsApiWrapper());
if (js_object.GetEmbedderFieldCount() < 2) return false;
if (EmbedderDataSlot(js_object, wrapper_descriptor.wrappable_type_index)
.ToAlignedPointerSafe(isolate, &info->first) &&
info->first &&
EmbedderDataSlot(js_object, wrapper_descriptor.wrappable_instance_index)
.ToAlignedPointerSafe(isolate, &info->second) &&
info->second) {
return (wrapper_descriptor.embedder_id_for_garbage_collected ==
WrapperDescriptor::kUnknownEmbedderId) ||
(*static_cast<uint16_t*>(info->first) ==
wrapper_descriptor.embedder_id_for_garbage_collected);
}
return false;
}
} // namespace internal
} // namespace v8
#endif // V8_HEAP_EMBEDDER_TRACING_INL_H_
......@@ -7,9 +7,9 @@
#include "include/v8-cppgc.h"
#include "src/base/logging.h"
#include "src/handles/global-handles.h"
#include "src/heap/embedder-tracing-inl.h"
#include "src/heap/gc-tracer.h"
#include "src/objects/embedder-data-slot.h"
#include "src/objects/js-objects-inl.h"
#include "src/heap/marking-worklist-inl.h"
namespace v8 {
namespace internal {
......@@ -41,13 +41,18 @@ CppHeap::GarbageCollectionFlags ConvertTraceFlags(
}
} // namespace
void LocalEmbedderHeapTracer::PrepareForTrace(
EmbedderHeapTracer::TraceFlags flags) {
if (cpp_heap_) cpp_heap()->InitializeTracing(ConvertTraceFlags(flags));
}
void LocalEmbedderHeapTracer::TracePrologue(
EmbedderHeapTracer::TraceFlags flags) {
if (!InUse()) return;
embedder_worklist_empty_ = false;
if (cpp_heap_)
cpp_heap()->TracePrologue(ConvertTraceFlags(flags));
cpp_heap()->StartTracing();
else
remote_tracer_->TracePrologue(flags);
}
......@@ -113,42 +118,17 @@ void LocalEmbedderHeapTracer::SetEmbedderStackStateForNextFinalization(
NotifyEmptyEmbedderStack();
}
namespace {
bool ExtractWrappableInfo(Isolate* isolate, JSObject js_object,
const WrapperDescriptor& wrapper_descriptor,
LocalEmbedderHeapTracer::WrapperInfo* info) {
DCHECK(js_object.IsApiWrapper());
if (js_object.GetEmbedderFieldCount() < 2) return false;
if (EmbedderDataSlot(js_object, wrapper_descriptor.wrappable_type_index)
.ToAlignedPointerSafe(isolate, &info->first) &&
info->first &&
EmbedderDataSlot(js_object, wrapper_descriptor.wrappable_instance_index)
.ToAlignedPointerSafe(isolate, &info->second) &&
info->second) {
return (wrapper_descriptor.embedder_id_for_garbage_collected ==
WrapperDescriptor::kUnknownEmbedderId) ||
(*static_cast<uint16_t*>(info->first) ==
wrapper_descriptor.embedder_id_for_garbage_collected);
}
return false;
}
} // namespace
LocalEmbedderHeapTracer::ProcessingScope::ProcessingScope(
LocalEmbedderHeapTracer* tracer)
: tracer_(tracer), wrapper_descriptor_(tracer->wrapper_descriptor()) {
: tracer_(tracer), wrapper_descriptor_(tracer->wrapper_descriptor_) {
DCHECK(!tracer_->cpp_heap_);
wrapper_cache_.reserve(kWrapperCacheSize);
}
LocalEmbedderHeapTracer::ProcessingScope::~ProcessingScope() {
DCHECK(!tracer_->cpp_heap_);
if (!wrapper_cache_.empty()) {
if (tracer_->cpp_heap_)
tracer_->cpp_heap()->RegisterV8References(std::move(wrapper_cache_));
else
tracer_->remote_tracer_->RegisterV8References(std::move(wrapper_cache_));
tracer_->remote_tracer_->RegisterV8References(std::move(wrapper_cache_));
}
}
......@@ -174,11 +154,9 @@ void LocalEmbedderHeapTracer::ProcessingScope::TracePossibleWrapper(
}
void LocalEmbedderHeapTracer::ProcessingScope::FlushWrapperCacheIfFull() {
DCHECK(!tracer_->cpp_heap_);
if (wrapper_cache_.size() == wrapper_cache_.capacity()) {
if (tracer_->cpp_heap_)
tracer_->cpp_heap()->RegisterV8References(std::move(wrapper_cache_));
else
tracer_->remote_tracer_->RegisterV8References(std::move(wrapper_cache_));
tracer_->remote_tracer_->RegisterV8References(std::move(wrapper_cache_));
wrapper_cache_.clear();
wrapper_cache_.reserve(kWrapperCacheSize);
}
......@@ -213,6 +191,20 @@ void LocalEmbedderHeapTracer::NotifyEmptyEmbedderStack() {
isolate_->global_handles()->NotifyEmptyEmbedderStack();
}
void LocalEmbedderHeapTracer::EmbedderWriteBarrier(Heap* heap,
JSObject js_object) {
DCHECK(InUse());
DCHECK(js_object.IsApiWrapper());
if (cpp_heap_) {
DCHECK_NOT_NULL(heap->mark_compact_collector());
heap->mark_compact_collector()->local_marking_worklists()->PushWrapper(
js_object);
return;
}
LocalEmbedderHeapTracer::ProcessingScope scope(this);
scope.TracePossibleWrapper(js_object);
}
bool DefaultEmbedderRootsHandler::IsRoot(
const v8::TracedReference<v8::Value>& handle) {
return !tracer_ || tracer_->IsRootForNonTracingGC(handle);
......
......@@ -74,6 +74,10 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
WrapperCache wrapper_cache_;
};
static V8_INLINE bool ExtractWrappableInfo(Isolate*, JSObject,
const WrapperDescriptor&,
WrapperInfo*);
explicit LocalEmbedderHeapTracer(Isolate* isolate) : isolate_(isolate) {}
~LocalEmbedderHeapTracer() {
......@@ -91,6 +95,7 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
void SetRemoteTracer(EmbedderHeapTracer* tracer);
void SetCppHeap(CppHeap* cpp_heap);
void PrepareForTrace(EmbedderHeapTracer::TraceFlags flags);
void TracePrologue(EmbedderHeapTracer::TraceFlags flags);
void TraceEpilogue();
void EnterFinalPause();
......@@ -149,6 +154,8 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
return embedder_stack_state_;
}
void EmbedderWriteBarrier(Heap*, JSObject);
private:
static constexpr size_t kEmbedderAllocatedThreshold = 128 * KB;
......
......@@ -52,10 +52,9 @@ void WriteBarrier::MarkingSlowFromInternalFields(Heap* heap, JSObject host) {
// We are not checking the mark bits of host here as (a) there's no
// synchronization with the marker and (b) we are writing into a live object
// (independent of the mark bits).
if (!heap->local_embedder_heap_tracer()->InUse()) return;
LocalEmbedderHeapTracer::ProcessingScope scope(
heap->local_embedder_heap_tracer());
scope.TracePossibleWrapper(host);
auto* local_embedder_heap_tracer = heap->local_embedder_heap_tracer();
if (!local_embedder_heap_tracer->InUse()) return;
local_embedder_heap_tracer->EmbedderWriteBarrier(heap, host);
}
void WriteBarrier::MarkingSlow(Heap* heap, Code host, RelocInfo* reloc_info,
......
......@@ -235,6 +235,16 @@ void IncrementalMarking::StartMarking() {
is_compacting_ = collector_->StartCompaction(
MarkCompactCollector::StartCompactionMode::kIncremental);
auto embedder_flags = heap_->flags_for_embedder_tracer();
{
TRACE_GC(heap()->tracer(),
GCTracer::Scope::MC_INCREMENTAL_EMBEDDER_PROLOGUE);
// PrepareForTrace should be called before visitor initialization in
// StartMarking. It is only used with CppHeap.
heap_->local_embedder_heap_tracer()->PrepareForTrace(embedder_flags);
}
collector_->StartMarking();
SetState(MARKING);
......@@ -261,8 +271,7 @@ void IncrementalMarking::StartMarking() {
// marking (including write barriers) is fully set up.
TRACE_GC(heap()->tracer(),
GCTracer::Scope::MC_INCREMENTAL_EMBEDDER_PROLOGUE);
heap_->local_embedder_heap_tracer()->TracePrologue(
heap_->flags_for_embedder_tracer());
heap_->local_embedder_heap_tracer()->TracePrologue(embedder_flags);
}
heap_->InvokeIncrementalMarkingEpilogueCallbacks();
......@@ -533,13 +542,15 @@ StepResult IncrementalMarking::EmbedderStep(double expected_duration_ms,
LocalEmbedderHeapTracer* local_tracer = heap_->local_embedder_heap_tracer();
const double start = heap_->MonotonicallyIncreasingTimeInMs();
const double deadline = start + expected_duration_ms;
bool empty_worklist;
{
bool empty_worklist = true;
if (local_marking_worklists()->PublishWrapper()) {
DCHECK(local_marking_worklists()->IsWrapperEmpty());
} else {
// Cannot directly publish wrapper objects.
LocalEmbedderHeapTracer::ProcessingScope scope(local_tracer);
HeapObject object;
size_t cnt = 0;
empty_worklist = true;
while (local_marking_worklists()->PopEmbedder(&object)) {
while (local_marking_worklists()->PopWrapper(&object)) {
scope.TracePossibleWrapper(JSObject::cast(object));
if (++cnt == kObjectsToProcessBeforeDeadlineCheck) {
if (deadline <= heap_->MonotonicallyIncreasingTimeInMs()) {
......
......@@ -572,8 +572,11 @@ void MarkCompactCollector::StartMarking() {
}
code_flush_mode_ = Heap::GetCodeFlushMode(isolate());
marking_worklists()->CreateContextWorklists(contexts);
local_marking_worklists_ =
std::make_unique<MarkingWorklists::Local>(marking_worklists());
auto* cpp_heap = CppHeap::From(heap_->cpp_heap());
local_marking_worklists_ = std::make_unique<MarkingWorklists::Local>(
marking_worklists(), cpp_heap
? cpp_heap->CreateCppMarkingState()
: MarkingWorklists::Local::kNoCppMarkingState);
local_weak_objects_ = std::make_unique<WeakObjects::Local>(weak_objects());
marking_visitor_ = std::make_unique<MarkingVisitor>(
marking_state(), local_marking_worklists(), local_weak_objects_.get(),
......@@ -926,8 +929,11 @@ void MarkCompactCollector::Prepare() {
if (!was_marked_incrementally_) {
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_EMBEDDER_PROLOGUE);
heap_->local_embedder_heap_tracer()->TracePrologue(
heap_->flags_for_embedder_tracer());
auto embedder_flags = heap_->flags_for_embedder_tracer();
// PrepareForTrace should be called before visitor initialization in
// StartMarking.
heap_->local_embedder_heap_tracer()->PrepareForTrace(embedder_flags);
heap_->local_embedder_heap_tracer()->TracePrologue(embedder_flags);
}
StartCompaction(StartCompactionMode::kAtomic);
StartMarking();
......@@ -1931,7 +1937,7 @@ bool MarkCompactCollector::ProcessEphemeronsUntilFixpoint() {
} while (another_ephemeron_iteration_main_thread ||
heap()->concurrent_marking()->another_ephemeron_iteration() ||
!local_marking_worklists()->IsEmpty() ||
!local_marking_worklists()->IsEmbedderEmpty() ||
!local_marking_worklists()->IsWrapperEmpty() ||
!heap()->local_embedder_heap_tracer()->IsRemoteTracingDone());
CHECK(local_marking_worklists()->IsEmpty());
......@@ -2053,7 +2059,7 @@ void MarkCompactCollector::ProcessEphemeronsLinear() {
// is necessary.
work_to_do = !local_marking_worklists()->IsEmpty() ||
!local_marking_worklists()->IsEmbedderEmpty() ||
!local_marking_worklists()->IsWrapperEmpty() ||
!heap()->local_embedder_heap_tracer()->IsRemoteTracingDone();
CHECK(local_weak_objects()
->discovered_ephemerons_local.IsLocalAndGlobalEmpty());
......@@ -2074,11 +2080,14 @@ void MarkCompactCollector::ProcessEphemeronsLinear() {
void MarkCompactCollector::PerformWrapperTracing() {
if (heap_->local_embedder_heap_tracer()->InUse()) {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_EMBEDDER_TRACING);
{
if (local_marking_worklists()->PublishWrapper()) {
DCHECK(local_marking_worklists()->IsWrapperEmpty());
} else {
// Cannot directly publish wrapper objects.
LocalEmbedderHeapTracer::ProcessingScope scope(
heap_->local_embedder_heap_tracer());
HeapObject object;
while (local_marking_worklists()->PopEmbedder(&object)) {
while (local_marking_worklists()->PopWrapper(&object)) {
scope.TracePossibleWrapper(JSObject::cast(object));
}
}
......@@ -2303,8 +2312,8 @@ void MarkCompactCollector::MarkLiveObjects() {
PerformWrapperTracing();
DrainMarkingWorklist();
} while (!heap_->local_embedder_heap_tracer()->IsRemoteTracingDone() ||
!local_marking_worklists()->IsEmbedderEmpty());
DCHECK(local_marking_worklists()->IsEmbedderEmpty());
!local_marking_worklists()->IsWrapperEmpty());
DCHECK(local_marking_worklists()->IsWrapperEmpty());
DCHECK(local_marking_worklists()->IsEmpty());
}
......@@ -2347,7 +2356,7 @@ void MarkCompactCollector::MarkLiveObjects() {
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WEAK_CLOSURE_HARMONY);
ProcessEphemeronMarking();
DCHECK(local_marking_worklists()->IsEmbedderEmpty());
DCHECK(local_marking_worklists()->IsWrapperEmpty());
DCHECK(local_marking_worklists()->IsEmpty());
}
......
......@@ -268,7 +268,7 @@ int MarkingVisitorBase<ConcreteVisitor,
if (size && is_embedder_tracing_enabled_) {
// Success: The object needs to be processed for embedder references on
// the main thread.
local_marking_worklists_->PushEmbedder(object);
local_marking_worklists_->PushWrapper(object);
}
return size;
}
......
......@@ -7,7 +7,9 @@
#include <unordered_map>
#include <vector>
#include "src/heap/cppgc-js/cpp-marking-state-inl.h"
#include "src/heap/marking-worklist.h"
#include "src/objects/js-objects-inl.h"
namespace v8 {
namespace internal {
......@@ -16,7 +18,7 @@ template <typename Callback>
void MarkingWorklists::Update(Callback callback) {
shared_.Update(callback);
on_hold_.Update(callback);
embedder_.Update(callback);
wrapper_.Update(callback);
other_.Update(callback);
for (auto cw : context_worklists_) {
if (cw.context == kSharedContext || cw.context == kOtherContext) {
......@@ -45,12 +47,17 @@ bool MarkingWorklists::Local::PopOnHold(HeapObject* object) {
return on_hold_.Pop(object);
}
void MarkingWorklists::Local::PushEmbedder(HeapObject object) {
embedder_.Push(object);
void MarkingWorklists::Local::PushWrapper(HeapObject object) {
if (cpp_marking_state_) {
cpp_marking_state_->MarkAndPush(JSObject::cast(object));
} else {
wrapper_.Push(object);
}
}
bool MarkingWorklists::Local::PopEmbedder(HeapObject* object) {
return embedder_.Pop(object);
bool MarkingWorklists::Local::PopWrapper(HeapObject* object) {
DCHECK(!cpp_marking_state_);
return wrapper_.Pop(object);
}
Address MarkingWorklists::Local::SwitchToContext(Address context) {
......@@ -72,6 +79,12 @@ void MarkingWorklists::Local::SwitchToContext(
active_context_ = context;
}
bool MarkingWorklists::Local::PublishWrapper() {
if (!cpp_marking_state_) return false;
cpp_marking_state_->Publish();
return true;
}
} // namespace internal
} // namespace v8
......
......@@ -5,8 +5,11 @@
#include "src/heap/marking-worklist.h"
#include <algorithm>
#include <cstddef>
#include <map>
#include "src/heap/cppgc-js/cpp-heap.h"
#include "src/heap/cppgc-js/cpp-marking-state.h"
#include "src/heap/marking-worklist-inl.h"
#include "src/objects/heap-object-inl.h"
#include "src/objects/heap-object.h"
......@@ -29,7 +32,7 @@ MarkingWorklists::~MarkingWorklists() {
void MarkingWorklists::Clear() {
shared_.Clear();
on_hold_.Clear();
embedder_.Clear();
wrapper_.Clear();
other_.Clear();
for (auto cw : context_worklists_) {
if (cw.context == kSharedContext || cw.context == kOtherContext) {
......@@ -94,13 +97,17 @@ void MarkingWorklists::PrintWorklist(const char* worklist_name,
#endif
}
const Address MarkingWorklists::Local::kSharedContext;
const Address MarkingWorklists::Local::kOtherContext;
constexpr Address MarkingWorklists::Local::kSharedContext;
constexpr Address MarkingWorklists::Local::kOtherContext;
constexpr std::nullptr_t MarkingWorklists::Local::kNoCppMarkingState;
MarkingWorklists::Local::Local(MarkingWorklists* global)
MarkingWorklists::Local::Local(
MarkingWorklists* global,
std::unique_ptr<CppMarkingState> cpp_marking_state)
: on_hold_(global->on_hold()),
embedder_(global->embedder()),
is_per_context_mode_(false) {
wrapper_(global->wrapper()),
is_per_context_mode_(false),
cpp_marking_state_(std::move(cpp_marking_state)) {
if (global->context_worklists().empty()) {
MarkingWorklist::Local shared(global->shared());
active_ = std::move(shared);
......@@ -133,7 +140,7 @@ MarkingWorklists::Local::~Local() {
void MarkingWorklists::Local::Publish() {
active_.Publish();
on_hold_.Publish();
embedder_.Publish();
wrapper_.Publish();
if (is_per_context_mode_) {
for (auto& cw : worklist_by_context_) {
if (cw.first != active_context_) {
......@@ -141,6 +148,7 @@ void MarkingWorklists::Local::Publish() {
}
}
}
PublishWrapper();
}
bool MarkingWorklists::Local::IsEmpty() {
......@@ -163,8 +171,12 @@ bool MarkingWorklists::Local::IsEmpty() {
return true;
}
bool MarkingWorklists::Local::IsEmbedderEmpty() const {
return embedder_.IsLocalEmpty() && embedder_.IsGlobalEmpty();
bool MarkingWorklists::Local::IsWrapperEmpty() const {
if (cpp_marking_state_) {
DCHECK(wrapper_.IsLocalAndGlobalEmpty());
return cpp_marking_state_->IsLocalEmpty();
}
return wrapper_.IsLocalAndGlobalEmpty();
}
void MarkingWorklists::Local::ShareWork() {
......
......@@ -5,6 +5,8 @@
#ifndef V8_HEAP_MARKING_WORKLIST_H_
#define V8_HEAP_MARKING_WORKLIST_H_
#include <cstddef>
#include <memory>
#include <unordered_map>
#include <vector>
......@@ -15,11 +17,14 @@
namespace v8 {
namespace internal {
class CppMarkingState;
class JSObject;
// The index of the main thread task used by concurrent/parallel GC.
const int kMainThreadTask = 0;
using MarkingWorklist = ::heap::base::Worklist<HeapObject, 64>;
using EmbedderTracingWorklist = ::heap::base::Worklist<HeapObject, 16>;
using WrapperTracingWorklist = ::heap::base::Worklist<HeapObject, 16>;
// We piggyback on marking to compute object sizes per native context that is
// needed for the new memory measurement API. The algorithm works as follows:
......@@ -82,7 +87,7 @@ class V8_EXPORT_PRIVATE MarkingWorklists {
MarkingWorklist* shared() { return &shared_; }
MarkingWorklist* on_hold() { return &on_hold_; }
EmbedderTracingWorklist* embedder() { return &embedder_; }
WrapperTracingWorklist* wrapper() { return &wrapper_; }
// A list of (context, worklist) pairs that was set up at the start of
// marking by CreateContextWorklists.
......@@ -115,7 +120,7 @@ class V8_EXPORT_PRIVATE MarkingWorklists {
// Worklist for objects that potentially require embedder tracing, i.e.,
// these objects need to be handed over to the embedder to find the full
// transitive closure.
EmbedderTracingWorklist embedder_;
WrapperTracingWorklist wrapper_;
// Per-context worklists.
std::vector<ContextWorklistPair> context_worklists_;
......@@ -137,10 +142,13 @@ class V8_EXPORT_PRIVATE MarkingWorklists {
// been moved to active_.
class V8_EXPORT_PRIVATE MarkingWorklists::Local {
public:
static const Address kSharedContext = MarkingWorklists::kSharedContext;
static const Address kOtherContext = MarkingWorklists::kOtherContext;
static constexpr Address kSharedContext = MarkingWorklists::kSharedContext;
static constexpr Address kOtherContext = MarkingWorklists::kOtherContext;
static constexpr std::nullptr_t kNoCppMarkingState = nullptr;
explicit Local(MarkingWorklists* global);
Local(
MarkingWorklists* global,
std::unique_ptr<CppMarkingState> cpp_marking_state = kNoCppMarkingState);
~Local();
inline void Push(HeapObject object);
......@@ -149,12 +157,12 @@ class V8_EXPORT_PRIVATE MarkingWorklists::Local {
inline void PushOnHold(HeapObject object);
inline bool PopOnHold(HeapObject* object);
inline void PushEmbedder(HeapObject object);
inline bool PopEmbedder(HeapObject* object);
inline void PushWrapper(HeapObject object);
inline bool PopWrapper(HeapObject* object);
void Publish();
bool IsEmpty();
bool IsEmbedderEmpty() const;
bool IsWrapperEmpty() const;
// Publishes the local active marking worklist if its global worklist is
// empty. In the per-context marking mode it also publishes the shared
// worklist.
......@@ -162,6 +170,10 @@ class V8_EXPORT_PRIVATE MarkingWorklists::Local {
// Merges the on-hold worklist to the shared worklist.
void MergeOnHold();
// Returns true if wrapper objects could be directly pushed. Otherwise,
// objects need to be processed one by one.
inline bool PublishWrapper();
// Returns the context of the active worklist.
Address Context() const { return active_context_; }
inline Address SwitchToContext(Address context);
......@@ -174,13 +186,15 @@ class V8_EXPORT_PRIVATE MarkingWorklists::Local {
inline void SwitchToContext(Address context,
MarkingWorklist::Local* worklist);
MarkingWorklist::Local on_hold_;
EmbedderTracingWorklist::Local embedder_;
WrapperTracingWorklist::Local wrapper_;
MarkingWorklist::Local active_;
Address active_context_;
MarkingWorklist::Local* active_owner_;
bool is_per_context_mode_;
std::unordered_map<Address, std::unique_ptr<MarkingWorklist::Local>>
worklist_by_context_;
std::unique_ptr<CppMarkingState> cpp_marking_state_;
};
} // namespace internal
......
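
For context, the consumer-side pattern for the wrapper worklist (as used
in IncrementalMarking::EmbedderStep and
MarkCompactCollector::PerformWrapperTracing in this CL) becomes roughly:

    // Sketch only; local_marking_worklists() and local_tracer stand in
    // for the members used in the surrounding code.
    if (local_marking_worklists()->PublishWrapper()) {
      // CppMarkingState path: wrappers were pushed straight to Oilpan,
      // so the V8-side wrapper worklist must be empty.
      DCHECK(local_marking_worklists()->IsWrapperEmpty());
    } else {
      // EmbedderHeapTracer path: hand objects over one by one.
      LocalEmbedderHeapTracer::ProcessingScope scope(local_tracer);
      HeapObject object;
      while (local_marking_worklists()->PopWrapper(&object)) {
        scope.TracePossibleWrapper(JSObject::cast(object));
      }
    }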
......@@ -5208,7 +5208,7 @@ Maybe<bool> JSObject::HasRealNamedCallbackProperty(Handle<JSObject> object,
: Nothing<bool>();
}
bool JSObject::IsApiWrapper() {
bool JSObject::IsApiWrapper() const {
// These object types can carry information relevant for embedders. The
// *_API_* types are generated through templates which can have embedder
// fields. The other types have their embedder fields added at compile time.
......@@ -5222,7 +5222,7 @@ bool JSObject::IsApiWrapper() {
InstanceTypeChecker::IsJSApiObject(instance_type);
}
bool JSObject::IsDroppableApiWrapper() {
bool JSObject::IsDroppableApiWrapper() const {
auto instance_type = map().instance_type();
return InstanceTypeChecker::IsJSApiObject(instance_type) ||
instance_type == JS_SPECIAL_API_OBJECT_TYPE;
......
......@@ -579,8 +579,8 @@ class JSObject : public TorqueGeneratedJSObject<JSObject, JSReceiver> {
// embedder fields as well as the number of embedder fields.
// The |function_has_prototype_slot| parameter is needed only for
// JSFunction objects.
static int GetHeaderSize(InstanceType instance_type,
bool function_has_prototype_slot = false);
static V8_EXPORT_PRIVATE int GetHeaderSize(
InstanceType instance_type, bool function_has_prototype_slot = false);
static inline int GetHeaderSize(Map map);
static inline int GetEmbedderFieldsStartOffset(Map map);
......@@ -598,10 +598,10 @@ class JSObject : public TorqueGeneratedJSObject<JSObject, JSReceiver> {
// garbage collection treatment.
// TODO(mlippautz): Make check exact and replace the pattern match in
// Heap::TracePossibleWrapper.
bool IsApiWrapper();
V8_EXPORT_PRIVATE bool IsApiWrapper() const;
// Same as IsApiWrapper() but also allow dropping the wrapper on minor GCs.
bool IsDroppableApiWrapper();
bool IsDroppableApiWrapper() const;
// Returns a new map with all transitions dropped from the object's current
// map and the ElementsKind set.
......
......@@ -70,8 +70,9 @@ class EphemeronPairTest : public testing::TestWithHeap {
}
void InitializeMarker(HeapBase& heap, cppgc::Platform* platform) {
marker_ = MarkerFactory::CreateAndStartMarking<Marker>(
heap, platform, IncrementalPreciseMarkingConfig);
marker_ = std::make_unique<Marker>(heap, platform,
IncrementalPreciseMarkingConfig);
marker_->StartMarking();
}
Marker* marker() const { return marker_.get(); }
......
......@@ -40,8 +40,8 @@ class MarkerTest : public testing::TestWithHeap {
void InitializeMarker(HeapBase& heap, cppgc::Platform* platform,
MarkingConfig config) {
marker_ =
MarkerFactory::CreateAndStartMarking<Marker>(heap, platform, config);
marker_ = std::make_unique<Marker>(heap, platform, config);
marker_->StartMarking();
}
Marker* marker() const { return marker_.get(); }
......@@ -421,8 +421,8 @@ class IncrementalMarkingTest : public testing::TestWithHeap {
void InitializeMarker(HeapBase& heap, cppgc::Platform* platform,
MarkingConfig config) {
GetMarkerRef() =
MarkerFactory::CreateAndStartMarking<Marker>(heap, platform, config);
GetMarkerRef() = std::make_unique<Marker>(heap, platform, config);
GetMarkerRef()->StartMarking();
}
MarkerBase* marker() const { return Heap::From(GetHeap())->marker(); }
......
......@@ -23,8 +23,10 @@ namespace {
class MarkingVisitorTest : public testing::TestWithHeap {
public:
MarkingVisitorTest()
: marker_(MarkerFactory::CreateAndStartMarking<Marker>(
*Heap::From(GetHeap()), GetPlatformHandle().get())) {}
: marker_(std::make_unique<Marker>(*Heap::From(GetHeap()),
GetPlatformHandle().get())) {
marker_->StartMarking();
}
~MarkingVisitorTest() override { marker_->ClearAllWorklistsForTesting(); }
Marker* GetMarker() { return marker_.get(); }
......@@ -51,7 +53,7 @@ class TestMarkingVisitor : public MutatorMarkingVisitor {
marker->MutatorMarkingStateForTesting()) {}
~TestMarkingVisitor() { marking_state_.Publish(); }
MarkingStateBase& marking_state() { return marking_state_; }
BasicMarkingState& marking_state() { return marking_state_; }
};
} // namespace
......
......@@ -68,8 +68,9 @@ class V8_NODISCARD CppgcTracingScopesTest : public testing::TestWithHeap {
Config config = {Config::CollectionType::kMajor,
Config::StackState::kNoHeapPointers,
Config::MarkingType::kIncremental};
GetMarkerRef() = MarkerFactory::CreateAndStartMarking<Marker>(
GetMarkerRef() = std::make_unique<Marker>(
Heap::From(GetHeap())->AsBase(), GetPlatformHandle().get(), config);
GetMarkerRef()->StartMarking();
DelegatingTracingControllerImpl::check_expectations = true;
}
......
......@@ -24,8 +24,9 @@ class WeakContainerTest : public testing::TestWithHeap {
Config config = {Config::CollectionType::kMajor,
Config::StackState::kNoHeapPointers,
Config::MarkingType::kIncremental};
GetMarkerRef() = MarkerFactory::CreateAndStartMarking<Marker>(
GetMarkerRef() = std::make_unique<Marker>(
Heap::From(GetHeap())->AsBase(), GetPlatformHandle().get(), config);
GetMarkerRef()->StartMarking();
}
void FinishMarking(Config::StackState stack_state) {
......
......@@ -162,10 +162,11 @@ class WriteBarrierTest : public testing::TestWithHeap {
public:
WriteBarrierTest() : internal_heap_(Heap::From(GetHeap())) {
DCHECK_NULL(GetMarkerRef().get());
GetMarkerRef() = MarkerFactory::CreateAndStartMarking<Marker>(
*internal_heap_, GetPlatformHandle().get(),
IncrementalMarkingScope::kIncrementalConfig);
GetMarkerRef() =
std::make_unique<Marker>(*internal_heap_, GetPlatformHandle().get(),
IncrementalMarkingScope::kIncrementalConfig);
marker_ = GetMarkerRef().get();
marker_->StartMarking();
}
~WriteBarrierTest() override {
......
......@@ -45,9 +45,9 @@ TEST_F(MarkingWorklistTest, PushPopEmbedder) {
MarkingWorklists::Local worklists(&holder);
HeapObject pushed_object =
ReadOnlyRoots(i_isolate()->heap()).undefined_value();
worklists.PushEmbedder(pushed_object);
worklists.PushWrapper(pushed_object);
HeapObject popped_object;
EXPECT_TRUE(worklists.PopEmbedder(&popped_object));
EXPECT_TRUE(worklists.PopWrapper(&popped_object));
EXPECT_EQ(popped_object, pushed_object);
}
......