Commit b5979eaa authored by Omer Katz, committed by Commit Bot

Reland "cppgc: Port backing store compaction."

This is a reland of 90ea9b35

Original change's description:
> cppgc: Port backing store compaction.
>
> This CL ports the existing backing store compaction algorithm from
> blink. It does not attempt to improve on the existing algorithm.
>
> Currently only unified heap uses the compaction implementation. It is
> never triggered through standalone GCs.
>
> The compaction implementation resides within an internal "subtle" namespace.
>
> Bug: v8:10990
> Change-Id: I4aa781db1b711e7aafc34234c4fb142de84394d7
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2485228
> Commit-Queue: Omer Katz <omerkatz@chromium.org>
> Reviewed-by: Anton Bikineev <bikineev@chromium.org>
> Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#70714}

Bug: v8:10990
Change-Id: I527c2042a26648d058bfe4d355527cce9a3eeadc
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2492331
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#70736}
parent 35382590
...@@ -4385,6 +4385,10 @@ v8_source_set("cppgc_base") { ...@@ -4385,6 +4385,10 @@ v8_source_set("cppgc_base") {
"include/cppgc/visitor.h", "include/cppgc/visitor.h",
"include/v8config.h", "include/v8config.h",
"src/heap/cppgc/allocation.cc", "src/heap/cppgc/allocation.cc",
"src/heap/cppgc/compaction-worklists.cc",
"src/heap/cppgc/compaction-worklists.h",
"src/heap/cppgc/compactor.cc",
"src/heap/cppgc/compactor.h",
"src/heap/cppgc/concurrent-marker.cc", "src/heap/cppgc/concurrent-marker.cc",
"src/heap/cppgc/concurrent-marker.h", "src/heap/cppgc/concurrent-marker.h",
"src/heap/cppgc/free-list.cc", "src/heap/cppgc/free-list.cc",
......
...@@ -14,8 +14,6 @@ struct CustomSpaceIndex { ...@@ -14,8 +14,6 @@ struct CustomSpaceIndex {
size_t value; size_t value;
}; };
enum class CustomSpaceCompactability { kNotCompactable, kCompactable };
/** /**
* Top-level base class for custom spaces. Users must inherit from CustomSpace * Top-level base class for custom spaces. Users must inherit from CustomSpace
* below. * below.
...@@ -66,6 +64,28 @@ struct SpaceTrait { ...@@ -66,6 +64,28 @@ struct SpaceTrait {
using Space = void; using Space = void;
}; };
namespace internal {
template <typename CustomSpace>
struct IsAllocatedOnCompactableSpaceImpl {
static constexpr bool value = CustomSpace::kSupportsCompaction;
};
template <>
struct IsAllocatedOnCompactableSpaceImpl<void> {
// Non-custom spaces are by default not compactable.
static constexpr bool value = false;
};
template <typename T>
struct IsAllocatedOnCompactableSpace {
public:
static constexpr bool value =
IsAllocatedOnCompactableSpaceImpl<typename SpaceTrait<T>::Space>::value;
};
} // namespace internal
} // namespace cppgc } // namespace cppgc
#endif // INCLUDE_CPPGC_CUSTOM_SPACE_H_ #endif // INCLUDE_CPPGC_CUSTOM_SPACE_H_
...@@ -24,7 +24,7 @@ class MemberBase { ...@@ -24,7 +24,7 @@ class MemberBase {
MemberBase() = default; MemberBase() = default;
explicit MemberBase(void* value) : raw_(value) {} explicit MemberBase(void* value) : raw_(value) {}
void* const* GetRawSlot() const { return &raw_; } void** GetRawSlot() const { return &raw_; }
void* GetRaw() const { return raw_; } void* GetRaw() const { return raw_; }
void SetRaw(void* value) { raw_ = value; } void SetRaw(void* value) { raw_ = value; }
...@@ -178,6 +178,10 @@ class BasicMember final : private MemberBase, private CheckingPolicy { ...@@ -178,6 +178,10 @@ class BasicMember final : private MemberBase, private CheckingPolicy {
return result; return result;
} }
const T** GetSlotForTesting() const {
return reinterpret_cast<const T**>(const_cast<const void**>(GetRawSlot()));
}
private: private:
T* GetRawAtomic() const { T* GetRawAtomic() const {
return static_cast<T*>(MemberBase::GetRawAtomic()); return static_cast<T*>(MemberBase::GetRawAtomic());
......
...@@ -5,6 +5,7 @@ ...@@ -5,6 +5,7 @@
#ifndef INCLUDE_CPPGC_VISITOR_H_ #ifndef INCLUDE_CPPGC_VISITOR_H_
#define INCLUDE_CPPGC_VISITOR_H_ #define INCLUDE_CPPGC_VISITOR_H_
#include "cppgc/custom-space.h"
#include "cppgc/ephemeron-pair.h" #include "cppgc/ephemeron-pair.h"
#include "cppgc/garbage-collected.h" #include "cppgc/garbage-collected.h"
#include "cppgc/internal/logging.h" #include "cppgc/internal/logging.h"
...@@ -13,6 +14,7 @@ ...@@ -13,6 +14,7 @@
#include "cppgc/member.h" #include "cppgc/member.h"
#include "cppgc/source-location.h" #include "cppgc/source-location.h"
#include "cppgc/trace-trait.h" #include "cppgc/trace-trait.h"
#include "cppgc/type-traits.h"
namespace cppgc { namespace cppgc {
...@@ -26,7 +28,6 @@ class BasicPersistent; ...@@ -26,7 +28,6 @@ class BasicPersistent;
class ConservativeTracingVisitor; class ConservativeTracingVisitor;
class VisitorBase; class VisitorBase;
class VisitorFactory; class VisitorFactory;
} // namespace internal } // namespace internal
using WeakCallback = void (*)(const LivenessBroker&, const void*); using WeakCallback = void (*)(const LivenessBroker&, const void*);
...@@ -82,6 +83,8 @@ class V8_EXPORT Visitor { ...@@ -82,6 +83,8 @@ class V8_EXPORT Visitor {
static_assert(sizeof(T), "Pointee type must be fully defined."); static_assert(sizeof(T), "Pointee type must be fully defined.");
static_assert(internal::IsGarbageCollectedType<T>::value, static_assert(internal::IsGarbageCollectedType<T>::value,
"T must be GarbageCollected or GarbageCollectedMixin type"); "T must be GarbageCollected or GarbageCollectedMixin type");
static_assert(!internal::IsAllocatedOnCompactableSpace<T>::value,
"Weak references to compactable objects are not allowed");
const T* value = weak_member.GetRawAtomic(); const T* value = weak_member.GetRawAtomic();
...@@ -176,6 +179,22 @@ class V8_EXPORT Visitor { ...@@ -176,6 +179,22 @@ class V8_EXPORT Visitor {
data); data);
} }
/**
* Registers a slot containing a reference to an object allocated on a
 * compactable space. Such references may be arbitrarily moved by the GC.
*
* \param slot location of reference to object that might be moved by the GC.
*/
template <typename T>
void RegisterMovableReference(const T** slot) {
static_assert(internal::IsAllocatedOnCompactableSpace<T>::value,
"Only references to objects allocated on compactable spaces "
"should be registered as movable slots.");
static_assert(!internal::IsGarbageCollectedMixinTypeV<T>,
"Mixin types do not support compaction.");
HandleMovableReference(reinterpret_cast<const void**>(slot));
}
/** /**
* Registers a weak callback that is invoked during garbage collection. * Registers a weak callback that is invoked during garbage collection.
* *
...@@ -214,6 +233,7 @@ class V8_EXPORT Visitor { ...@@ -214,6 +233,7 @@ class V8_EXPORT Visitor {
virtual void VisitWeakContainer(const void* self, TraceDescriptor strong_desc, virtual void VisitWeakContainer(const void* self, TraceDescriptor strong_desc,
TraceDescriptor weak_desc, TraceDescriptor weak_desc,
WeakCallback callback, const void* data) {} WeakCallback callback, const void* data) {}
virtual void HandleMovableReference(const void**) {}
private: private:
template <typename T, void (T::*method)(const LivenessBroker&)> template <typename T, void (T::*method)(const LivenessBroker&)>
...@@ -261,6 +281,8 @@ class V8_EXPORT Visitor { ...@@ -261,6 +281,8 @@ class V8_EXPORT Visitor {
static_assert(internal::IsGarbageCollectedType<PointeeType>::value, static_assert(internal::IsGarbageCollectedType<PointeeType>::value,
"Persistent's pointee type must be GarbageCollected or " "Persistent's pointee type must be GarbageCollected or "
"GarbageCollectedMixin"); "GarbageCollectedMixin");
static_assert(!internal::IsAllocatedOnCompactableSpace<PointeeType>::value,
"Weak references to compactable objects are not allowed");
VisitWeakRoot(p.Get(), TraceTrait<PointeeType>::GetTraceDescriptor(p.Get()), VisitWeakRoot(p.Get(), TraceTrait<PointeeType>::GetTraceDescriptor(p.Get()),
&HandleWeak<WeakPersistent>, &p, loc); &HandleWeak<WeakPersistent>, &p, loc);
} }
......
...@@ -177,6 +177,12 @@ void CppHeap::TracePrologue(TraceFlags flags) { ...@@ -177,6 +177,12 @@ void CppHeap::TracePrologue(TraceFlags flags) {
UnifiedHeapMarker::MarkingConfig::CollectionType::kMajor, UnifiedHeapMarker::MarkingConfig::CollectionType::kMajor,
cppgc::Heap::StackState::kNoHeapPointers, cppgc::Heap::StackState::kNoHeapPointers,
UnifiedHeapMarker::MarkingConfig::MarkingType::kIncrementalAndConcurrent}; UnifiedHeapMarker::MarkingConfig::MarkingType::kIncrementalAndConcurrent};
if ((flags == TraceFlags::kReduceMemory) || (flags == TraceFlags::kForced)) {
// Only enable compaction when in a memory reduction garbage collection as
// it may significantly increase the final garbage collection pause.
compactor_.InitializeIfShouldCompact(marking_config.marking_type,
marking_config.stack_state);
}
marker_ = marker_ =
cppgc::internal::MarkerFactory::CreateAndStartMarking<UnifiedHeapMarker>( cppgc::internal::MarkerFactory::CreateAndStartMarking<UnifiedHeapMarker>(
*isolate_.heap(), AsBase(), platform_.get(), marking_config); *isolate_.heap(), AsBase(), platform_.get(), marking_config);
...@@ -195,6 +201,11 @@ bool CppHeap::IsTracingDone() { return marking_done_; } ...@@ -195,6 +201,11 @@ bool CppHeap::IsTracingDone() { return marking_done_; }
void CppHeap::EnterFinalPause(EmbedderStackState stack_state) { void CppHeap::EnterFinalPause(EmbedderStackState stack_state) {
marker_->EnterAtomicPause(stack_state); marker_->EnterAtomicPause(stack_state);
if (compactor_.CancelIfShouldNotCompact(
UnifiedHeapMarker::MarkingConfig::MarkingType::kAtomic,
stack_state)) {
marker_->NotifyCompactionCancelled();
}
} }
void CppHeap::TraceEpilogue(TraceSummary* trace_summary) { void CppHeap::TraceEpilogue(TraceSummary* trace_summary) {
...@@ -213,10 +224,15 @@ void CppHeap::TraceEpilogue(TraceSummary* trace_summary) { ...@@ -213,10 +224,15 @@ void CppHeap::TraceEpilogue(TraceSummary* trace_summary) {
UnifiedHeapMarkingVerifier verifier(*this); UnifiedHeapMarkingVerifier verifier(*this);
verifier.Run(cppgc::Heap::StackState::kNoHeapPointers); verifier.Run(cppgc::Heap::StackState::kNoHeapPointers);
#endif #endif
cppgc::internal::Sweeper::SweepingConfig::CompactableSpaceHandling
compactable_space_handling = compactor_.CompactSpacesIfEnabled();
{ {
NoGCScope no_gc(*this); NoGCScope no_gc(*this);
sweeper().Start( const cppgc::internal::Sweeper::SweepingConfig sweeping_config{
cppgc::internal::Sweeper::Config::kIncrementalAndConcurrent); cppgc::internal::Sweeper::SweepingConfig::SweepingType::
kIncrementalAndConcurrent,
compactable_space_handling};
sweeper().Start(sweeping_config);
} }
} }
......
...@@ -48,6 +48,10 @@ void UnifiedHeapMarkingVisitorBase::RegisterWeakCallback(WeakCallback callback, ...@@ -48,6 +48,10 @@ void UnifiedHeapMarkingVisitorBase::RegisterWeakCallback(WeakCallback callback,
marking_state_.RegisterWeakCallback(callback, object); marking_state_.RegisterWeakCallback(callback, object);
} }
void UnifiedHeapMarkingVisitorBase::HandleMovableReference(const void** slot) {
marking_state_.RegisterMovableReference(slot);
}
namespace { namespace {
void DeferredTraceJSMember(cppgc::Visitor* visitor, const void* ref) { void DeferredTraceJSMember(cppgc::Visitor* visitor, const void* ref) {
static_cast<JSVisitor*>(visitor)->Trace( static_cast<JSVisitor*>(visitor)->Trace(
......
...@@ -48,6 +48,7 @@ class V8_EXPORT_PRIVATE UnifiedHeapMarkingVisitorBase : public JSVisitor { ...@@ -48,6 +48,7 @@ class V8_EXPORT_PRIVATE UnifiedHeapMarkingVisitorBase : public JSVisitor {
TraceDescriptor weak_desc, WeakCallback callback, TraceDescriptor weak_desc, WeakCallback callback,
const void* data) final; const void* data) final;
void RegisterWeakCallback(WeakCallback, const void*) final; void RegisterWeakCallback(WeakCallback, const void*) final;
void HandleMovableReference(const void**) final;
// JS handling. // JS handling.
void Visit(const internal::JSMemberBase& ref) final; void Visit(const internal::JSMemberBase& ref) final;
......
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/cppgc/compaction-worklists.h"
namespace cppgc {
namespace internal {
// Discards every recorded movable slot. Test-only helper used to reset
// compaction state between test-driven GC cycles.
void CompactionWorklists::ClearForTesting() {
  movable_slots_worklist_.Clear();
}
}  // namespace internal
}  // namespace cppgc
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_CPPGC_COMPACTION_WORKLISTS_H_
#define V8_HEAP_CPPGC_COMPACTION_WORKLISTS_H_
#include <unordered_set>
#include "src/heap/base/worklist.h"
namespace cppgc {
namespace internal {
// Holds the worklist used by backing-store compaction. During marking,
// visitors record the locations (slots) of references into compactable
// spaces here, so that the compactor can rewrite those slots after moving
// objects.
class CompactionWorklists {
public:
// The raw pointer value held in a movable slot; the worklist stores
// pointers to such slots (i.e. slot addresses), not the pointees.
using MovableReference = const void*;
using MovableReferencesWorklist =
heap::base::Worklist<MovableReference*, 256 /* local entries */>;
MovableReferencesWorklist* movable_slots_worklist() {
return &movable_slots_worklist_;
}
// Drops all recorded slots. Test-only.
void ClearForTesting();
private:
MovableReferencesWorklist movable_slots_worklist_;
};
} // namespace internal
} // namespace cppgc
#endif // V8_HEAP_CPPGC_COMPACTION_WORKLISTS_H_
This diff is collapsed.
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_CPPGC_COMPACTOR_H_
#define V8_HEAP_CPPGC_COMPACTOR_H_
// IWYU: this header names std::unique_ptr and std::vector directly, so it
// must include <memory> and <vector> itself rather than rely on transitive
// includes.
#include <memory>
#include <vector>
#include "src/heap/cppgc/compaction-worklists.h"
#include "src/heap/cppgc/garbage-collector.h"
#include "src/heap/cppgc/raw-heap.h"
namespace cppgc {
namespace internal {
// Orchestrates backing-store compaction for compactable normal-page spaces.
// A cycle is: InitializeIfShouldCompact() when marking starts, optionally
// CancelIfShouldNotCompact() at the atomic pause, and
// CompactSpacesIfEnabled() before sweeping.
class V8_EXPORT_PRIVATE Compactor final {
  using CompactableSpaceHandling =
      Sweeper::SweepingConfig::CompactableSpaceHandling;
 public:
  explicit Compactor(RawHeap&);
  // Compaction must not still be enabled when the compactor is destroyed.
  ~Compactor() { DCHECK(!is_enabled_); }
  // Enables compaction for the current GC cycle if the marking/stack
  // configuration allows it (or it was forced for testing), allocating the
  // compaction worklists as a side effect.
  void InitializeIfShouldCompact(GarbageCollector::Config::MarkingType,
                                 GarbageCollector::Config::StackState);
  // Returns true if compaction was cancelled.
  bool CancelIfShouldNotCompact(GarbageCollector::Config::MarkingType,
                                GarbageCollector::Config::StackState);
  // Runs compaction if enabled and tells the sweeper how to treat the
  // compactable spaces afterwards.
  CompactableSpaceHandling CompactSpacesIfEnabled();
  // Null when compaction is not enabled for the current cycle.
  CompactionWorklists* compaction_worklists() {
    return compaction_worklists_.get();
  }
  void EnableForNextGCForTesting() { enable_for_next_gc_for_testing_ = true; }
  bool IsEnabledForTesting() const { return is_enabled_; }
 private:
  bool ShouldCompact(GarbageCollector::Config::MarkingType,
                     GarbageCollector::Config::StackState);
  RawHeap& heap_;
  // Compactor does not own the compactable spaces. The heap owns all spaces.
  std::vector<NormalPageSpace*> compactable_spaces_;
  std::unique_ptr<CompactionWorklists> compaction_worklists_;
  bool is_enabled_ = false;
  bool enable_for_next_gc_for_testing_ = false;
};
} // namespace internal
} // namespace cppgc
#endif // V8_HEAP_CPPGC_COMPACTOR_H_
...@@ -74,7 +74,8 @@ void ConcurrentMarkingTask::Run(JobDelegate* job_delegate) { ...@@ -74,7 +74,8 @@ void ConcurrentMarkingTask::Run(JobDelegate* job_delegate) {
if (!HasWorkForConcurrentMarking(concurrent_marker_.marking_worklists())) if (!HasWorkForConcurrentMarking(concurrent_marker_.marking_worklists()))
return; return;
ConcurrentMarkingState concurrent_marking_state( ConcurrentMarkingState concurrent_marking_state(
concurrent_marker_.heap(), concurrent_marker_.marking_worklists()); concurrent_marker_.heap(), concurrent_marker_.marking_worklists(),
concurrent_marker_.heap().compactor().compaction_worklists());
std::unique_ptr<Visitor> concurrent_marking_visitor = std::unique_ptr<Visitor> concurrent_marking_visitor =
concurrent_marker_.CreateConcurrentMarkingVisitor( concurrent_marker_.CreateConcurrentMarkingVisitor(
concurrent_marking_state); concurrent_marking_state);
...@@ -186,6 +187,10 @@ void ConcurrentMarkerBase::JoinForTesting() { ...@@ -186,6 +187,10 @@ void ConcurrentMarkerBase::JoinForTesting() {
concurrent_marking_handle_->Join(); concurrent_marking_handle_->Join();
} }
bool ConcurrentMarkerBase::IsActive() const {
return concurrent_marking_handle_ && concurrent_marking_handle_->IsRunning();
}
ConcurrentMarkerBase::~ConcurrentMarkerBase() { ConcurrentMarkerBase::~ConcurrentMarkerBase() {
CHECK_IMPLIES(concurrent_marking_handle_, CHECK_IMPLIES(concurrent_marking_handle_,
!concurrent_marking_handle_->IsValid()); !concurrent_marking_handle_->IsValid());
......
...@@ -30,6 +30,8 @@ class V8_EXPORT_PRIVATE ConcurrentMarkerBase { ...@@ -30,6 +30,8 @@ class V8_EXPORT_PRIVATE ConcurrentMarkerBase {
bool NotifyIncrementalMutatorStepCompleted(); bool NotifyIncrementalMutatorStepCompleted();
bool IsActive() const;
HeapBase& heap() const { return heap_; } HeapBase& heap() const { return heap_; }
MarkingWorklists& marking_worklists() const { return marking_worklists_; } MarkingWorklists& marking_worklists() const { return marking_worklists_; }
IncrementalMarkingSchedule& incremental_marking_schedule() const { IncrementalMarkingSchedule& incremental_marking_schedule() const {
......
...@@ -19,7 +19,7 @@ class GarbageCollector { ...@@ -19,7 +19,7 @@ class GarbageCollector {
using CollectionType = Marker::MarkingConfig::CollectionType; using CollectionType = Marker::MarkingConfig::CollectionType;
using StackState = cppgc::Heap::StackState; using StackState = cppgc::Heap::StackState;
using MarkingType = Marker::MarkingConfig::MarkingType; using MarkingType = Marker::MarkingConfig::MarkingType;
using SweepingType = Sweeper::Config; using SweepingType = Sweeper::SweepingConfig::SweepingType;
static constexpr Config ConservativeAtomicConfig() { static constexpr Config ConservativeAtomicConfig() {
return {CollectionType::kMajor, StackState::kMayContainHeapPointers, return {CollectionType::kMajor, StackState::kMayContainHeapPointers,
......
...@@ -70,6 +70,7 @@ HeapBase::HeapBase( ...@@ -70,6 +70,7 @@ HeapBase::HeapBase(
stack_(std::make_unique<heap::base::Stack>( stack_(std::make_unique<heap::base::Stack>(
v8::base::Stack::GetStackStart())), v8::base::Stack::GetStackStart())),
prefinalizer_handler_(std::make_unique<PreFinalizerHandler>()), prefinalizer_handler_(std::make_unique<PreFinalizerHandler>()),
compactor_(raw_heap_),
object_allocator_(&raw_heap_, page_backend_.get(), object_allocator_(&raw_heap_, page_backend_.get(),
stats_collector_.get()), stats_collector_.get()),
sweeper_(&raw_heap_, platform_.get(), stats_collector_.get()), sweeper_(&raw_heap_, platform_.get(), stats_collector_.get()),
......
...@@ -12,6 +12,7 @@ ...@@ -12,6 +12,7 @@
#include "include/cppgc/internal/persistent-node.h" #include "include/cppgc/internal/persistent-node.h"
#include "include/cppgc/macros.h" #include "include/cppgc/macros.h"
#include "src/base/macros.h" #include "src/base/macros.h"
#include "src/heap/cppgc/compactor.h"
#include "src/heap/cppgc/marker.h" #include "src/heap/cppgc/marker.h"
#include "src/heap/cppgc/object-allocator.h" #include "src/heap/cppgc/object-allocator.h"
#include "src/heap/cppgc/raw-heap.h" #include "src/heap/cppgc/raw-heap.h"
...@@ -97,6 +98,8 @@ class V8_EXPORT_PRIVATE HeapBase { ...@@ -97,6 +98,8 @@ class V8_EXPORT_PRIVATE HeapBase {
MarkerBase* marker() const { return marker_.get(); } MarkerBase* marker() const { return marker_.get(); }
Compactor& compactor() { return compactor_; }
ObjectAllocator& object_allocator() { return object_allocator_; } ObjectAllocator& object_allocator() { return object_allocator_; }
Sweeper& sweeper() { return sweeper_; } Sweeper& sweeper() { return sweeper_; }
...@@ -154,6 +157,7 @@ class V8_EXPORT_PRIVATE HeapBase { ...@@ -154,6 +157,7 @@ class V8_EXPORT_PRIVATE HeapBase {
std::unique_ptr<PreFinalizerHandler> prefinalizer_handler_; std::unique_ptr<PreFinalizerHandler> prefinalizer_handler_;
std::unique_ptr<MarkerBase> marker_; std::unique_ptr<MarkerBase> marker_;
Compactor compactor_;
ObjectAllocator object_allocator_; ObjectAllocator object_allocator_;
Sweeper sweeper_; Sweeper sweeper_;
......
...@@ -168,7 +168,10 @@ void Heap::FinalizeGarbageCollection(Config::StackState stack_state) { ...@@ -168,7 +168,10 @@ void Heap::FinalizeGarbageCollection(Config::StackState stack_state) {
#endif #endif
{ {
NoGCScope no_gc(*this); NoGCScope no_gc(*this);
sweeper_.Start(config_.sweeping_type); const Sweeper::SweepingConfig sweeping_config{
config_.sweeping_type,
Sweeper::SweepingConfig::CompactableSpaceHandling::kSweep};
sweeper_.Start(sweeping_config);
} }
gc_in_progress_ = false; gc_in_progress_ = false;
} }
......
...@@ -158,7 +158,8 @@ MarkerBase::MarkerBase(Key, HeapBase& heap, cppgc::Platform* platform, ...@@ -158,7 +158,8 @@ MarkerBase::MarkerBase(Key, HeapBase& heap, cppgc::Platform* platform,
config_(config), config_(config),
platform_(platform), platform_(platform),
foreground_task_runner_(platform_->GetForegroundTaskRunner()), foreground_task_runner_(platform_->GetForegroundTaskRunner()),
mutator_marking_state_(heap, marking_worklists_) {} mutator_marking_state_(heap, marking_worklists_,
heap.compactor().compaction_worklists()) {}
MarkerBase::~MarkerBase() { MarkerBase::~MarkerBase() {
// The fixed point iteration may have found not-fully-constructed objects. // The fixed point iteration may have found not-fully-constructed objects.
...@@ -435,6 +436,8 @@ void MarkerBase::MarkNotFullyConstructedObjects() { ...@@ -435,6 +436,8 @@ void MarkerBase::MarkNotFullyConstructedObjects() {
void MarkerBase::ClearAllWorklistsForTesting() { void MarkerBase::ClearAllWorklistsForTesting() {
marking_worklists_.ClearForTesting(); marking_worklists_.ClearForTesting();
auto* compaction_worklists = heap_.compactor().compaction_worklists();
if (compaction_worklists) compaction_worklists->ClearForTesting();
} }
void MarkerBase::DisableIncrementalMarkingForTesting() { void MarkerBase::DisableIncrementalMarkingForTesting() {
...@@ -445,6 +448,13 @@ void MarkerBase::WaitForConcurrentMarkingForTesting() { ...@@ -445,6 +448,13 @@ void MarkerBase::WaitForConcurrentMarkingForTesting() {
concurrent_marker_->JoinForTesting(); concurrent_marker_->JoinForTesting();
} }
void MarkerBase::NotifyCompactionCancelled() {
// Compaction cannot be cancelled while concurrent marking is active.
DCHECK_EQ(MarkingConfig::MarkingType::kAtomic, config_.marking_type);
DCHECK_IMPLIES(concurrent_marker_, !concurrent_marker_->IsActive());
mutator_marking_state_.NotifyCompactionCancelled();
}
Marker::Marker(Key key, HeapBase& heap, cppgc::Platform* platform, Marker::Marker(Key key, HeapBase& heap, cppgc::Platform* platform,
MarkingConfig config) MarkingConfig config)
: MarkerBase(key, heap, platform, config), : MarkerBase(key, heap, platform, config),
......
...@@ -124,6 +124,8 @@ class V8_EXPORT_PRIVATE MarkerBase { ...@@ -124,6 +124,8 @@ class V8_EXPORT_PRIVATE MarkerBase {
void WaitForConcurrentMarkingForTesting(); void WaitForConcurrentMarkingForTesting();
void NotifyCompactionCancelled();
protected: protected:
static constexpr v8::base::TimeDelta kMaximumIncrementalStepDuration = static constexpr v8::base::TimeDelta kMaximumIncrementalStepDuration =
v8::base::TimeDelta::FromMilliseconds(2); v8::base::TimeDelta::FromMilliseconds(2);
......
...@@ -6,6 +6,7 @@ ...@@ -6,6 +6,7 @@
#define V8_HEAP_CPPGC_MARKING_STATE_H_ #define V8_HEAP_CPPGC_MARKING_STATE_H_
#include "include/cppgc/trace-trait.h" #include "include/cppgc/trace-trait.h"
#include "src/heap/cppgc/compaction-worklists.h"
#include "src/heap/cppgc/globals.h" #include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap-object-header.h" #include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h" #include "src/heap/cppgc/heap-page.h"
...@@ -18,7 +19,8 @@ namespace internal { ...@@ -18,7 +19,8 @@ namespace internal {
// C++ marking implementation. // C++ marking implementation.
class MarkingStateBase { class MarkingStateBase {
public: public:
inline MarkingStateBase(HeapBase& heap, MarkingWorklists&); inline MarkingStateBase(HeapBase& heap, MarkingWorklists&,
CompactionWorklists*);
MarkingStateBase(const MarkingStateBase&) = delete; MarkingStateBase(const MarkingStateBase&) = delete;
MarkingStateBase& operator=(const MarkingStateBase&) = delete; MarkingStateBase& operator=(const MarkingStateBase&) = delete;
...@@ -32,6 +34,11 @@ class MarkingStateBase { ...@@ -32,6 +34,11 @@ class MarkingStateBase {
WeakCallback, const void*); WeakCallback, const void*);
inline void RegisterWeakCallback(WeakCallback, const void*); inline void RegisterWeakCallback(WeakCallback, const void*);
void RegisterMovableReference(const void** slot) {
if (!movable_slots_worklist_) return;
movable_slots_worklist_->Push(slot);
}
// Weak containers are special in that they may require re-tracing if // Weak containers are special in that they may require re-tracing if
// reachable through stack, even if the container was already traced before. // reachable through stack, even if the container was already traced before.
// ProcessWeakContainer records which weak containers were already marked so // ProcessWeakContainer records which weak containers were already marked so
...@@ -53,6 +60,7 @@ class MarkingStateBase { ...@@ -53,6 +60,7 @@ class MarkingStateBase {
concurrent_marking_bailout_worklist_.Publish(); concurrent_marking_bailout_worklist_.Publish();
discovered_ephemeron_pairs_worklist_.Publish(); discovered_ephemeron_pairs_worklist_.Publish();
ephemeron_pairs_for_processing_worklist_.Publish(); ephemeron_pairs_for_processing_worklist_.Publish();
if (IsCompactionEnabled()) movable_slots_worklist_->Publish();
} }
MarkingWorklists::MarkingWorklist::Local& marking_worklist() { MarkingWorklists::MarkingWorklist::Local& marking_worklist() {
...@@ -88,6 +96,17 @@ class MarkingStateBase { ...@@ -88,6 +96,17 @@ class MarkingStateBase {
return weak_containers_worklist_; return weak_containers_worklist_;
} }
CompactionWorklists::MovableReferencesWorklist::Local*
movable_slots_worklist() {
return movable_slots_worklist_.get();
}
void NotifyCompactionCancelled() {
DCHECK(IsCompactionEnabled());
movable_slots_worklist_->Clear();
movable_slots_worklist_.reset();
}
protected: protected:
inline void MarkAndPush(HeapObjectHeader&, TraceDescriptor); inline void MarkAndPush(HeapObjectHeader&, TraceDescriptor);
...@@ -95,6 +114,10 @@ class MarkingStateBase { ...@@ -95,6 +114,10 @@ class MarkingStateBase {
inline void RegisterWeakContainer(HeapObjectHeader&); inline void RegisterWeakContainer(HeapObjectHeader&);
inline bool IsCompactionEnabled() const {
return movable_slots_worklist_.get();
}
#ifdef DEBUG #ifdef DEBUG
HeapBase& heap_; HeapBase& heap_;
#endif // DEBUG #endif // DEBUG
...@@ -113,12 +136,17 @@ class MarkingStateBase { ...@@ -113,12 +136,17 @@ class MarkingStateBase {
MarkingWorklists::EphemeronPairsWorklist::Local MarkingWorklists::EphemeronPairsWorklist::Local
ephemeron_pairs_for_processing_worklist_; ephemeron_pairs_for_processing_worklist_;
MarkingWorklists::WeakContainersWorklist& weak_containers_worklist_; MarkingWorklists::WeakContainersWorklist& weak_containers_worklist_;
// Existence of the worklist (|movable_slot_worklist_| != nullptr) denotes
// that compaction is currently enabled and slots must be recorded.
std::unique_ptr<CompactionWorklists::MovableReferencesWorklist::Local>
movable_slots_worklist_;
size_t marked_bytes_ = 0; size_t marked_bytes_ = 0;
}; };
MarkingStateBase::MarkingStateBase(HeapBase& heap, MarkingStateBase::MarkingStateBase(HeapBase& heap,
MarkingWorklists& marking_worklists) MarkingWorklists& marking_worklists,
CompactionWorklists* compaction_worklists)
: :
#ifdef DEBUG #ifdef DEBUG
heap_(heap), heap_(heap),
...@@ -137,6 +165,11 @@ MarkingStateBase::MarkingStateBase(HeapBase& heap, ...@@ -137,6 +165,11 @@ MarkingStateBase::MarkingStateBase(HeapBase& heap,
ephemeron_pairs_for_processing_worklist_( ephemeron_pairs_for_processing_worklist_(
marking_worklists.ephemeron_pairs_for_processing_worklist()), marking_worklists.ephemeron_pairs_for_processing_worklist()),
weak_containers_worklist_(*marking_worklists.weak_containers_worklist()) { weak_containers_worklist_(*marking_worklists.weak_containers_worklist()) {
if (compaction_worklists) {
movable_slots_worklist_ =
std::make_unique<CompactionWorklists::MovableReferencesWorklist::Local>(
compaction_worklists->movable_slots_worklist());
}
} }
void MarkingStateBase::MarkAndPush(const void* object, TraceDescriptor desc) { void MarkingStateBase::MarkAndPush(const void* object, TraceDescriptor desc) {
...@@ -260,8 +293,9 @@ void MarkingStateBase::AccountMarkedBytes(size_t marked_bytes) { ...@@ -260,8 +293,9 @@ void MarkingStateBase::AccountMarkedBytes(size_t marked_bytes) {
class MutatorMarkingState : public MarkingStateBase { class MutatorMarkingState : public MarkingStateBase {
public: public:
MutatorMarkingState(HeapBase& heap, MarkingWorklists& marking_worklists) MutatorMarkingState(HeapBase& heap, MarkingWorklists& marking_worklists,
: MarkingStateBase(heap, marking_worklists) {} CompactionWorklists* compaction_worklists)
: MarkingStateBase(heap, marking_worklists, compaction_worklists) {}
inline bool MarkNoPush(HeapObjectHeader& header) { inline bool MarkNoPush(HeapObjectHeader& header) {
return MutatorMarkingState::MarkingStateBase::MarkNoPush(header); return MutatorMarkingState::MarkingStateBase::MarkNoPush(header);
...@@ -327,8 +361,9 @@ bool MutatorMarkingState::IsMarkedWeakContainer(HeapObjectHeader& header) { ...@@ -327,8 +361,9 @@ bool MutatorMarkingState::IsMarkedWeakContainer(HeapObjectHeader& header) {
class ConcurrentMarkingState : public MarkingStateBase { class ConcurrentMarkingState : public MarkingStateBase {
public: public:
ConcurrentMarkingState(HeapBase& heap, MarkingWorklists& marking_worklists) ConcurrentMarkingState(HeapBase& heap, MarkingWorklists& marking_worklists,
: MarkingStateBase(heap, marking_worklists) {} CompactionWorklists* compaction_worklists)
: MarkingStateBase(heap, marking_worklists, compaction_worklists) {}
~ConcurrentMarkingState() { DCHECK_EQ(last_marked_bytes_, marked_bytes_); } ~ConcurrentMarkingState() { DCHECK_EQ(last_marked_bytes_, marked_bytes_); }
......
...@@ -43,6 +43,10 @@ void MarkingVisitorBase::RegisterWeakCallback(WeakCallback callback, ...@@ -43,6 +43,10 @@ void MarkingVisitorBase::RegisterWeakCallback(WeakCallback callback,
marking_state_.RegisterWeakCallback(callback, object); marking_state_.RegisterWeakCallback(callback, object);
} }
void MarkingVisitorBase::HandleMovableReference(const void** slot) {
marking_state_.RegisterMovableReference(slot);
}
ConservativeMarkingVisitor::ConservativeMarkingVisitor( ConservativeMarkingVisitor::ConservativeMarkingVisitor(
HeapBase& heap, MutatorMarkingState& marking_state, cppgc::Visitor& visitor) HeapBase& heap, MutatorMarkingState& marking_state, cppgc::Visitor& visitor)
: ConservativeTracingVisitor(heap, *heap.page_backend(), visitor), : ConservativeTracingVisitor(heap, *heap.page_backend(), visitor),
......
...@@ -33,6 +33,7 @@ class V8_EXPORT_PRIVATE MarkingVisitorBase : public VisitorBase { ...@@ -33,6 +33,7 @@ class V8_EXPORT_PRIVATE MarkingVisitorBase : public VisitorBase {
TraceDescriptor weak_desc, WeakCallback callback, TraceDescriptor weak_desc, WeakCallback callback,
const void* data) final; const void* data) final;
void RegisterWeakCallback(WeakCallback, const void*) final; void RegisterWeakCallback(WeakCallback, const void*) final;
void HandleMovableReference(const void**) final;
MarkingStateBase& marking_state_; MarkingStateBase& marking_state_;
}; };
......
...@@ -446,10 +446,19 @@ class ConcurrentSweepTask final : public cppgc::JobTask, ...@@ -446,10 +446,19 @@ class ConcurrentSweepTask final : public cppgc::JobTask,
// - moves all Heap pages to local Sweeper's state (SpaceStates). // - moves all Heap pages to local Sweeper's state (SpaceStates).
class PrepareForSweepVisitor final class PrepareForSweepVisitor final
: public HeapVisitor<PrepareForSweepVisitor> { : public HeapVisitor<PrepareForSweepVisitor> {
using CompactableSpaceHandling =
Sweeper::SweepingConfig::CompactableSpaceHandling;
public: public:
explicit PrepareForSweepVisitor(SpaceStates* states) : states_(states) {} PrepareForSweepVisitor(SpaceStates* states,
CompactableSpaceHandling compactable_space_handling)
: states_(states),
compactable_space_handling_(compactable_space_handling) {}
bool VisitNormalPageSpace(NormalPageSpace* space) { bool VisitNormalPageSpace(NormalPageSpace* space) {
if ((compactable_space_handling_ == CompactableSpaceHandling::kIgnore) &&
space->is_compactable())
return true;
DCHECK(!space->linear_allocation_buffer().size()); DCHECK(!space->linear_allocation_buffer().size());
space->free_list().Clear(); space->free_list().Clear();
ExtractPages(space); ExtractPages(space);
...@@ -469,6 +478,7 @@ class PrepareForSweepVisitor final ...@@ -469,6 +478,7 @@ class PrepareForSweepVisitor final
} }
SpaceStates* states_; SpaceStates* states_;
CompactableSpaceHandling compactable_space_handling_;
}; };
} // namespace } // namespace
...@@ -485,17 +495,20 @@ class Sweeper::SweeperImpl final { ...@@ -485,17 +495,20 @@ class Sweeper::SweeperImpl final {
~SweeperImpl() { CancelSweepers(); } ~SweeperImpl() { CancelSweepers(); }
void Start(Config config) { void Start(SweepingConfig config) {
is_in_progress_ = true; is_in_progress_ = true;
#if DEBUG #if DEBUG
// Verify bitmap for all spaces regardless of |compactable_space_handling|.
ObjectStartBitmapVerifier().Verify(heap_); ObjectStartBitmapVerifier().Verify(heap_);
#endif #endif
PrepareForSweepVisitor(&space_states_).Traverse(heap_); PrepareForSweepVisitor(&space_states_, config.compactable_space_handling)
.Traverse(heap_);
if (config == Config::kAtomic) { if (config.sweeping_type == SweepingConfig::SweepingType::kAtomic) {
Finish(); Finish();
} else { } else {
DCHECK_EQ(Config::kIncrementalAndConcurrent, config); DCHECK_EQ(SweepingConfig::SweepingType::kIncrementalAndConcurrent,
config.sweeping_type);
ScheduleIncrementalSweeping(); ScheduleIncrementalSweeping();
ScheduleConcurrentSweeping(); ScheduleConcurrentSweeping();
} }
...@@ -620,7 +633,7 @@ Sweeper::Sweeper(RawHeap* heap, cppgc::Platform* platform, ...@@ -620,7 +633,7 @@ Sweeper::Sweeper(RawHeap* heap, cppgc::Platform* platform,
Sweeper::~Sweeper() = default; Sweeper::~Sweeper() = default;
void Sweeper::Start(Config config) { impl_->Start(config); } void Sweeper::Start(SweepingConfig config) { impl_->Start(config); }
void Sweeper::FinishIfRunning() { impl_->FinishIfRunning(); } void Sweeper::FinishIfRunning() { impl_->FinishIfRunning(); }
void Sweeper::WaitForConcurrentSweepingForTesting() { void Sweeper::WaitForConcurrentSweepingForTesting() {
impl_->WaitForConcurrentSweepingForTesting(); impl_->WaitForConcurrentSweepingForTesting();
......
...@@ -21,7 +21,14 @@ class ConcurrentSweeperTest; ...@@ -21,7 +21,14 @@ class ConcurrentSweeperTest;
class V8_EXPORT_PRIVATE Sweeper final { class V8_EXPORT_PRIVATE Sweeper final {
public: public:
enum class Config { kAtomic, kIncrementalAndConcurrent }; struct SweepingConfig {
enum class SweepingType : uint8_t { kAtomic, kIncrementalAndConcurrent };
enum class CompactableSpaceHandling { kSweep, kIgnore };
SweepingType sweeping_type = SweepingType::kIncrementalAndConcurrent;
CompactableSpaceHandling compactable_space_handling =
CompactableSpaceHandling::kSweep;
};
Sweeper(RawHeap*, cppgc::Platform*, StatsCollector*); Sweeper(RawHeap*, cppgc::Platform*, StatsCollector*);
~Sweeper(); ~Sweeper();
...@@ -30,7 +37,7 @@ class V8_EXPORT_PRIVATE Sweeper final { ...@@ -30,7 +37,7 @@ class V8_EXPORT_PRIVATE Sweeper final {
Sweeper& operator=(const Sweeper&) = delete; Sweeper& operator=(const Sweeper&) = delete;
// Sweeper::Start assumes the heap holds no linear allocation buffers. // Sweeper::Start assumes the heap holds no linear allocation buffers.
void Start(Config); void Start(SweepingConfig);
void FinishIfRunning(); void FinishIfRunning();
private: private:
......
...@@ -80,6 +80,7 @@ v8_source_set("cppgc_unittests_sources") { ...@@ -80,6 +80,7 @@ v8_source_set("cppgc_unittests_sources") {
testonly = true testonly = true
sources = [ sources = [
"heap/cppgc/compactor-unittest.cc",
"heap/cppgc/concurrent-marking-unittest.cc", "heap/cppgc/concurrent-marking-unittest.cc",
"heap/cppgc/concurrent-sweeper-unittest.cc", "heap/cppgc/concurrent-sweeper-unittest.cc",
"heap/cppgc/cross-thread-persistent-unittest.cc", "heap/cppgc/cross-thread-persistent-unittest.cc",
......
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/cppgc/compactor.h"
#include "include/cppgc/allocation.h"
#include "include/cppgc/custom-space.h"
#include "include/cppgc/persistent.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/marker.h"
#include "src/heap/cppgc/stats-collector.h"
#include "test/unittests/heap/cppgc/tests.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace cppgc {
// Custom space that opts into backing-store compaction
// (kSupportsCompaction = true). Objects routed here (via the SpaceTrait
// specialization below) are eligible to be moved by the Compactor.
class CompactableCustomSpace : public CustomSpace<CompactableCustomSpace> {
 public:
  static constexpr size_t kSpaceIndex = 0;
  static constexpr bool kSupportsCompaction = true;
};
namespace internal {
namespace {
// Garbage-collected test object allocated in the compactable custom space.
// Counts destructor runs so tests can assert how many objects a GC
// reclaimed, and registers its |other| slot as a movable reference so the
// compactor rewrites the slot when the referent is relocated.
struct CompactableGCed : public GarbageCollected<CompactableGCed> {
 public:
  ~CompactableGCed() { ++g_destructor_callcount; }
  void Trace(Visitor* visitor) const {
    visitor->Trace(other);
    // Registering the slot allows the compactor to fix it up in place if
    // the referenced object is moved during compaction.
    visitor->RegisterMovableReference(other.GetSlotForTesting());
  }
  // Number of CompactableGCed destructors run; tests reset this before a
  // GC (see CompactorTest::StartGC) and assert on it afterwards.
  static size_t g_destructor_callcount;
  Member<CompactableGCed> other;
  size_t id = 0;
};

// static
size_t CompactableGCed::g_destructor_callcount = 0;
// Holder keeping kNumObjects CompactableGCed objects alive. Each Member
// slot is registered as a movable reference, so the compactor may relocate
// the referenced objects and must then rewrite the slots in the holder.
template <int kNumObjects>
struct CompactableHolder
    : public GarbageCollected<CompactableHolder<kNumObjects>> {
 public:
  explicit CompactableHolder(cppgc::AllocationHandle& allocation_handle) {
    for (int i = 0; i < kNumObjects; ++i)
      objects[i] = MakeGarbageCollected<CompactableGCed>(allocation_handle);
  }

  void Trace(Visitor* visitor) const {
    for (int i = 0; i < kNumObjects; ++i) {
      visitor->Trace(objects[i]);
      // Make each slot eligible for fix-up on compaction.
      visitor->RegisterMovableReference(objects[i].GetSlotForTesting());
    }
  }
  Member<CompactableGCed> objects[kNumObjects];
};
// Fixture that creates a heap with a single compactable custom space and
// exposes helpers to drive marking, compaction, and sweeping step by step.
class CompactorTest : public testing::TestWithPlatform {
 public:
  CompactorTest() {
    Heap::HeapOptions options;
    // Register the one custom space that supports compaction; compaction
    // only ever considers compactable spaces.
    options.custom_spaces.emplace_back(
        std::make_unique<CompactableCustomSpace>());
    heap_ = Heap::Create(platform_, std::move(options));
  }

  // Force-enables compaction for the next GC (normally a heuristic
  // decision) and initializes it for a configuration without conservative
  // stack scanning, under which moving objects is allowed.
  void StartCompaction() {
    compactor().EnableForNextGCForTesting();
    compactor().InitializeIfShouldCompact(
        GarbageCollector::Config::MarkingType::kIncremental,
        GarbageCollector::Config::StackState::kNoHeapPointers);
    EXPECT_TRUE(compactor().IsEnabledForTesting());
  }

  // A GC that may see heap pointers on the stack must not move objects;
  // expects the compactor to cancel itself for such a configuration.
  void CancelCompaction() {
    bool cancelled = compactor().CancelIfShouldNotCompact(
        GarbageCollector::Config::MarkingType::kAtomic,
        GarbageCollector::Config::StackState::kMayContainHeapPointers);
    EXPECT_TRUE(cancelled);
  }

  void FinishCompaction() { compactor().CompactSpacesIfEnabled(); }

  // Starts an incremental GC with compaction enabled; also resets the
  // destructor counter used by the tests' expectations.
  void StartGC() {
    CompactableGCed::g_destructor_callcount = 0u;
    StartCompaction();
    heap()->StartIncrementalGarbageCollection(
        GarbageCollector::Config::PreciseIncrementalConfig());
  }

  // Finishes marking, runs compaction, then sweeps atomically. The sweeper
  // is told to ignore compactable spaces since compaction already handled
  // them.
  void EndGC() {
    heap()->marker()->FinishMarking(
        GarbageCollector::Config::StackState::kNoHeapPointers);
    FinishCompaction();
    // Sweeping also verifies the object start bitmap.
    const Sweeper::SweepingConfig sweeping_config{
        Sweeper::SweepingConfig::SweepingType::kAtomic,
        Sweeper::SweepingConfig::CompactableSpaceHandling::kIgnore};
    heap()->sweeper().Start(sweeping_config);
  }

  Heap* heap() { return Heap::From(heap_.get()); }
  cppgc::AllocationHandle& GetAllocationHandle() {
    return heap_->GetAllocationHandle();
  }
  Compactor& compactor() { return heap()->compactor(); }

 private:
  std::unique_ptr<cppgc::Heap> heap_;
};
} // namespace
} // namespace internal
// Route all CompactableGCed allocations into the compactable custom space.
template <>
struct SpaceTrait<internal::CompactableGCed> {
  using Space = CompactableCustomSpace;
};
namespace internal {
// Compaction over a space with no live objects must complete cleanly
// without doing any work.
TEST_F(CompactorTest, NothingToCompact) {
  StartCompaction();
  FinishCompaction();
}
// Once enabled, compaction must still be cancellable when the GC turns out
// to require conservative stack scanning.
TEST_F(CompactorTest, CancelledNothingToCompact) {
  StartCompaction();
  CancelCompaction();
}
// With every object reachable across the GC, nothing may be reclaimed and
// no holder slot may end up pointing at a different address.
TEST_F(CompactorTest, NonEmptySpaceAllLive) {
  static constexpr int kNumObjects = 10;
  Persistent<CompactableHolder<kNumObjects>> holder =
      MakeGarbageCollected<CompactableHolder<kNumObjects>>(
          GetAllocationHandle(), GetAllocationHandle());
  // Remember the pre-GC address of every object.
  CompactableGCed* expected[kNumObjects] = {nullptr};
  for (int idx = 0; idx < kNumObjects; ++idx) {
    expected[idx] = holder->objects[idx];
  }
  StartGC();
  EndGC();
  EXPECT_EQ(0u, CompactableGCed::g_destructor_callcount);
  // All slots still reference the objects at their original addresses.
  for (int idx = 0; idx < kNumObjects; ++idx) {
    EXPECT_EQ(holder->objects[idx], expected[idx]);
  }
}
// All objects in the compactable space become unreachable during marking:
// every one of them must be reclaimed.
TEST_F(CompactorTest, NonEmptySpaceAllDead) {
  static constexpr int kNumObjects = 10;
  Persistent<CompactableHolder<kNumObjects>> holder =
      MakeGarbageCollected<CompactableHolder<kNumObjects>>(
          GetAllocationHandle(), GetAllocationHandle());
  // StartGC() resets g_destructor_callcount, so no explicit reset needed.
  StartGC();
  for (int i = 0; i < kNumObjects; ++i) {
    holder->objects[i] = nullptr;
  }
  EndGC();
  // Derive the expectation from kNumObjects instead of hard-coding 10u so
  // the test stays correct if the object count is changed.
  EXPECT_EQ(static_cast<size_t>(kNumObjects),
            CompactableGCed::g_destructor_callcount);
}
// Half of the objects (the even-indexed ones) die during marking; the
// surviving odd-indexed objects must be compacted down into the space
// previously occupied by the first half.
TEST_F(CompactorTest, NonEmptySpaceHalfLive) {
  static constexpr int kNumObjects = 10;
  Persistent<CompactableHolder<kNumObjects>> holder =
      MakeGarbageCollected<CompactableHolder<kNumObjects>>(
          GetAllocationHandle(), GetAllocationHandle());
  // Remember the pre-GC address of every object.
  CompactableGCed* references[kNumObjects] = {nullptr};
  for (int i = 0; i < kNumObjects; ++i) {
    references[i] = holder->objects[i];
  }
  StartGC();
  // Drop every even-indexed object while marking is in progress.
  for (int i = 0; i < kNumObjects; i += 2) {
    holder->objects[i] = nullptr;
  }
  EndGC();
  // Half of the objects were destroyed. Derive the expectation from
  // kNumObjects instead of hard-coding 5u.
  EXPECT_EQ(static_cast<size_t>(kNumObjects) / 2,
            CompactableGCed::g_destructor_callcount);
  // Remaining objects are compacted: the i-th survivor now lives at the
  // address originally held by references[i / 2].
  for (int i = 1; i < kNumObjects; i += 2) {
    EXPECT_EQ(holder->objects[i], references[i / 2]);
  }
}
// Fills the first page with short-lived objects so the final survivor is
// allocated on a second page, then checks that compaction moves the
// survivor back onto the first page.
TEST_F(CompactorTest, CompactAcrossPages) {
  Persistent<CompactableHolder<1>> holder =
      MakeGarbageCollected<CompactableHolder<1>>(GetAllocationHandle(),
                                                 GetAllocationHandle());
  CompactableGCed* reference = holder->objects[0];
  static constexpr size_t kObjectsPerPage =
      kPageSize / (sizeof(CompactableGCed) + sizeof(HeapObjectHeader));
  // Each iteration drops the previous object and allocates a new one, so
  // exactly one object stays reachable at any time.
  for (size_t i = 0; i < kObjectsPerPage; ++i) {
    holder->objects[0] =
        MakeGarbageCollected<CompactableGCed>(GetAllocationHandle());
  }
  // Last allocated object should be on a new page.
  EXPECT_NE(reference, holder->objects[0]);
  EXPECT_NE(BasePage::FromInnerAddress(heap(), reference),
            BasePage::FromInnerAddress(heap(), holder->objects[0].Get()));
  StartGC();
  EndGC();
  // Of the kObjectsPerPage + 1 allocated objects, all but the last one die
  // (the original comment here wrongly said "half"): only the object still
  // referenced through the holder survives.
  EXPECT_EQ(kObjectsPerPage, CompactableGCed::g_destructor_callcount);
  // The survivor was compacted onto the first page, to the address the
  // original object occupied.
  EXPECT_EQ(reference, holder->objects[0]);
}
// A movable slot inside a moved object (objects[2]->other) refers to an
// object located *before* it in the space. Both survivor objects move, and
// the interior slot must be updated to the referent's new address.
TEST_F(CompactorTest, InteriorSlotToPreviousObject) {
  static constexpr int kNumObjects = 3;
  Persistent<CompactableHolder<kNumObjects>> holder =
      MakeGarbageCollected<CompactableHolder<kNumObjects>>(
          GetAllocationHandle(), GetAllocationHandle());
  CompactableGCed* references[kNumObjects] = {nullptr};
  for (int i = 0; i < kNumObjects; ++i) {
    references[i] = holder->objects[i];
  }
  // Keep objects[1] alive only through the interior slot of objects[2];
  // objects[0] becomes garbage.
  holder->objects[2]->other = holder->objects[1];
  holder->objects[1] = nullptr;
  holder->objects[0] = nullptr;
  StartGC();
  EndGC();
  // Only objects[0] died.
  EXPECT_EQ(1u, CompactableGCed::g_destructor_callcount);
  // The two survivors compacted into the first two slots: the referring
  // object now sits at references[1]'s old address and its interior slot
  // was fixed up to the referent's new address (references[0]'s old slot).
  EXPECT_EQ(references[1], holder->objects[2]);
  EXPECT_EQ(references[0], holder->objects[2]->other);
}
// A movable slot inside a moved object (objects[1]->other) refers to an
// object located *after* it in the space. Both survivor objects move, and
// the interior slot must be updated to the referent's new address.
TEST_F(CompactorTest, InteriorSlotToNextObject) {
  static constexpr int kNumObjects = 3;
  Persistent<CompactableHolder<kNumObjects>> holder =
      MakeGarbageCollected<CompactableHolder<kNumObjects>>(
          GetAllocationHandle(), GetAllocationHandle());
  CompactableGCed* references[kNumObjects] = {nullptr};
  for (int i = 0; i < kNumObjects; ++i) {
    references[i] = holder->objects[i];
  }
  // Keep objects[2] alive only through the interior slot of objects[1];
  // objects[0] becomes garbage.
  holder->objects[1]->other = holder->objects[2];
  holder->objects[2] = nullptr;
  holder->objects[0] = nullptr;
  StartGC();
  EndGC();
  // Only objects[0] died.
  EXPECT_EQ(1u, CompactableGCed::g_destructor_callcount);
  // The two survivors compacted into the first two slots: the referring
  // object now sits at references[0]'s old address and its interior slot
  // was fixed up to the referent's new address (references[1]'s old slot).
  EXPECT_EQ(references[0], holder->objects[1]);
  EXPECT_EQ(references[1], holder->objects[1]->other);
}
} // namespace internal
} // namespace cppgc
...@@ -75,7 +75,10 @@ class ConcurrentSweeperTest : public testing::TestWithHeap { ...@@ -75,7 +75,10 @@ class ConcurrentSweeperTest : public testing::TestWithHeap {
heap->stats_collector()->NotifyMarkingStarted(); heap->stats_collector()->NotifyMarkingStarted();
heap->stats_collector()->NotifyMarkingCompleted(0); heap->stats_collector()->NotifyMarkingCompleted(0);
Sweeper& sweeper = heap->sweeper(); Sweeper& sweeper = heap->sweeper();
sweeper.Start(Sweeper::Config::kIncrementalAndConcurrent); const Sweeper::SweepingConfig sweeping_config{
Sweeper::SweepingConfig::SweepingType::kIncrementalAndConcurrent,
Sweeper::SweepingConfig::CompactableSpaceHandling::kSweep};
sweeper.Start(sweeping_config);
} }
void WaitForConcurrentSweeping() { void WaitForConcurrentSweeping() {
......
...@@ -48,7 +48,10 @@ class SweeperTest : public testing::TestWithHeap { ...@@ -48,7 +48,10 @@ class SweeperTest : public testing::TestWithHeap {
// methods are called in the right order. // methods are called in the right order.
heap->stats_collector()->NotifyMarkingStarted(); heap->stats_collector()->NotifyMarkingStarted();
heap->stats_collector()->NotifyMarkingCompleted(0); heap->stats_collector()->NotifyMarkingCompleted(0);
sweeper.Start(Sweeper::Config::kAtomic); const Sweeper::SweepingConfig sweeping_config{
Sweeper::SweepingConfig::SweepingType::kAtomic,
Sweeper::SweepingConfig::CompactableSpaceHandling::kSweep};
sweeper.Start(sweeping_config);
sweeper.FinishIfRunning(); sweeper.FinishIfRunning();
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment