Commit de3e1205 authored by Zhi An Ng, committed by Commit Bot

[cleanup][heap] Remove uses of DISALLOW_COPY_AND_ASSIGN

Bug: v8:11074
Change-Id: I26969322948c1d062b1bc5478f547d52cba3f1b9
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2567312
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Zhi An Ng <zhin@chromium.org>
Cr-Commit-Position: refs/heads/master@{#71548}
parent 257b266e
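For context, DISALLOW_COPY_AND_ASSIGN is a macro in V8's src/base/macros.h that marks a class non-copyable; this CL spells the two deleted special members out at each use site so the macro can eventually be retired (the cleanup tracked by bug v8:11074). A minimal sketch of the before/after pattern follows; Widget is a made-up class name, and the macro body is assumed to match V8's definition at the time:

// Assumed macro definition (matching src/base/macros.h at this revision):
#define DISALLOW_COPY_AND_ASSIGN(TypeName) \
  TypeName(const TypeName&) = delete;      \
  TypeName& operator=(const TypeName&) = delete

namespace before {
// Old style: the deletions hide behind the macro, conventionally placed at
// the end of the private section.
class Widget {  // hypothetical example class
 public:
  Widget() = default;

 private:
  DISALLOW_COPY_AND_ASSIGN(Widget);
};
}  // namespace before

namespace after {
// New style: the deleted copy constructor and copy-assignment operator are
// written out in the public section, next to the other special members.
class Widget {
 public:
  Widget() = default;
  Widget(const Widget&) = delete;
  Widget& operator=(const Widget&) = delete;
};
}  // namespace after

int main() {
  after::Widget w;
  // after::Widget w2 = w;  // would not compile: use of deleted function
  (void)w;
  return 0;
}

Behavior is unchanged; placing the deletions in the public section is the usual convention because an attempted copy then diagnoses as "use of a deleted function" rather than an access error. In the diff below, lines marked + are added by this commit and lines marked - are removed.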
@@ -87,6 +87,8 @@ class AllocationObserver {
DCHECK_LE(kTaggedSize, step_size);
}
virtual ~AllocationObserver() = default;
+AllocationObserver(const AllocationObserver&) = delete;
+AllocationObserver& operator=(const AllocationObserver&) = delete;
protected:
// Pure virtual method provided by the subclasses that gets called when at
@@ -110,17 +112,18 @@ class AllocationObserver {
intptr_t step_size_;
friend class AllocationCounter;
-DISALLOW_COPY_AND_ASSIGN(AllocationObserver);
};
class V8_EXPORT_PRIVATE V8_NODISCARD PauseAllocationObserversScope {
public:
explicit PauseAllocationObserversScope(Heap* heap);
~PauseAllocationObserversScope();
+PauseAllocationObserversScope(const PauseAllocationObserversScope&) = delete;
+PauseAllocationObserversScope& operator=(
+const PauseAllocationObserversScope&) = delete;
private:
Heap* heap_;
-DISALLOW_COPY_AND_ASSIGN(PauseAllocationObserversScope);
};
} // namespace internal
@@ -21,6 +21,9 @@ class Heap;
// BaseSpace is the abstract superclass for all allocation spaces.
class V8_EXPORT_PRIVATE BaseSpace : public Malloced {
public:
+BaseSpace(const BaseSpace&) = delete;
+BaseSpace& operator=(const BaseSpace&) = delete;
Heap* heap() const {
DCHECK_NOT_NULL(heap_);
return heap_;
@@ -71,8 +74,6 @@ class V8_EXPORT_PRIVATE BaseSpace : public Malloced {
// Keeps track of committed memory in a space.
std::atomic<size_t> committed_;
size_t max_committed_;
-DISALLOW_COPY_AND_ASSIGN(BaseSpace);
};
} // namespace internal
@@ -32,13 +32,16 @@ class BackgroundCollectionInterruptTask : public CancelableTask {
: CancelableTask(heap->isolate()), heap_(heap) {}
~BackgroundCollectionInterruptTask() override = default;
+BackgroundCollectionInterruptTask(const BackgroundCollectionInterruptTask&) =
+delete;
+BackgroundCollectionInterruptTask& operator=(
+const BackgroundCollectionInterruptTask&) = delete;
private:
// v8::internal::CancelableTask overrides.
void RunInternal() override { heap_->CheckCollectionRequested(); }
Heap* heap_;
-DISALLOW_COPY_AND_ASSIGN(BackgroundCollectionInterruptTask);
};
void CollectionBarrier::AwaitCollectionBackground() {
@@ -60,6 +60,8 @@ class ConcurrentMarkingState final
class SlotSnapshot {
public:
SlotSnapshot() : number_of_slots_(0) {}
+SlotSnapshot(const SlotSnapshot&) = delete;
+SlotSnapshot& operator=(const SlotSnapshot&) = delete;
int number_of_slots() const { return number_of_slots_; }
ObjectSlot slot(int i) const { return snapshot_[i].first; }
Object value(int i) const { return snapshot_[i].second; }
@@ -72,7 +74,6 @@ class SlotSnapshot {
static const int kMaxSnapshotSize = JSObject::kMaxInstanceSize / kTaggedSize;
int number_of_slots_;
std::pair<ObjectSlot, Object> snapshot_[kMaxSnapshotSize];
-DISALLOW_COPY_AND_ASSIGN(SlotSnapshot);
};
class ConcurrentMarkingVisitor final
@@ -363,6 +364,8 @@ class ConcurrentMarking::JobTask : public v8::JobTask {
is_forced_gc_(is_forced_gc) {}
~JobTask() override = default;
+JobTask(const JobTask&) = delete;
+JobTask& operator=(const JobTask&) = delete;
// v8::JobTask overrides.
void Run(JobDelegate* delegate) override {
@@ -377,7 +380,6 @@ class ConcurrentMarking::JobTask : public v8::JobTask {
ConcurrentMarking* concurrent_marking_;
const unsigned mark_compact_epoch_;
const bool is_forced_gc_;
-DISALLOW_COPY_AND_ASSIGN(JobTask);
};
ConcurrentMarking::ConcurrentMarking(Heap* heap,
@@ -51,6 +51,8 @@ class V8_EXPORT GCInfoTable final {
// of testing code.
explicit GCInfoTable(PageAllocator* page_allocator);
~GCInfoTable();
+GCInfoTable(const GCInfoTable&) = delete;
+GCInfoTable& operator=(const GCInfoTable&) = delete;
GCInfoIndex RegisterNewGCInfo(const GCInfo& info);
@@ -84,12 +86,13 @@ class V8_EXPORT GCInfoTable final {
GCInfoIndex limit_ = 0;
v8::base::Mutex table_mutex_;
-DISALLOW_COPY_AND_ASSIGN(GCInfoTable);
};
class V8_EXPORT GlobalGCInfoTable final {
public:
+GlobalGCInfoTable(const GlobalGCInfoTable&) = delete;
+GlobalGCInfoTable& operator=(const GlobalGCInfoTable&) = delete;
// Sets up a singleton table that can be acquired using Get().
static void Create(PageAllocator* page_allocator);
@@ -106,7 +109,6 @@ class V8_EXPORT GlobalGCInfoTable final {
static GCInfoTable* global_table_;
DISALLOW_NEW_AND_DELETE()
-DISALLOW_COPY_AND_ASSIGN(GlobalGCInfoTable);
};
} // namespace internal
@@ -157,6 +157,9 @@ class V8_EXPORT_PRIVATE StatsCollector final {
IncreaseScopeTime();
}
+InternalScope(const InternalScope&) = delete;
+InternalScope& operator=(const InternalScope&) = delete;
private:
void* operator new(size_t, void*) = delete;
void* operator new(size_t) = delete;
@@ -181,8 +184,6 @@ class V8_EXPORT_PRIVATE StatsCollector final {
StatsCollector* const stats_collector_;
const v8::base::TimeTicks start_time_;
const ScopeIdType scope_id_;
-DISALLOW_COPY_AND_ASSIGN(InternalScope);
};
public:
@@ -52,6 +52,8 @@ class V8_EXPORT_PRIVATE GCIdleTimeHandler {
static const double kHighContextDisposalRate;
GCIdleTimeHandler() = default;
+GCIdleTimeHandler(const GCIdleTimeHandler&) = delete;
+GCIdleTimeHandler& operator=(const GCIdleTimeHandler&) = delete;
GCIdleTimeAction Compute(double idle_time_in_ms,
GCIdleTimeHeapState heap_state);
@@ -67,9 +69,6 @@ class V8_EXPORT_PRIVATE GCIdleTimeHandler {
static bool ShouldDoContextDisposalMarkCompact(int context_disposed,
double contexts_disposal_rate,
size_t size_of_objects);
-private:
-DISALLOW_COPY_AND_ASSIGN(GCIdleTimeHandler);
};
} // namespace internal
@@ -45,6 +45,9 @@ enum ScavengeSpeedMode { kForAllObjects, kForSurvivedObjects };
// invocation IFF --trace_gc is used.
class V8_EXPORT_PRIVATE GCTracer {
public:
+GCTracer(const GCTracer&) = delete;
+GCTracer& operator=(const GCTracer&) = delete;
struct IncrementalMarkingInfos {
IncrementalMarkingInfos() : duration(0), longest_step(0), steps(0) {}
@@ -93,6 +96,8 @@ class V8_EXPORT_PRIVATE GCTracer {
Scope(GCTracer* tracer, ScopeId scope, ThreadKind thread_kind);
~Scope();
+Scope(const Scope&) = delete;
+Scope& operator=(const Scope&) = delete;
static const char* Name(ScopeId id);
private:
@@ -103,8 +108,6 @@ class V8_EXPORT_PRIVATE GCTracer {
RuntimeCallTimer timer_;
RuntimeCallStats* runtime_stats_ = nullptr;
base::Optional<WorkerThreadRuntimeCallStatsScope> runtime_call_stats_scope_;
-DISALLOW_COPY_AND_ASSIGN(Scope);
};
class Event {
@@ -471,8 +474,6 @@ class V8_EXPORT_PRIVATE GCTracer {
base::Mutex background_counter_mutex_;
BackgroundCounter background_counter_[Scope::NUMBER_OF_SCOPES];
-DISALLOW_COPY_AND_ASSIGN(GCTracer);
};
} // namespace internal
@@ -2949,6 +2949,11 @@ class LeftTrimmerVerifierRootVisitor : public RootVisitor {
explicit LeftTrimmerVerifierRootVisitor(FixedArrayBase to_check)
: to_check_(to_check) {}
+LeftTrimmerVerifierRootVisitor(const LeftTrimmerVerifierRootVisitor&) =
+delete;
+LeftTrimmerVerifierRootVisitor& operator=(
+const LeftTrimmerVerifierRootVisitor&) = delete;
void VisitRootPointers(Root root, const char* description,
FullObjectSlot start, FullObjectSlot end) override {
for (FullObjectSlot p = start; p < end; ++p) {
@@ -2966,8 +2971,6 @@ class LeftTrimmerVerifierRootVisitor : public RootVisitor {
private:
FixedArrayBase to_check_;
-DISALLOW_COPY_AND_ASSIGN(LeftTrimmerVerifierRootVisitor);
};
} // namespace
#endif // ENABLE_SLOW_DCHECKS
@@ -3646,13 +3649,15 @@ class MemoryPressureInterruptTask : public CancelableTask {
: CancelableTask(heap->isolate()), heap_(heap) {}
~MemoryPressureInterruptTask() override = default;
+MemoryPressureInterruptTask(const MemoryPressureInterruptTask&) = delete;
+MemoryPressureInterruptTask& operator=(const MemoryPressureInterruptTask&) =
+delete;
private:
// v8::internal::CancelableTask overrides.
void RunInternal() override { heap_->CheckMemoryPressure(); }
Heap* heap_;
-DISALLOW_COPY_AND_ASSIGN(MemoryPressureInterruptTask);
};
void Heap::CheckMemoryPressure() {
@@ -1568,6 +1568,8 @@ class Heap {
class ExternalStringTable {
public:
explicit ExternalStringTable(Heap* heap) : heap_(heap) {}
+ExternalStringTable(const ExternalStringTable&) = delete;
+ExternalStringTable& operator=(const ExternalStringTable&) = delete;
// Registers an external string.
inline void AddString(String string);
@@ -1600,8 +1602,6 @@ class Heap {
// strings.
std::vector<Object> young_strings_;
std::vector<Object> old_strings_;
-DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
};
struct StringTypeTable {
@@ -1654,6 +1654,9 @@ class Heap {
Heap();
~Heap();
+Heap(const Heap&) = delete;
+Heap& operator=(const Heap&) = delete;
static bool IsRegularObjectAllocation(AllocationType allocation) {
return AllocationType::kYoung == allocation ||
AllocationType::kOld == allocation;
@@ -2370,8 +2373,6 @@ class Heap {
// Used in cctest.
friend class heap::HeapTester;
-DISALLOW_COPY_AND_ASSIGN(Heap);
};
class HeapStats {
@@ -42,6 +42,8 @@ class V8_EXPORT_PRIVATE ItemParallelJob {
public:
Item() = default;
virtual ~Item() = default;
+Item(const Item&) = delete;
+Item& operator=(const Item&) = delete;
// Marks an item as being finished.
void MarkFinished() { CHECK_EQ(kProcessing, state_.exchange(kFinished)); }
@@ -59,8 +61,6 @@ class V8_EXPORT_PRIVATE ItemParallelJob {
friend class ItemParallelJob;
friend class ItemParallelJob::Task;
-DISALLOW_COPY_AND_ASSIGN(Item);
};
class V8_EXPORT_PRIVATE Task : public CancelableTask {
@@ -68,6 +68,8 @@ class V8_EXPORT_PRIVATE ItemParallelJob {
enum class Runner { kForeground, kBackground };
explicit Task(Isolate* isolate);
~Task() override = default;
+Task(const Task&) = delete;
+Task& operator=(const Task&) = delete;
virtual void RunInParallel(Runner runner) = 0;
@@ -109,8 +111,6 @@ class V8_EXPORT_PRIVATE ItemParallelJob {
size_t items_considered_ = 0;
Runner runner_ = Runner::kBackground;
base::Semaphore* on_finish_ = nullptr;
-DISALLOW_COPY_AND_ASSIGN(Task);
};
ItemParallelJob(CancelableTaskManager* cancelable_task_manager,
@@ -118,6 +118,9 @@ class V8_EXPORT_PRIVATE ItemParallelJob {
~ItemParallelJob();
+ItemParallelJob(const ItemParallelJob&) = delete;
+ItemParallelJob& operator=(const ItemParallelJob&) = delete;
// Adds a task to the job. Transfers ownership to the job.
void AddTask(Task* task) { tasks_.push_back(std::unique_ptr<Task>(task)); }
@@ -135,8 +138,6 @@ class V8_EXPORT_PRIVATE ItemParallelJob {
std::vector<std::unique_ptr<Task>> tasks_;
CancelableTaskManager* cancelable_task_manager_;
base::Semaphore* pending_tasks_;
-DISALLOW_COPY_AND_ASSIGN(ItemParallelJob);
};
} // namespace internal
@@ -159,6 +159,9 @@ class MemoryAllocator::Unmapper::UnmapFreeMemoryJob : public JobTask {
explicit UnmapFreeMemoryJob(Isolate* isolate, Unmapper* unmapper)
: unmapper_(unmapper), tracer_(isolate->heap()->tracer()) {}
+UnmapFreeMemoryJob(const UnmapFreeMemoryJob&) = delete;
+UnmapFreeMemoryJob& operator=(const UnmapFreeMemoryJob&) = delete;
void Run(JobDelegate* delegate) override {
TRACE_GC1(tracer_, GCTracer::Scope::BACKGROUND_UNMAPPER,
ThreadKind::kBackground);
@@ -181,7 +184,6 @@ class MemoryAllocator::Unmapper::UnmapFreeMemoryJob : public JobTask {
private:
Unmapper* const unmapper_;
GCTracer* const tracer_;
-DISALLOW_COPY_AND_ASSIGN(UnmapFreeMemoryJob);
};
void MemoryAllocator::Unmapper::FreeQueuedChunks() {
@@ -115,6 +115,8 @@ class V8_EXPORT_PRIVATE MemoryReducer {
};
explicit MemoryReducer(Heap* heap);
+MemoryReducer(const MemoryReducer&) = delete;
+MemoryReducer& operator=(const MemoryReducer&) = delete;
// Callbacks.
void NotifyMarkCompact(const Event& event);
void NotifyPossibleGarbage(const Event& event);
@@ -146,12 +148,13 @@ class V8_EXPORT_PRIVATE MemoryReducer {
class TimerTask : public v8::internal::CancelableTask {
public:
explicit TimerTask(MemoryReducer* memory_reducer);
+TimerTask(const TimerTask&) = delete;
+TimerTask& operator=(const TimerTask&) = delete;
private:
// v8::internal::CancelableTask overrides.
void RunInternal() override;
MemoryReducer* memory_reducer_;
-DISALLOW_COPY_AND_ASSIGN(TimerTask);
};
void NotifyTimer(const Event& event);
@@ -166,7 +169,6 @@ class V8_EXPORT_PRIVATE MemoryReducer {
// Used in cctest.
friend class heap::HeapTester;
-DISALLOW_COPY_AND_ASSIGN(MemoryReducer);
};
} // namespace internal
@@ -39,6 +39,9 @@ class ReadOnlyHeap {
virtual ~ReadOnlyHeap() = default;
+ReadOnlyHeap(const ReadOnlyHeap&) = delete;
+ReadOnlyHeap& operator=(const ReadOnlyHeap&) = delete;
// If necessary creates read-only heap and initializes its artifacts (if the
// deserializer is provided). Then attaches the read-only heap to the isolate.
// If the deserializer is not provided, then the read-only heap will be only
@@ -121,8 +124,6 @@ class ReadOnlyHeap {
explicit ReadOnlyHeap(ReadOnlySpace* ro_space) : read_only_space_(ro_space) {}
ReadOnlyHeap(ReadOnlyHeap* ro_heap, ReadOnlySpace* ro_space);
-DISALLOW_COPY_AND_ASSIGN(ReadOnlyHeap);
};
// This is used without pointer compression when there is just a single
@@ -39,6 +39,9 @@ class PossiblyEmptyBuckets {
~PossiblyEmptyBuckets() { Release(); }
+PossiblyEmptyBuckets(const PossiblyEmptyBuckets&) = delete;
+PossiblyEmptyBuckets& operator=(const PossiblyEmptyBuckets&) = delete;
void Initialize() {
bitmap_ = kNullAddress;
DCHECK(!IsAllocated());
@@ -117,8 +120,6 @@ class PossiblyEmptyBuckets {
}
FRIEND_TEST(PossiblyEmptyBucketsTest, WordsForBuckets);
-DISALLOW_COPY_AND_ASSIGN(PossiblyEmptyBuckets);
};
STATIC_ASSERT(std::is_standard_layout<PossiblyEmptyBuckets>::value);
@@ -121,6 +121,9 @@ class V8_EXPORT_PRIVATE Space : public BaseSpace {
0;
}
+Space(const Space&) = delete;
+Space& operator=(const Space&) = delete;
static inline void MoveExternalBackingStoreBytes(
ExternalBackingStoreType type, Space* from, Space* to, size_t amount);
@@ -194,8 +197,6 @@ class V8_EXPORT_PRIVATE Space : public BaseSpace {
std::atomic<size_t>* external_backing_store_bytes_;
std::unique_ptr<FreeList> free_list_;
-DISALLOW_COPY_AND_ASSIGN(Space);
};
STATIC_ASSERT(sizeof(std::atomic<intptr_t>) == kSystemPointerSize);
@@ -81,6 +81,9 @@ class Sweeper::SweeperJob final : public JobTask {
~SweeperJob() override = default;
+SweeperJob(const SweeperJob&) = delete;
+SweeperJob& operator=(const SweeperJob&) = delete;
void Run(JobDelegate* delegate) final {
if (delegate->IsJoiningThread()) {
TRACE_GC(tracer_, GCTracer::Scope::MC_SWEEP);
@@ -116,8 +119,6 @@ class Sweeper::SweeperJob final : public JobTask {
}
Sweeper* const sweeper_;
GCTracer* const tracer_;
-DISALLOW_COPY_AND_ASSIGN(SweeperJob);
};
class Sweeper::IncrementalSweeperTask final : public CancelableTask {
@@ -127,6 +128,9 @@ class Sweeper::IncrementalSweeperTask final : public CancelableTask {
~IncrementalSweeperTask() override = default;
+IncrementalSweeperTask(const IncrementalSweeperTask&) = delete;
+IncrementalSweeperTask& operator=(const IncrementalSweeperTask&) = delete;
private:
void RunInternal() final {
VMState<GC> state(isolate_);
@@ -143,7 +147,6 @@ class Sweeper::IncrementalSweeperTask final : public CancelableTask {
Isolate* const isolate_;
Sweeper* const sweeper_;
-DISALLOW_COPY_AND_ASSIGN(IncrementalSweeperTask);
};
void Sweeper::TearDown() {
@@ -594,6 +597,9 @@ class Sweeper::IterabilityTask final : public CancelableTask {
~IterabilityTask() override = default;
+IterabilityTask(const IterabilityTask&) = delete;
+IterabilityTask& operator=(const IterabilityTask&) = delete;
private:
void RunInternal() final {
TRACE_GC1(tracer_, GCTracer::Scope::MC_BACKGROUND_SWEEPING,
@@ -608,8 +614,6 @@ class Sweeper::IterabilityTask final : public CancelableTask {
Sweeper* const sweeper_;
base::Semaphore* const pending_iterability_task_;
GCTracer* const tracer_;
-DISALLOW_COPY_AND_ASSIGN(IterabilityTask);
};
void Sweeper::StartIterabilityTasks() {