Commit 2c95775d authored by Zhi An Ng, committed by Commit Bot

[cleanup] Remove DISALLOW_COPY_AND_ASSIGN in profiler/

Bug: v8:11074
Change-Id: I11632ad59ec3826b71e901e0eb34ef6dc1295637
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2524419
Reviewed-by: Peter Marshall <petermarshall@chromium.org>
Commit-Queue: Zhi An Ng <zhin@chromium.org>
Cr-Commit-Position: refs/heads/master@{#71098}
parent 0839a72e
......@@ -28,6 +28,8 @@ class AllocationTraceNode {
AllocationTraceNode(AllocationTraceTree* tree,
unsigned function_info_index);
~AllocationTraceNode();
AllocationTraceNode(const AllocationTraceNode&) = delete;
AllocationTraceNode& operator=(const AllocationTraceNode&) = delete;
AllocationTraceNode* FindChild(unsigned function_info_index);
AllocationTraceNode* FindOrAddChild(unsigned function_info_index);
void AddAllocation(unsigned size);
......@@ -49,8 +51,6 @@ class AllocationTraceNode {
unsigned allocation_count_;
unsigned id_;
std::vector<AllocationTraceNode*> children_;
DISALLOW_COPY_AND_ASSIGN(AllocationTraceNode);
};
......@@ -58,6 +58,8 @@ class AllocationTraceTree {
public:
AllocationTraceTree();
~AllocationTraceTree() = default;
AllocationTraceTree(const AllocationTraceTree&) = delete;
AllocationTraceTree& operator=(const AllocationTraceTree&) = delete;
AllocationTraceNode* AddPathFromEnd(const Vector<unsigned>& path);
AllocationTraceNode* root() { return &root_; }
unsigned next_node_id() { return next_node_id_++; }
......@@ -66,8 +68,6 @@ class AllocationTraceTree {
private:
unsigned next_node_id_;
AllocationTraceNode root_;
DISALLOW_COPY_AND_ASSIGN(AllocationTraceTree);
};
class V8_EXPORT_PRIVATE AddressToTraceMap {
......@@ -108,6 +108,8 @@ class AllocationTracker {
AllocationTracker(HeapObjectsMap* ids, StringsStorage* names);
~AllocationTracker();
AllocationTracker(const AllocationTracker&) = delete;
AllocationTracker& operator=(const AllocationTracker&) = delete;
V8_EXPORT_PRIVATE void PrepareForSerialization();
void AllocationEvent(Address addr, int size);
......@@ -146,8 +148,6 @@ class AllocationTracker {
std::vector<UnresolvedLocation*> unresolved_locations_;
unsigned info_index_for_other_state_;
AddressToTraceMap address_to_trace_;
DISALLOW_COPY_AND_ASSIGN(AllocationTracker);
};
} // namespace internal
......
......@@ -23,6 +23,8 @@ class SamplingCircularQueue {
// Executed on the application thread.
SamplingCircularQueue();
~SamplingCircularQueue();
SamplingCircularQueue(const SamplingCircularQueue&) = delete;
SamplingCircularQueue& operator=(const SamplingCircularQueue&) = delete;
// StartEnqueue returns a pointer to a memory location for storing the next
// record or nullptr if all entries are full at the moment.
......@@ -57,8 +59,6 @@ class SamplingCircularQueue {
Entry buffer_[Length];
alignas(PROCESSOR_CACHE_LINE_SIZE) Entry* enqueue_pos_;
alignas(PROCESSOR_CACHE_LINE_SIZE) Entry* dequeue_pos_;
DISALLOW_COPY_AND_ASSIGN(SamplingCircularQueue);
};
......
......@@ -302,6 +302,8 @@ class V8_EXPORT_PRIVATE CpuProfiler {
ProfilerEventsProcessor* test_processor);
~CpuProfiler();
CpuProfiler(const CpuProfiler&) = delete;
CpuProfiler& operator=(const CpuProfiler&) = delete;
static void CollectSample(Isolate* isolate);
......@@ -366,8 +368,6 @@ class V8_EXPORT_PRIVATE CpuProfiler {
std::unique_ptr<ProfilingScope> profiling_scope_;
ProfilerCodeObserver code_observer_;
bool is_profiling_;
DISALLOW_COPY_AND_ASSIGN(CpuProfiler);
};
} // namespace internal
......
......@@ -28,6 +28,8 @@ class HeapProfiler : public HeapObjectAllocationTracker {
public:
explicit HeapProfiler(Heap* heap);
~HeapProfiler() override;
HeapProfiler(const HeapProfiler&) = delete;
HeapProfiler& operator=(const HeapProfiler&) = delete;
HeapSnapshot* TakeSnapshot(v8::ActivityControl* control,
v8::HeapProfiler::ObjectNameResolver* resolver,
......@@ -109,8 +111,6 @@ class HeapProfiler : public HeapObjectAllocationTracker {
build_embedder_graph_callbacks_;
std::pair<v8::HeapProfiler::GetDetachednessCallback, void*>
get_detachedness_callback_;
DISALLOW_COPY_AND_ASSIGN(HeapProfiler);
};
} // namespace internal
......
......@@ -188,6 +188,8 @@ class HeapEntry {
class HeapSnapshot {
public:
explicit HeapSnapshot(HeapProfiler* profiler, bool global_objects_as_roots);
HeapSnapshot(const HeapSnapshot&) = delete;
HeapSnapshot& operator=(const HeapSnapshot&) = delete;
void Delete();
HeapProfiler* profiler() const { return profiler_; }
......@@ -242,8 +244,6 @@ class HeapSnapshot {
std::vector<SourceLocation> locations_;
SnapshotObjectId max_snapshot_js_object_id_ = -1;
bool treat_global_objects_as_roots_;
DISALLOW_COPY_AND_ASSIGN(HeapSnapshot);
};
......@@ -260,6 +260,8 @@ class HeapObjectsMap {
};
explicit HeapObjectsMap(Heap* heap);
HeapObjectsMap(const HeapObjectsMap&) = delete;
HeapObjectsMap& operator=(const HeapObjectsMap&) = delete;
Heap* heap() const { return heap_; }
......@@ -308,8 +310,6 @@ class HeapObjectsMap {
// Map from NativeObject to EntryInfo index in entries_.
std::unordered_map<NativeObject, size_t> merged_native_entries_map_;
Heap* heap_;
DISALLOW_COPY_AND_ASSIGN(HeapObjectsMap);
};
// A typedef for referencing anything that can be snapshotted living
......@@ -337,6 +337,8 @@ class V8_EXPORT_PRIVATE V8HeapExplorer : public HeapEntriesAllocator {
SnapshottingProgressReportingInterface* progress,
v8::HeapProfiler::ObjectNameResolver* resolver);
~V8HeapExplorer() override = default;
V8HeapExplorer(const V8HeapExplorer&) = delete;
V8HeapExplorer& operator=(const V8HeapExplorer&) = delete;
HeapEntry* AllocateEntry(HeapThing ptr) override;
int EstimateObjectsCount();
......@@ -462,8 +464,6 @@ class V8_EXPORT_PRIVATE V8HeapExplorer : public HeapEntriesAllocator {
friend class IndexedReferencesExtractor;
friend class RootsReferencesExtractor;
DISALLOW_COPY_AND_ASSIGN(V8HeapExplorer);
};
// An implementation of retained native objects extractor.
......@@ -471,6 +471,8 @@ class NativeObjectsExplorer {
public:
NativeObjectsExplorer(HeapSnapshot* snapshot,
SnapshottingProgressReportingInterface* progress);
NativeObjectsExplorer(const NativeObjectsExplorer&) = delete;
NativeObjectsExplorer& operator=(const NativeObjectsExplorer&) = delete;
bool IterateAndExtractReferences(HeapSnapshotGenerator* generator);
private:
......@@ -491,8 +493,6 @@ class NativeObjectsExplorer {
static HeapThing const kNativesRootObject;
friend class GlobalHandlesExtractor;
DISALLOW_COPY_AND_ASSIGN(NativeObjectsExplorer);
};
class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface {
......@@ -505,6 +505,8 @@ class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface {
v8::ActivityControl* control,
v8::HeapProfiler::ObjectNameResolver* resolver,
Heap* heap);
HeapSnapshotGenerator(const HeapSnapshotGenerator&) = delete;
HeapSnapshotGenerator& operator=(const HeapSnapshotGenerator&) = delete;
bool GenerateSnapshot();
HeapEntry* FindEntry(HeapThing ptr) {
......@@ -538,8 +540,6 @@ class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface {
int progress_counter_;
int progress_total_;
Heap* heap_;
DISALLOW_COPY_AND_ASSIGN(HeapSnapshotGenerator);
};
class OutputStreamWriter;
......@@ -552,6 +552,9 @@ class HeapSnapshotJSONSerializer {
next_node_id_(1),
next_string_id_(1),
writer_(nullptr) {}
HeapSnapshotJSONSerializer(const HeapSnapshotJSONSerializer&) = delete;
HeapSnapshotJSONSerializer& operator=(const HeapSnapshotJSONSerializer&) =
delete;
void Serialize(v8::OutputStream* stream);
private:
......@@ -591,8 +594,6 @@ class HeapSnapshotJSONSerializer {
friend class HeapSnapshotJSONSerializerEnumerator;
friend class HeapSnapshotJSONSerializerIterator;
DISALLOW_COPY_AND_ASSIGN(HeapSnapshotJSONSerializer);
};
......
......@@ -31,6 +31,8 @@ struct TickSample;
class V8_EXPORT_PRIVATE SourcePositionTable : public Malloced {
public:
SourcePositionTable() = default;
SourcePositionTable(const SourcePositionTable&) = delete;
SourcePositionTable& operator=(const SourcePositionTable&) = delete;
void SetPosition(int pc_offset, int line, int inlining_id);
int GetSourceLineNumber(int pc_offset) const;
......@@ -51,7 +53,6 @@ class V8_EXPORT_PRIVATE SourcePositionTable : public Malloced {
// the pc offset, so that we can save space and look up items using binary
// search.
std::vector<SourcePositionTuple> pc_offsets_to_lines_;
DISALLOW_COPY_AND_ASSIGN(SourcePositionTable);
};
struct CodeEntryAndLineNumber;
......@@ -65,6 +66,8 @@ class CodeEntry {
int column_number = v8::CpuProfileNode::kNoColumnNumberInfo,
std::unique_ptr<SourcePositionTable> line_info = nullptr,
bool is_shared_cross_origin = false);
CodeEntry(const CodeEntry&) = delete;
CodeEntry& operator=(const CodeEntry&) = delete;
const char* name() const { return name_; }
const char* resource_name() const { return resource_name_; }
......@@ -219,8 +222,6 @@ class CodeEntry {
int position_;
std::unique_ptr<SourcePositionTable> line_info_;
std::unique_ptr<RareData> rare_data_;
DISALLOW_COPY_AND_ASSIGN(CodeEntry);
};
struct CodeEntryAndLineNumber {
......@@ -236,6 +237,8 @@ class V8_EXPORT_PRIVATE ProfileNode {
public:
inline ProfileNode(ProfileTree* tree, CodeEntry* entry, ProfileNode* parent,
int line_number = 0);
ProfileNode(const ProfileNode&) = delete;
ProfileNode& operator=(const ProfileNode&) = delete;
ProfileNode* FindChild(
CodeEntry* entry,
......@@ -295,14 +298,14 @@ class V8_EXPORT_PRIVATE ProfileNode {
std::unordered_map<int, int> line_ticks_;
std::vector<CpuProfileDeoptInfo> deopt_infos_;
DISALLOW_COPY_AND_ASSIGN(ProfileNode);
};
class V8_EXPORT_PRIVATE ProfileTree {
public:
explicit ProfileTree(Isolate* isolate);
~ProfileTree();
ProfileTree(const ProfileTree&) = delete;
ProfileTree& operator=(const ProfileTree&) = delete;
using ProfilingMode = v8::CpuProfilingMode;
......@@ -337,8 +340,6 @@ class V8_EXPORT_PRIVATE ProfileTree {
unsigned next_node_id_;
ProfileNode* root_;
Isolate* isolate_;
DISALLOW_COPY_AND_ASSIGN(ProfileTree);
};
class CpuProfiler;
......@@ -353,6 +354,8 @@ class CpuProfile {
V8_EXPORT_PRIVATE CpuProfile(CpuProfiler* profiler, const char* title,
CpuProfilingOptions options);
CpuProfile(const CpuProfile&) = delete;
CpuProfile& operator=(const CpuProfile&) = delete;
// Checks whether or not the given TickSample should be (sub)sampled, given
// the sampling interval of the profiler that recorded it (in microseconds).
......@@ -398,14 +401,14 @@ class CpuProfile {
base::TimeDelta next_sample_delta_;
static std::atomic<uint32_t> last_id_;
DISALLOW_COPY_AND_ASSIGN(CpuProfile);
};
class V8_EXPORT_PRIVATE CodeMap {
public:
CodeMap();
~CodeMap();
CodeMap(const CodeMap&) = delete;
CodeMap& operator=(const CodeMap&) = delete;
void AddCode(Address addr, CodeEntry* entry, unsigned size);
void MoveCode(Address from, Address to);
......@@ -437,13 +440,13 @@ class V8_EXPORT_PRIVATE CodeMap {
std::deque<CodeEntrySlotInfo> code_entries_;
std::map<Address, CodeEntryMapInfo> code_map_;
unsigned free_list_head_ = kNoFreeSlot;
DISALLOW_COPY_AND_ASSIGN(CodeMap);
};
class V8_EXPORT_PRIVATE CpuProfilesCollection {
public:
explicit CpuProfilesCollection(Isolate* isolate);
CpuProfilesCollection(const CpuProfilesCollection&) = delete;
CpuProfilesCollection& operator=(const CpuProfilesCollection&) = delete;
void set_cpu_profiler(CpuProfiler* profiler) { profiler_ = profiler; }
CpuProfilingStatus StartProfiling(const char* title,
......@@ -479,8 +482,6 @@ class V8_EXPORT_PRIVATE CpuProfilesCollection {
// Accessed by VM thread and profile generator thread.
std::vector<std::unique_ptr<CpuProfile>> current_profiles_;
base::Semaphore current_profiles_semaphore_;
DISALLOW_COPY_AND_ASSIGN(CpuProfilesCollection);
};
} // namespace internal
......
......@@ -29,6 +29,8 @@ class V8_EXPORT_PRIVATE ProfilerListener : public CodeEventListener {
ProfilerListener(Isolate*, CodeEventObserver*,
CpuProfilingNamingMode mode = kDebugNaming);
~ProfilerListener() override;
ProfilerListener(const ProfilerListener&) = delete;
ProfilerListener& operator=(const ProfilerListener&) = delete;
void CodeCreateEvent(LogEventsAndTags tag, Handle<AbstractCode> code,
const char* name) override;
......@@ -88,8 +90,6 @@ class V8_EXPORT_PRIVATE ProfilerListener : public CodeEventListener {
CodeEventObserver* observer_;
StringsStorage function_and_resource_names_;
const CpuProfilingNamingMode naming_mode_;
DISALLOW_COPY_AND_ASSIGN(ProfilerListener);
};
} // namespace internal
......
......@@ -25,6 +25,8 @@ namespace internal {
class AllocationProfile : public v8::AllocationProfile {
public:
AllocationProfile() = default;
AllocationProfile(const AllocationProfile&) = delete;
AllocationProfile& operator=(const AllocationProfile&) = delete;
v8::AllocationProfile::Node* GetRootNode() override {
return nodes_.size() == 0 ? nullptr : &nodes_.front();
......@@ -39,8 +41,6 @@ class AllocationProfile : public v8::AllocationProfile {
std::vector<v8::AllocationProfile::Sample> samples_;
friend class SamplingHeapProfiler;
DISALLOW_COPY_AND_ASSIGN(AllocationProfile);
};
class SamplingHeapProfiler {
......@@ -55,6 +55,8 @@ class SamplingHeapProfiler {
script_position_(start_position),
name_(name),
id_(id) {}
AllocationNode(const AllocationNode&) = delete;
AllocationNode& operator=(const AllocationNode&) = delete;
AllocationNode* FindChildNode(FunctionId id) {
auto it = children_.find(id);
......@@ -95,8 +97,6 @@ class SamplingHeapProfiler {
bool pinned_ = false;
friend class SamplingHeapProfiler;
DISALLOW_COPY_AND_ASSIGN(AllocationNode);
};
struct Sample {
......@@ -107,19 +107,20 @@ class SamplingHeapProfiler {
global(reinterpret_cast<v8::Isolate*>(profiler_->isolate_), local_),
profiler(profiler_),
sample_id(sample_id) {}
Sample(const Sample&) = delete;
Sample& operator=(const Sample&) = delete;
const size_t size;
AllocationNode* const owner;
Global<Value> global;
SamplingHeapProfiler* const profiler;
const uint64_t sample_id;
private:
DISALLOW_COPY_AND_ASSIGN(Sample);
};
SamplingHeapProfiler(Heap* heap, StringsStorage* names, uint64_t rate,
int stack_depth, v8::HeapProfiler::SamplingFlags flags);
~SamplingHeapProfiler();
SamplingHeapProfiler(const SamplingHeapProfiler&) = delete;
SamplingHeapProfiler& operator=(const SamplingHeapProfiler&) = delete;
v8::AllocationProfile* GetAllocationProfile();
StringsStorage* names() const { return names_; }
......@@ -193,8 +194,6 @@ class SamplingHeapProfiler {
const int stack_depth_;
const uint64_t rate_;
v8::HeapProfiler::SamplingFlags flags_;
DISALLOW_COPY_AND_ASSIGN(SamplingHeapProfiler);
};
} // namespace internal
......
......@@ -22,6 +22,8 @@ class V8_EXPORT_PRIVATE StringsStorage {
public:
StringsStorage();
~StringsStorage();
StringsStorage(const StringsStorage&) = delete;
StringsStorage& operator=(const StringsStorage&) = delete;
// Copies the given c-string and stores it, returning the stored copy, or just
// returns the existing string in storage if it already exists.
......@@ -53,8 +55,6 @@ class V8_EXPORT_PRIVATE StringsStorage {
const char* GetVFormatted(const char* format, va_list args);
base::CustomMatcherHashMap names_;
DISALLOW_COPY_AND_ASSIGN(StringsStorage);
};
} // namespace internal
......
......@@ -17,6 +17,8 @@ class CodeMap;
class V8_EXPORT_PRIVATE Symbolizer {
public:
explicit Symbolizer(CodeMap* code_map);
Symbolizer(const Symbolizer&) = delete;
Symbolizer& operator=(const Symbolizer&) = delete;
struct SymbolizedSample {
ProfileStackTrace stack_trace;
......@@ -34,8 +36,6 @@ class V8_EXPORT_PRIVATE Symbolizer {
Address* out_instruction_start = nullptr);
CodeMap* const code_map_;
DISALLOW_COPY_AND_ASSIGN(Symbolizer);
};
} // namespace internal
......
......@@ -23,6 +23,8 @@ class TracingCpuProfilerImpl final
public:
explicit TracingCpuProfilerImpl(Isolate*);
~TracingCpuProfilerImpl() override;
TracingCpuProfilerImpl(const TracingCpuProfilerImpl&) = delete;
TracingCpuProfilerImpl& operator=(const TracingCpuProfilerImpl&) = delete;
// v8::TracingController::TraceStateObserver
void OnTraceEnabled() final;
......@@ -36,8 +38,6 @@ class TracingCpuProfilerImpl final
std::unique_ptr<CpuProfiler> profiler_;
bool profiling_enabled_;
base::Mutex mutex_;
DISALLOW_COPY_AND_ASSIGN(TracingCpuProfilerImpl);
};
} // namespace internal
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment