Commit dc7425b6 authored by Michael Lippautz, committed by V8 LUCI CQ

heap: Replace WeakObjects Worklist with ::heap::base::Worklist

This CL converts uses of v8::internal::Worklist to heap::base::Worklist,
which does not require knowing the number of tasks working on the
worklist upfront. heap::base::Worklist is the common implementation for
V8's heap and cppgc and should be used and optimized going forward.

Bug: v8:12426
Change-Id: Id6ef1aa05df858b01df90d653b6421a9bb68b997
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3306382
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#78151}
parent 5a663fd9
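The following is a minimal, self-contained sketch of the API difference described in the commit message. OldWorklist and NewWorklist are made-up stand-ins (no segments, no thread safety), not the real v8::internal::Worklist or heap::base::Worklist: the old flavor sizes the worklist for a fixed number of tasks and threads a task id through every Push and FlushToGlobal, while the new flavor hands out Local views on demand and a single Publish() moves their entries to the shared worklist.

#include <utility>
#include <vector>

// Old style: the number of tasks is fixed when the worklist is set up and
// every operation names a task id.
template <typename T, int kNumTasks>
class OldWorklist {
 public:
  void Push(int task_id, T value) {
    per_task_[task_id].push_back(std::move(value));
  }
  void FlushToGlobal(int task_id) {
    for (auto& v : per_task_[task_id]) global_.push_back(std::move(v));
    per_task_[task_id].clear();
  }

 private:
  std::vector<T> per_task_[kNumTasks];
  std::vector<T> global_;
};

// New style: Local views are created on demand by whoever needs one; the
// shared worklist never needs to know how many of them exist.
template <typename T>
class NewWorklist {
 public:
  class Local {
   public:
    explicit Local(NewWorklist* shared) : shared_(shared) {}
    void Push(T value) { buffer_.push_back(std::move(value)); }
    void Publish() {
      for (auto& v : buffer_) shared_->global_.push_back(std::move(v));
      buffer_.clear();
    }

   private:
    NewWorklist* shared_;
    std::vector<T> buffer_;
  };

 private:
  std::vector<T> global_;
};

int main() {
  OldWorklist<int, /*kNumTasks=*/8> old_list;  // task count fixed upfront
  old_list.Push(/*task_id=*/1, 42);
  old_list.FlushToGlobal(/*task_id=*/1);

  NewWorklist<int> new_list;                 // no task count anywhere
  NewWorklist<int>::Local local(&new_list);  // one Local per worker, on demand
  local.Push(42);
  local.Publish();                           // replaces FlushToGlobal(task_id)
  return 0;
}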
......@@ -60,6 +60,9 @@ class Worklist {
// marking worklist.
void Merge(Worklist<EntryType, SegmentSize>* other);
// Swaps the segments with the given marking worklist.
void Swap(Worklist<EntryType, SegmentSize>* other);
// These functions are not thread-safe. They should be called only
// if all local marking worklists that use the current worklist have
// been published and are empty.
......@@ -190,6 +193,17 @@ void Worklist<EntryType, SegmentSize>::Merge(
}
}
template <typename EntryType, uint16_t SegmentSize>
void Worklist<EntryType, SegmentSize>::Swap(
Worklist<EntryType, SegmentSize>* other) {
Segment* top = top_;
set_top(other->top_);
other->set_top(top);
size_t other_size = other->size_.exchange(
size_.load(std::memory_order_relaxed), std::memory_order_relaxed);
size_.store(other_size, std::memory_order_relaxed);
}
template <typename EntryType, uint16_t SegmentSize>
class Worklist<EntryType, SegmentSize>::Segment : public internal::SegmentBase {
public:
......
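The Swap() added above exchanges only the two worklists' segment chains and their cached sizes; callers must guarantee exclusive access, which is why relaxed atomics are sufficient. Below is a standalone sketch of the same semantics with the worklist reduced to a top pointer and an atomic size (MiniWorklist is a made-up stand-in, not the real class).

#include <atomic>
#include <cassert>
#include <cstddef>

struct MiniWorklist {
  struct Segment { Segment* next = nullptr; };

  void Swap(MiniWorklist* other) {
    // Exchange the segment chains.
    Segment* top = top_;
    top_ = other->top_;
    other->top_ = top;
    // Exchange the cached sizes; relaxed ordering suffices because the
    // caller guarantees no concurrent access during Swap.
    std::size_t other_size = other->size_.exchange(
        size_.load(std::memory_order_relaxed), std::memory_order_relaxed);
    size_.store(other_size, std::memory_order_relaxed);
  }

  Segment* top_ = nullptr;
  std::atomic<std::size_t> size_{0};
};

int main() {
  MiniWorklist a, b;
  MiniWorklist::Segment segment;
  a.top_ = &segment;
  a.size_.store(1, std::memory_order_relaxed);
  a.Swap(&b);
  assert(a.top_ == nullptr && b.top_ == &segment);
  assert(b.size_.load(std::memory_order_relaxed) == 1);
  assert(a.size_.load(std::memory_order_relaxed) == 0);
  return 0;
}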
......@@ -23,6 +23,7 @@
#include "src/heap/memory-measurement.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/objects-visiting.h"
#include "src/heap/weak-object-worklists.h"
#include "src/heap/worklist.h"
#include "src/init/v8.h"
#include "src/objects/data-handler-inl.h"
......@@ -86,13 +87,13 @@ class ConcurrentMarkingVisitor final
public:
ConcurrentMarkingVisitor(int task_id,
MarkingWorklists::Local* local_marking_worklists,
WeakObjects* weak_objects, Heap* heap,
WeakObjects::Local* local_weak_objects, Heap* heap,
unsigned mark_compact_epoch,
base::EnumSet<CodeFlushMode> code_flush_mode,
bool embedder_tracing_enabled,
bool should_keep_ages_unchanged,
MemoryChunkDataMap* memory_chunk_data)
: MarkingVisitorBase(task_id, local_marking_worklists, weak_objects, heap,
: MarkingVisitorBase(local_marking_worklists, local_weak_objects, heap,
mark_compact_epoch, code_flush_mode,
embedder_tracing_enabled,
should_keep_ages_unchanged),
......@@ -160,7 +161,7 @@ class ConcurrentMarkingVisitor final
}
} else if (marking_state_.IsWhite(value)) {
weak_objects_->next_ephemerons.Push(task_id_, Ephemeron{key, value});
local_weak_objects_->next_ephemerons_local.Push(Ephemeron{key, value});
}
return false;
}
......@@ -447,8 +448,9 @@ void ConcurrentMarking::Run(JobDelegate* delegate,
uint8_t task_id = delegate->GetTaskId() + 1;
TaskState* task_state = &task_state_[task_id];
MarkingWorklists::Local local_marking_worklists(marking_worklists_);
WeakObjects::Local local_weak_objects(weak_objects_);
ConcurrentMarkingVisitor visitor(
task_id, &local_marking_worklists, weak_objects_, heap_,
task_id, &local_marking_worklists, &local_weak_objects, heap_,
mark_compact_epoch, code_flush_mode,
heap_->local_embedder_heap_tracer()->InUse(), should_keep_ages_unchanged,
&task_state->memory_chunk_data);
......@@ -469,8 +471,7 @@ void ConcurrentMarking::Run(JobDelegate* delegate,
{
Ephemeron ephemeron;
while (weak_objects_->current_ephemerons.Pop(task_id, &ephemeron)) {
while (local_weak_objects.current_ephemerons_local.Pop(&ephemeron)) {
if (visitor.ProcessEphemeron(ephemeron.key, ephemeron.value)) {
another_ephemeron_iteration = true;
}
......@@ -538,8 +539,7 @@ void ConcurrentMarking::Run(JobDelegate* delegate,
if (done) {
Ephemeron ephemeron;
while (weak_objects_->discovered_ephemerons.Pop(task_id, &ephemeron)) {
while (local_weak_objects.discovered_ephemerons_local.Pop(&ephemeron)) {
if (visitor.ProcessEphemeron(ephemeron.key, ephemeron.value)) {
another_ephemeron_iteration = true;
}
......@@ -547,18 +547,7 @@ void ConcurrentMarking::Run(JobDelegate* delegate,
}
local_marking_worklists.Publish();
weak_objects_->transition_arrays.FlushToGlobal(task_id);
weak_objects_->ephemeron_hash_tables.FlushToGlobal(task_id);
weak_objects_->current_ephemerons.FlushToGlobal(task_id);
weak_objects_->next_ephemerons.FlushToGlobal(task_id);
weak_objects_->discovered_ephemerons.FlushToGlobal(task_id);
weak_objects_->weak_references.FlushToGlobal(task_id);
weak_objects_->js_weak_refs.FlushToGlobal(task_id);
weak_objects_->weak_cells.FlushToGlobal(task_id);
weak_objects_->weak_objects_in_code.FlushToGlobal(task_id);
weak_objects_->code_flushing_candidates.FlushToGlobal(task_id);
weak_objects_->baseline_flushing_candidates.FlushToGlobal(task_id);
weak_objects_->flushed_js_functions.FlushToGlobal(task_id);
local_weak_objects.Publish();
base::AsAtomicWord::Relaxed_Store<size_t>(&task_state->marked_bytes, 0);
total_marked_bytes_ += marked_bytes;
......@@ -579,10 +568,10 @@ size_t ConcurrentMarking::GetMaxConcurrency(size_t worker_count) {
marking_items += worklist.worklist->Size();
return std::min<size_t>(
kMaxTasks,
worker_count + std::max<size_t>(
{marking_items,
weak_objects_->discovered_ephemerons.GlobalPoolSize(),
weak_objects_->current_ephemerons.GlobalPoolSize()}));
worker_count +
std::max<size_t>({marking_items,
weak_objects_->discovered_ephemerons.Size(),
weak_objects_->current_ephemerons.Size()}));
}
void ConcurrentMarking::ScheduleJob(TaskPriority priority) {
......@@ -603,8 +592,8 @@ void ConcurrentMarking::RescheduleJobIfNeeded(TaskPriority priority) {
if (heap_->IsTearingDown()) return;
if (marking_worklists_->shared()->IsEmpty() &&
weak_objects_->current_ephemerons.IsGlobalPoolEmpty() &&
weak_objects_->discovered_ephemerons.IsGlobalPoolEmpty()) {
weak_objects_->current_ephemerons.IsEmpty() &&
weak_objects_->discovered_ephemerons.IsEmpty()) {
return;
}
if (!job_handle_ || !job_handle_->IsValid()) {
......
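The GetMaxConcurrency() change above only swaps GlobalPoolSize() for Size(); the heuristic itself is unchanged. For clarity, here is a worked example of that arithmetic with made-up numbers (DesiredConcurrency is an illustrative helper, not a V8 function).

#include <algorithm>
#include <cstddef>
#include <iostream>

// Desired concurrency is the current worker count plus the largest backlog,
// capped at max_tasks.
std::size_t DesiredConcurrency(std::size_t worker_count,
                               std::size_t marking_items,
                               std::size_t discovered_ephemerons,
                               std::size_t current_ephemerons,
                               std::size_t max_tasks) {
  return std::min<std::size_t>(
      max_tasks,
      worker_count + std::max<std::size_t>({marking_items,
                                            discovered_ephemerons,
                                            current_ephemerons}));
}

int main() {
  // worker_count = 2, marking backlog = 10, ephemeron backlogs = 3 and 0,
  // kMaxTasks stand-in = 8: min(8, 2 + max(10, 3, 0)) = 8.
  std::cout << DesiredConcurrency(2, 10, 3, 0, 8) << "\n";  // prints 8
  return 0;
}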
......@@ -504,6 +504,7 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
}
});
collector_->local_weak_objects()->Publish();
weak_objects_->UpdateAfterScavenge();
}
......
......@@ -89,7 +89,7 @@ void MarkCompactCollector::RecordSlot(MemoryChunk* source_page,
}
void MarkCompactCollector::AddTransitionArray(TransitionArray array) {
weak_objects_.transition_arrays.Push(kMainThreadTask, array);
local_weak_objects()->transition_arrays_local.Push(array);
}
template <typename MarkingState>
......
......@@ -390,13 +390,13 @@ class MainMarkingVisitor final
MainMarkingVisitor(MarkingState* marking_state,
MarkingWorklists::Local* local_marking_worklists,
WeakObjects* weak_objects, Heap* heap,
WeakObjects::Local* local_weak_objects, Heap* heap,
unsigned mark_compact_epoch,
base::EnumSet<CodeFlushMode> code_flush_mode,
bool embedder_tracing_enabled,
bool should_keep_ages_unchanged)
: MarkingVisitorBase<MainMarkingVisitor<MarkingState>, MarkingState>(
kMainThreadTask, local_marking_worklists, weak_objects, heap,
local_marking_worklists, local_weak_objects, heap,
mark_compact_epoch, code_flush_mode, embedder_tracing_enabled,
should_keep_ages_unchanged),
marking_state_(marking_state),
......@@ -552,6 +552,8 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
WeakObjects* weak_objects() { return &weak_objects_; }
WeakObjects::Local* local_weak_objects() { return local_weak_objects_.get(); }
inline void AddTransitionArray(TransitionArray array);
void AddNewlyDiscovered(HeapObject object) {
......@@ -719,8 +721,6 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
// those with dead values.
void ClearJSWeakRefs();
void AbortWeakObjects();
// Starts sweeping of spaces by contributing on the main thread and setting
// up other pages for sweeping. Does not start sweeper tasks.
void StartSweepSpaces();
......@@ -782,6 +782,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
std::unique_ptr<MarkingVisitor> marking_visitor_;
std::unique_ptr<MarkingWorklists::Local> local_marking_worklists_;
std::unique_ptr<WeakObjects::Local> local_weak_objects_;
NativeContextInferrer native_context_inferrer_;
NativeContextStats native_context_stats_;
......
......@@ -63,7 +63,7 @@ void MarkingVisitorBase<ConcreteVisitor, MarkingState>::ProcessWeakHeapObject(
// If we do not know about liveness of the value, we have to process
// the reference when we know the liveness of the whole transitive
// closure.
weak_objects_->weak_references.Push(task_id_, std::make_pair(host, slot));
local_weak_objects_->weak_references_local.Push(std::make_pair(host, slot));
}
}
......@@ -114,8 +114,8 @@ void MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEmbeddedPointer(
rinfo->target_object(ObjectVisitorWithCageBases::cage_base());
if (!concrete_visitor()->marking_state()->IsBlackOrGrey(object)) {
if (host.IsWeakObject(object)) {
weak_objects_->weak_objects_in_code.Push(task_id_,
std::make_pair(object, host));
local_weak_objects_->weak_objects_in_code_local.Push(
std::make_pair(object, host));
} else {
MarkObject(host, object);
}
......@@ -155,7 +155,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSFunction(
int size = concrete_visitor()->VisitJSObjectSubclass(map, js_function);
if (js_function.ShouldFlushBaselineCode(code_flush_mode_)) {
DCHECK(IsBaselineCodeFlushingEnabled(code_flush_mode_));
weak_objects_->baseline_flushing_candidates.Push(task_id_, js_function);
local_weak_objects_->baseline_flushing_candidates_local.Push(js_function);
} else {
VisitPointer(js_function, js_function.RawField(JSFunction::kCodeOffset));
// TODO(mythria): Consider updating the check for ShouldFlushBaselineCode to
......@@ -163,7 +163,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSFunction(
// baseline code and remove this check here.
if (IsByteCodeFlushingEnabled(code_flush_mode_) &&
js_function.NeedsResetDueToFlushedBytecode()) {
weak_objects_->flushed_js_functions.Push(task_id_, js_function);
local_weak_objects_->flushed_js_functions_local.Push(js_function);
}
}
return size;
......@@ -194,11 +194,11 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitSharedFunctionInfo(
VisitPointer(baseline_code,
baseline_code.RawField(
Code::kDeoptimizationDataOrInterpreterDataOffset));
weak_objects_->code_flushing_candidates.Push(task_id_, shared_info);
local_weak_objects_->code_flushing_candidates_local.Push(shared_info);
} else {
// In other cases, record as a flushing candidate since we have old
// bytecode.
weak_objects_->code_flushing_candidates.Push(task_id_, shared_info);
local_weak_objects_->code_flushing_candidates_local.Push(shared_info);
}
return size;
}
......@@ -306,7 +306,7 @@ template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEphemeronHashTable(
Map map, EphemeronHashTable table) {
if (!concrete_visitor()->ShouldVisit(table)) return 0;
weak_objects_->ephemeron_hash_tables.Push(task_id_, table);
local_weak_objects_->ephemeron_hash_tables_local.Push(table);
for (InternalIndex i : table.IterateEntries()) {
ObjectSlot key_slot =
......@@ -332,8 +332,8 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEphemeronHashTable(
// Revisit ephemerons with both key and value unreachable at end
// of concurrent marking cycle.
if (concrete_visitor()->marking_state()->IsWhite(value)) {
weak_objects_->discovered_ephemerons.Push(task_id_,
Ephemeron{key, value});
local_weak_objects_->discovered_ephemerons_local.Push(
Ephemeron{key, value});
}
}
}
......@@ -357,7 +357,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSWeakRef(
} else {
// JSWeakRef points to a potentially dead object. We have to process
// them when we know the liveness of the whole transitive closure.
weak_objects_->js_weak_refs.Push(task_id_, weak_ref);
local_weak_objects_->js_weak_refs_local.Push(weak_ref);
}
}
return size;
......@@ -387,7 +387,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitWeakCell(
// WeakCell points to a potentially dead object or a dead unregister
// token. We have to process them when we know the liveness of the whole
// transitive closure.
weak_objects_->weak_cells.Push(task_id_, weak_cell);
local_weak_objects_->weak_cells_local.Push(weak_cell);
}
return size;
}
......@@ -505,7 +505,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitTransitionArray(
this->VisitMapPointer(array);
int size = TransitionArray::BodyDescriptor::SizeOf(map, array);
TransitionArray::BodyDescriptor::IterateBody(map, array, size, this);
weak_objects_->transition_arrays.Push(task_id_, array);
local_weak_objects_->transition_arrays_local.Push(array);
return size;
}
......
......@@ -127,18 +127,17 @@ class MarkingStateBase {
template <typename ConcreteVisitor, typename MarkingState>
class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
public:
MarkingVisitorBase(int task_id,
MarkingWorklists::Local* local_marking_worklists,
WeakObjects* weak_objects, Heap* heap,
unsigned mark_compact_epoch,
MarkingVisitorBase(MarkingWorklists::Local* local_marking_worklists,
WeakObjects::Local* local_weak_objects,
Heap* heap, unsigned mark_compact_epoch,
base::EnumSet<CodeFlushMode> code_flush_mode,
bool is_embedder_tracing_enabled,
bool should_keep_ages_unchanged)
: HeapVisitor<int, ConcreteVisitor>(heap),
local_marking_worklists_(local_marking_worklists),
weak_objects_(weak_objects),
local_weak_objects_(local_weak_objects),
heap_(heap),
task_id_(task_id),
mark_compact_epoch_(mark_compact_epoch),
code_flush_mode_(code_flush_mode),
is_embedder_tracing_enabled_(is_embedder_tracing_enabled),
......@@ -231,9 +230,8 @@ class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
V8_INLINE void MarkObject(HeapObject host, HeapObject obj);
MarkingWorklists::Local* const local_marking_worklists_;
WeakObjects* const weak_objects_;
WeakObjects::Local* const local_weak_objects_;
Heap* const heap_;
const int task_id_;
const unsigned mark_compact_epoch_;
const base::EnumSet<CodeFlushMode> code_flush_mode_;
const bool is_embedder_tracing_enabled_;
......
......@@ -19,12 +19,32 @@ namespace v8 {
namespace internal {
WeakObjects::Local::Local(WeakObjects* weak_objects)
: WeakObjects::UnusedBase()
#define INIT_LOCAL_WORKLIST(_, name, __) , name##_local(&weak_objects->name)
WEAK_OBJECT_WORKLISTS(INIT_LOCAL_WORKLIST)
#undef INIT_LOCAL_WORKLIST
{
}
void WeakObjects::Local::Publish() {
#define INVOKE_PUBLISH(_, name, __) name##_local.Publish();
WEAK_OBJECT_WORKLISTS(INVOKE_PUBLISH)
#undef INVOKE_PUBLISH
}
void WeakObjects::UpdateAfterScavenge() {
#define INVOKE_UPDATE(_, name, Name) Update##Name(name);
WEAK_OBJECT_WORKLISTS(INVOKE_UPDATE)
#undef INVOKE_UPDATE
}
void WeakObjects::Clear() {
#define INVOKE_CLEAR(_, name, __) name.Clear();
WEAK_OBJECT_WORKLISTS(INVOKE_CLEAR)
#undef INVOKE_CLEAR
}
// static
void WeakObjects::UpdateTransitionArrays(
WeakObjectWorklist<TransitionArray>& transition_arrays) {
......
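The Local constructor, Publish(), UpdateAfterScavenge() and Clear() above are generated entirely by the WEAK_OBJECT_WORKLISTS X-macro. The demo below replays the same technique with made-up names (DEMO_WORKLISTS, DemoWorklist, DemoWeakObjects; the real list has a dozen entries and the real worklist type is ::heap::base::Worklist<Type, 64>): one list macro stamps out the shared worklists, the Local members, the constructor's initializer list, and the Publish() body.

#include <iostream>
#include <utility>
#include <vector>

// F(Type, name, Name), the same shape as WEAK_OBJECT_WORKLISTS.
#define DEMO_WORKLISTS(F)                     \
  F(int, transition_arrays, TransitionArrays) \
  F(double, weak_references, WeakReferences)

template <typename T>
struct DemoWorklist {
  struct Local {
    explicit Local(DemoWorklist* shared) : shared_(shared) {}
    void Push(T value) { buffer_.push_back(std::move(value)); }
    void Publish() {
      for (auto& v : buffer_) shared_->items.push_back(std::move(v));
      buffer_.clear();
    }
    DemoWorklist* shared_;
    std::vector<T> buffer_;
  };
  std::vector<T> items;
};

class DemoWeakObjects final {
 private:
  class UnusedBase {};  // lets every macro-generated initializer start with ','

 public:
#define DECLARE_WORKLIST(Type, name, _) DemoWorklist<Type> name;
  DEMO_WORKLISTS(DECLARE_WORKLIST)
#undef DECLARE_WORKLIST

  class Local final : public UnusedBase {
   public:
    explicit Local(DemoWeakObjects* weak_objects)
        : UnusedBase()
#define INIT_LOCAL_WORKLIST(_, name, __) , name##_local(&weak_objects->name)
              DEMO_WORKLISTS(INIT_LOCAL_WORKLIST)
#undef INIT_LOCAL_WORKLIST
    {
    }

    void Publish() {
#define INVOKE_PUBLISH(_, name, __) name##_local.Publish();
      DEMO_WORKLISTS(INVOKE_PUBLISH)
#undef INVOKE_PUBLISH
    }

#define DECLARE_LOCAL_WORKLIST(Type, name, _) \
  DemoWorklist<Type>::Local name##_local;
    DEMO_WORKLISTS(DECLARE_LOCAL_WORKLIST)
#undef DECLARE_LOCAL_WORKLIST
  };
};

int main() {
  DemoWeakObjects weak_objects;
  DemoWeakObjects::Local local(&weak_objects);
  local.transition_arrays_local.Push(1);
  local.weak_references_local.Push(2.5);
  local.Publish();  // entries become visible on the shared worklists
  std::cout << weak_objects.transition_arrays.items.size() << "\n";  // prints 1
  return 0;
}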
......@@ -6,7 +6,7 @@
#define V8_HEAP_WEAK_OBJECT_WORKLISTS_H_
#include "src/common/globals.h"
#include "src/heap/worklist.h"
#include "src/heap/base/worklist.h"
#include "src/objects/heap-object.h"
#include "src/objects/js-weak-refs.h"
......@@ -61,16 +61,32 @@ class TransitionArray;
F(JSFunction, baseline_flushing_candidates, BaselineFlushingCandidates) \
F(JSFunction, flushed_js_functions, FlushedJSFunctions)
class WeakObjects {
class WeakObjects final {
private:
class UnusedBase {}; // Base class to allow using macro in initializer list.
public:
template <typename Type>
using WeakObjectWorklist = Worklist<Type, 64>;
using WeakObjectWorklist = ::heap::base::Worklist<Type, 64>;
class Local final : public UnusedBase {
public:
explicit Local(WeakObjects* weak_objects);
V8_EXPORT_PRIVATE void Publish();
#define DECLARE_WORKLIST(Type, name, _) \
WeakObjectWorklist<Type>::Local name##_local;
WEAK_OBJECT_WORKLISTS(DECLARE_WORKLIST)
#undef DECLARE_WORKLIST
};
#define DECLARE_WORKLIST(Type, name, _) WeakObjectWorklist<Type> name;
WEAK_OBJECT_WORKLISTS(DECLARE_WORKLIST)
#undef DECLARE_WORKLIST
void UpdateAfterScavenge();
void Clear();
private:
#define DECLARE_UPDATE_METHODS(Type, _, Name) \
......
......@@ -910,6 +910,7 @@ TEST(JSWeakRefScavengedInWorklist) {
CHECK(
heap->mark_compact_collector()->weak_objects()->js_weak_refs.IsEmpty());
heap::SimulateIncrementalMarking(heap, true);
heap->mark_compact_collector()->local_weak_objects()->Publish();
CHECK(!heap->mark_compact_collector()
->weak_objects()
->js_weak_refs.IsEmpty());
......@@ -957,6 +958,7 @@ TEST(JSWeakRefTenuredInWorklist) {
// since its target isn't marked.
CHECK(heap->mark_compact_collector()->weak_objects()->js_weak_refs.IsEmpty());
heap::SimulateIncrementalMarking(heap, true);
heap->mark_compact_collector()->local_weak_objects()->Publish();
CHECK(
!heap->mark_compact_collector()->weak_objects()->js_weak_refs.IsEmpty());
......