Commit a282d2e9 authored by Ulan Degenbaev, committed by Commit Bot

Revert "[heap] Convert WeakObjects to heap::base::Worklist"

This reverts commit 969cdfe6.

Reason for revert: speculative revert for crbug.com/1135472

Original change's description:
> [heap] Convert WeakObjects to heap::base::Worklist
>
> This splits WeakObjects into explicit global and local worklists.
> The latter are defined in WeakObjects::Local and are thread-local.
>
> The main-thread local worklist is stored in
> MarkCompactCollector::local_weak_objects and exists during marking,
> similarly to local_marking_worklists. Concurrent markers create their
> own local worklists, which are published at the end.
>
> Change-Id: I093fdc580b4609ce83455b860b90a5099085beac
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2440607
> Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
> Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#70317}

TBR=ulan@chromium.org,dinfuehr@chromium.org

Change-Id: I3fa3bfdcf3c359f46a3b56c19fb4e486883cde9d
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2452749
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#70344}
parent 0738b224
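For context, the reverted change replaced task-id-indexed WeakObjects worklists with the global/local split of heap::base::Worklist. Below is a minimal sketch of that pattern; it is not V8's actual heap::base::Worklist (which uses fixed-size segments and atomics), and all names are illustrative:

#include <mutex>
#include <utility>
#include <vector>

// One shared worklist plus per-thread Local views. Pushes go into a private
// buffer; Publish() makes them visible to every thread at once.
template <typename T>
class GlobalWorklist {
 public:
  class Local {
   public:
    explicit Local(GlobalWorklist* global) : global_(global) {}
    void Push(T value) { segment_.push_back(std::move(value)); }
    bool Pop(T* out) {
      if (segment_.empty() && !global_->TakeSegment(&segment_)) return false;
      *out = std::move(segment_.back());
      segment_.pop_back();
      return true;
    }
    // Moves locally buffered entries into the shared worklist.
    void Publish() {
      if (segment_.empty()) return;
      global_->PublishSegment(std::move(segment_));
      segment_.clear();
    }

   private:
    GlobalWorklist* const global_;
    std::vector<T> segment_;
  };

 private:
  void PublishSegment(std::vector<T> segment) {
    std::lock_guard<std::mutex> guard(mutex_);
    segments_.push_back(std::move(segment));
  }
  bool TakeSegment(std::vector<T>* out) {
    std::lock_guard<std::mutex> guard(mutex_);
    if (segments_.empty()) return false;
    *out = std::move(segments_.back());
    segments_.pop_back();
    return true;
  }

  std::mutex mutex_;
  std::vector<std::vector<T>> segments_;
};

Under the reverted design each concurrent marker owned one such Local per weak-object category and called Publish() once at the end of marking; the revert below goes back to a single worklist per category whose operations take an explicit task_id.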
@@ -60,9 +60,6 @@ class Worklist {
// marking worklist.
void Merge(Worklist<EntryType, SegmentSize>* other);
// Swaps the segments with the given marking worklist.
void Swap(Worklist<EntryType, SegmentSize>* other);
// These functions are not thread-safe. They should be called only
// if all local marking worklists that use the current worklist have
// been published and are empty.
@@ -193,17 +190,6 @@ void Worklist<EntryType, SegmentSize>::Merge(
}
}
template <typename EntryType, uint16_t SegmentSize>
void Worklist<EntryType, SegmentSize>::Swap(
Worklist<EntryType, SegmentSize>* other) {
Segment* top = top_;
set_top(other->top_);
other->set_top(top);
size_t other_size = other->size_.exchange(
size_.load(std::memory_order_relaxed), std::memory_order_relaxed);
size_.store(other_size, std::memory_order_relaxed);
}
template <typename EntryType, uint16_t SegmentSize>
class Worklist<EntryType, SegmentSize>::Segment : public internal::SegmentBase {
public:
@@ -297,12 +283,10 @@ class Worklist<EntryType, SegmentSize>::Local {
bool IsGlobalEmpty() const;
void Publish();
void Merge(Local* other);
void Merge(Worklist<EntryType, SegmentSize>::Local* other);
size_t PushSegmentSize() const { return push_segment_->Size(); }
void Swap(Local* other);
private:
void PublishPushSegment();
void PublishPopSegment();
@@ -435,14 +419,6 @@ void Worklist<EntryType, SegmentSize>::Local::Merge(
worklist_->Merge(other->worklist_);
}
template <typename EntryType, uint16_t SegmentSize>
void Worklist<EntryType, SegmentSize>::Local::Swap(
Worklist<EntryType, SegmentSize>::Local* other) {
CHECK(IsLocalEmpty());
CHECK(other->IsLocalEmpty());
worklist_->Swap(other->worklist_);
}
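The Swap support removed here was added by the reverted change so the collector could rotate ephemeron worklists between fixpoint iterations: once both locals are published and empty (the CHECKs above), swapping reduces to exchanging the underlying global segment lists. A hypothetical call site, reusing the field names from WeakObjects::Local in this CL:

// Hypothetical rotation between two ephemeron fixpoint iterations.
local_weak_objects->next_ephemerons.Publish();  // satisfy IsLocalEmpty()
local_weak_objects->current_ephemerons.Swap(
    &local_weak_objects->next_ephemerons);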
template <typename EntryType, uint16_t SegmentSize>
void Worklist<EntryType, SegmentSize>::Local::PublishPushSegment() {
if (push_segment_ != internal::SegmentBase::GetSentinelSegmentAddress())
@@ -79,13 +79,14 @@ class ConcurrentMarkingVisitor final
: public MarkingVisitorBase<ConcurrentMarkingVisitor,
ConcurrentMarkingState> {
public:
ConcurrentMarkingVisitor(MarkingWorklists::Local* local_marking_worklists,
WeakObjects::Local* local_weak_objects, Heap* heap,
ConcurrentMarkingVisitor(int task_id,
MarkingWorklists::Local* local_marking_worklists,
WeakObjects* weak_objects, Heap* heap,
unsigned mark_compact_epoch,
BytecodeFlushMode bytecode_flush_mode,
bool embedder_tracing_enabled, bool is_forced_gc,
MemoryChunkDataMap* memory_chunk_data)
: MarkingVisitorBase(local_marking_worklists, local_weak_objects, heap,
: MarkingVisitorBase(task_id, local_marking_worklists, weak_objects, heap,
mark_compact_epoch, bytecode_flush_mode,
embedder_tracing_enabled, is_forced_gc),
marking_state_(memory_chunk_data),
@@ -150,7 +151,7 @@ class ConcurrentMarkingVisitor final
}
} else if (marking_state_.IsWhite(value)) {
local_weak_objects_->next_ephemerons.Push(Ephemeron{key, value});
weak_objects_->next_ephemerons.Push(task_id_, Ephemeron{key, value});
}
return false;
}
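ProcessEphemeron above is one step of ephemeron marking: an ephemeron's value must be kept alive only as long as its key is, so a pair whose key is still white cannot be decided yet and is re-queued for another pass once more of the transitive closure is known. A toy, self-contained model of that fixpoint (not V8 code):

#include <unordered_set>
#include <vector>

struct Obj {};  // stand-in for a heap object
struct Ephemeron { const Obj* key; const Obj* value; };

// Retries deferred ephemerons until an iteration marks nothing new.
void MarkEphemerons(std::unordered_set<const Obj*>& marked,
                    std::vector<Ephemeron> pending) {
  bool progress = true;
  while (progress) {
    progress = false;
    std::vector<Ephemeron> next;
    for (const Ephemeron& e : pending) {
      if (marked.count(e.key)) {
        // Key is live, so the value becomes live too.
        progress |= marked.insert(e.value).second;
      } else {
        next.push_back(e);  // key liveness unknown; retry next round
      }
    }
    pending = std::move(next);
  }
}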
@@ -391,9 +392,8 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
size_t kBytesUntilInterruptCheck = 64 * KB;
int kObjectsUntilInterrupCheck = 1000;
MarkingWorklists::Local local_marking_worklists(marking_worklists_);
WeakObjects::Local local_weak_objects(weak_objects_);
ConcurrentMarkingVisitor visitor(
&local_marking_worklists, &local_weak_objects, heap_,
task_id, &local_marking_worklists, weak_objects_, heap_,
task_state->mark_compact_epoch, Heap::GetBytecodeFlushMode(),
heap_->local_embedder_heap_tracer()->InUse(), task_state->is_forced_gc,
&task_state->memory_chunk_data);
@@ -415,7 +415,7 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
{
Ephemeron ephemeron;
while (local_weak_objects.current_ephemerons.Pop(&ephemeron)) {
while (weak_objects_->current_ephemerons.Pop(task_id, &ephemeron)) {
if (visitor.ProcessEphemeron(ephemeron.key, ephemeron.value)) {
ephemeron_marked = true;
}
@@ -471,7 +471,7 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
if (done) {
Ephemeron ephemeron;
while (local_weak_objects.discovered_ephemerons.Pop(&ephemeron)) {
while (weak_objects_->discovered_ephemerons.Pop(task_id, &ephemeron)) {
if (visitor.ProcessEphemeron(ephemeron.key, ephemeron.value)) {
ephemeron_marked = true;
}
@@ -479,7 +479,17 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
}
local_marking_worklists.Publish();
local_weak_objects.Publish();
weak_objects_->transition_arrays.FlushToGlobal(task_id);
weak_objects_->ephemeron_hash_tables.FlushToGlobal(task_id);
weak_objects_->current_ephemerons.FlushToGlobal(task_id);
weak_objects_->next_ephemerons.FlushToGlobal(task_id);
weak_objects_->discovered_ephemerons.FlushToGlobal(task_id);
weak_objects_->weak_references.FlushToGlobal(task_id);
weak_objects_->js_weak_refs.FlushToGlobal(task_id);
weak_objects_->weak_cells.FlushToGlobal(task_id);
weak_objects_->weak_objects_in_code.FlushToGlobal(task_id);
weak_objects_->bytecode_flushing_candidates.FlushToGlobal(task_id);
weak_objects_->flushed_js_functions.FlushToGlobal(task_id);
base::AsAtomicWord::Relaxed_Store<size_t>(&task_state->marked_bytes, 0);
total_marked_bytes_ += marked_bytes;
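The eleven FlushToGlobal calls restored above are the task-indexed counterpart of the single local_weak_objects.Publish(): each weak-object category must be flushed separately, and every operation names the calling task. A rough, simplified model of that API shape, not the real src/heap/worklist.h (which uses fixed-size segments and atomics):

#include <mutex>
#include <utility>
#include <vector>

template <typename T, int kMaxTasks = 8>
class TaskIndexedWorklist {
 public:
  // Pushes into the calling task's private buffer; invisible to other
  // tasks until FlushToGlobal(task_id) is called.
  void Push(int task_id, T value) {
    private_[task_id].push_back(std::move(value));
  }

  bool Pop(int task_id, T* out) {
    std::vector<T>& mine = private_[task_id];
    if (mine.empty() && !StealFromGlobal(&mine)) return false;
    *out = std::move(mine.back());
    mine.pop_back();
    return true;
  }

  // Makes this task's private entries visible to all tasks.
  void FlushToGlobal(int task_id) {
    std::vector<T>& mine = private_[task_id];
    if (mine.empty()) return;
    std::lock_guard<std::mutex> guard(mutex_);
    global_.push_back(std::move(mine));
    mine.clear();
  }

  bool IsGlobalPoolEmpty() {
    std::lock_guard<std::mutex> guard(mutex_);
    return global_.empty();
  }

 private:
  bool StealFromGlobal(std::vector<T>* out) {
    std::lock_guard<std::mutex> guard(mutex_);
    if (global_.empty()) return false;
    *out = std::move(global_.back());
    global_.pop_back();
    return true;
  }

  std::vector<T> private_[kMaxTasks];
  std::mutex mutex_;
  std::vector<std::vector<T>> global_;
};

Note how RescheduleTasksIfNeeded below checks IsGlobalPoolEmpty() on this API rather than IsEmpty(), since only flushed entries are globally observable.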
@@ -559,8 +569,8 @@ void ConcurrentMarking::RescheduleTasksIfNeeded() {
}
}
if (!marking_worklists_->shared()->IsEmpty() ||
!weak_objects_->current_ephemerons.IsEmpty() ||
!weak_objects_->discovered_ephemerons.IsEmpty()) {
!weak_objects_->current_ephemerons.IsGlobalPoolEmpty() ||
!weak_objects_->discovered_ephemerons.IsGlobalPoolEmpty()) {
ScheduleTasks();
}
}
@@ -5295,7 +5295,8 @@ void Heap::SetUp() {
scavenger_collector_.reset(new ScavengerCollector(this));
incremental_marking_.reset(new IncrementalMarking(this));
incremental_marking_.reset(
new IncrementalMarking(this, mark_compact_collector_->weak_objects()));
if (FLAG_concurrent_marking || FLAG_parallel_marking) {
concurrent_marking_.reset(new ConcurrentMarking(
@@ -49,9 +49,11 @@ void IncrementalMarking::Observer::Step(int bytes_allocated, Address addr,
incremental_marking_->EnsureBlackAllocated(addr, size);
}
IncrementalMarking::IncrementalMarking(Heap* heap)
IncrementalMarking::IncrementalMarking(Heap* heap,
WeakObjects* weak_objects)
: heap_(heap),
collector_(heap->mark_compact_collector()),
weak_objects_(weak_objects),
new_generation_observer_(this, kYoungGenerationAllocatedThreshold),
old_generation_observer_(this, kOldGenerationAllocatedThreshold) {
SetState(STOPPED);
@@ -499,8 +501,7 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
}
});
collector_->local_weak_objects()->Publish();
collector_->weak_objects()->UpdateAfterScavenge();
weak_objects_->UpdateAfterScavenge();
}
void IncrementalMarking::UpdateMarkedBytesAfterScavenge(
@@ -87,7 +87,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
static const AccessMode kAtomicity = AccessMode::NON_ATOMIC;
#endif
explicit IncrementalMarking(Heap* heap);
IncrementalMarking(Heap* heap, WeakObjects* weak_objects);
MarkingState* marking_state() { return &marking_state_; }
@@ -286,6 +286,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
Heap* const heap_;
MarkCompactCollector* const collector_;
WeakObjects* weak_objects_;
double start_time_ms_ = 0.0;
double time_to_force_completion_ = 0.0;
@@ -86,7 +86,7 @@ void MarkCompactCollector::RecordSlot(MemoryChunk* source_page,
}
void MarkCompactCollector::AddTransitionArray(TransitionArray array) {
local_weak_objects()->transition_arrays.Push(array);
weak_objects_.transition_arrays.Push(kMainThreadTask, array);
}
template <typename MarkingState>
This diff is collapsed.
@@ -378,12 +378,12 @@ class MainMarkingVisitor final
MainMarkingVisitor(MarkingState* marking_state,
MarkingWorklists::Local* local_marking_worklists,
WeakObjects::Local* local_weak_objects, Heap* heap,
WeakObjects* weak_objects, Heap* heap,
unsigned mark_compact_epoch,
BytecodeFlushMode bytecode_flush_mode,
bool embedder_tracing_enabled, bool is_forced_gc)
: MarkingVisitorBase<MainMarkingVisitor<MarkingState>, MarkingState>(
local_marking_worklists, local_weak_objects, heap,
kMainThreadTask, local_marking_worklists, weak_objects, heap,
mark_compact_epoch, bytecode_flush_mode, embedder_tracing_enabled,
is_forced_gc),
marking_state_(marking_state),
@@ -533,7 +533,6 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
}
WeakObjects* weak_objects() { return &weak_objects_; }
WeakObjects::Local* local_weak_objects() { return local_weak_objects_.get(); }
inline void AddTransitionArray(TransitionArray array);
@@ -759,7 +758,6 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
MarkingWorklists marking_worklists_;
WeakObjects weak_objects_;
std::unique_ptr<WeakObjects::Local> local_weak_objects_;
EphemeronMarking ephemeron_marking_;
std::unique_ptr<MarkingVisitor> marking_visitor_;
@@ -56,7 +56,7 @@ void MarkingVisitorBase<ConcreteVisitor, MarkingState>::ProcessWeakHeapObject(
// If we do not know about liveness of the value, we have to process
// the reference when we know the liveness of the whole transitive
// closure.
local_weak_objects_->weak_references.Push(std::make_pair(host, slot));
weak_objects_->weak_references.Push(task_id_, std::make_pair(host, slot));
}
}
@@ -89,8 +89,8 @@ void MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEmbeddedPointer(
HeapObject object = rinfo->target_object();
if (!concrete_visitor()->marking_state()->IsBlackOrGrey(object)) {
if (host.IsWeakObject(object)) {
local_weak_objects_->weak_objects_in_code.Push(
std::make_pair(object, host));
weak_objects_->weak_objects_in_code.Push(task_id_,
std::make_pair(object, host));
} else {
MarkObject(host, object);
}
@@ -131,7 +131,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSFunction(
// Check if the JSFunction needs reset due to bytecode being flushed.
if (bytecode_flush_mode_ != BytecodeFlushMode::kDoNotFlushBytecode &&
object.NeedsResetDueToFlushedBytecode()) {
local_weak_objects_->flushed_js_functions.Push(object);
weak_objects_->flushed_js_functions.Push(task_id_, object);
}
return size;
}
@@ -148,7 +148,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitSharedFunctionInfo(
// If the SharedFunctionInfo has old bytecode, mark it as flushable,
// otherwise visit the function data field strongly.
if (shared_info.ShouldFlushBytecode(bytecode_flush_mode_)) {
local_weak_objects_->bytecode_flushing_candidates.Push(shared_info);
weak_objects_->bytecode_flushing_candidates.Push(task_id_, shared_info);
} else {
VisitPointer(shared_info,
shared_info.RawField(SharedFunctionInfo::kFunctionDataOffset));
@@ -258,7 +258,7 @@ template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEphemeronHashTable(
Map map, EphemeronHashTable table) {
if (!concrete_visitor()->ShouldVisit(table)) return 0;
local_weak_objects_->ephemeron_hash_tables.Push(table);
weak_objects_->ephemeron_hash_tables.Push(task_id_, table);
for (InternalIndex i : table.IterateEntries()) {
ObjectSlot key_slot =
@@ -284,8 +284,8 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEphemeronHashTable(
// Revisit ephemerons with both key and value unreachable at end
// of concurrent marking cycle.
if (concrete_visitor()->marking_state()->IsWhite(value)) {
local_weak_objects_->discovered_ephemerons.Push(
Ephemeron{key, value});
weak_objects_->discovered_ephemerons.Push(task_id_,
Ephemeron{key, value});
}
}
}
@@ -309,7 +309,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSWeakRef(
} else {
// JSWeakRef points to a potentially dead object. We have to process
// them when we know the liveness of the whole transitive closure.
local_weak_objects_->js_weak_refs.Push(weak_ref);
weak_objects_->js_weak_refs.Push(task_id_, weak_ref);
}
}
return size;
@@ -339,7 +339,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitWeakCell(
// WeakCell points to a potentially dead object or a dead unregister
// token. We have to process them when we know the liveness of the whole
// transitive closure.
local_weak_objects_->weak_cells.Push(weak_cell);
weak_objects_->weak_cells.Push(task_id_, weak_cell);
}
return size;
}
@@ -430,7 +430,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitTransitionArray(
this->VisitMapPointer(array);
int size = TransitionArray::BodyDescriptor::SizeOf(map, array);
TransitionArray::BodyDescriptor::IterateBody(map, array, size, this);
local_weak_objects_->transition_arrays.Push(array);
weak_objects_->transition_arrays.Push(task_id_, array);
return size;
}
@@ -101,14 +101,16 @@ class MarkingStateBase {
template <typename ConcreteVisitor, typename MarkingState>
class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
public:
MarkingVisitorBase(MarkingWorklists::Local* local_marking_worklists,
WeakObjects::Local* local_weak_objects, Heap* heap,
MarkingVisitorBase(int task_id,
MarkingWorklists::Local* local_marking_worklists,
WeakObjects* weak_objects, Heap* heap,
unsigned mark_compact_epoch,
BytecodeFlushMode bytecode_flush_mode,
bool is_embedder_tracing_enabled, bool is_forced_gc)
: local_marking_worklists_(local_marking_worklists),
local_weak_objects_(local_weak_objects),
weak_objects_(weak_objects),
heap_(heap),
task_id_(task_id),
mark_compact_epoch_(mark_compact_epoch),
bytecode_flush_mode_(bytecode_flush_mode),
is_embedder_tracing_enabled_(is_embedder_tracing_enabled),
@@ -184,8 +186,9 @@ class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
V8_INLINE void MarkObject(HeapObject host, HeapObject obj);
MarkingWorklists::Local* const local_marking_worklists_;
WeakObjects::Local* const local_weak_objects_;
WeakObjects* const weak_objects_;
Heap* const heap_;
const int task_id_;
const unsigned mark_compact_epoch_;
const BytecodeFlushMode bytecode_flush_mode_;
const bool is_embedder_tracing_enabled_;
@@ -19,36 +19,6 @@ namespace v8 {
namespace internal {
WeakObjects::Local::Local(WeakObjects* weak_objects)
:
#define CONSTRUCT_FIELD(Type, name, _) name(&weak_objects->name),
WEAK_OBJECT_WORKLISTS(CONSTRUCT_FIELD)
#undef CONSTRUCT_FIELD
end_of_initializer_list_(false) {
USE(end_of_initializer_list_);
}
bool WeakObjects::Local::IsLocalAndGlobalEmpty() {
bool result = true;
#define INVOKE_PREDICATE(Type, name, _) \
result = result && name.IsLocalAndGlobalEmpty();
WEAK_OBJECT_WORKLISTS(INVOKE_PREDICATE)
#undef INVOKE_PREDICATE
return result;
}
void WeakObjects::Local::Publish() {
#define INVOKE_PUBLISH(Type, name, _) name.Publish();
WEAK_OBJECT_WORKLISTS(INVOKE_PUBLISH)
#undef INVOKE_PUBLISH
}
void WeakObjects::Clear() {
#define INVOKE_CLEAR(Type, name, _) name.Clear();
WEAK_OBJECT_WORKLISTS(INVOKE_CLEAR)
#undef INVOKE_CLEAR
}
void WeakObjects::UpdateAfterScavenge() {
#define INVOKE_UPDATE(_, name, Name) Update##Name(name);
WEAK_OBJECT_WORKLISTS(INVOKE_UPDATE)
@@ -6,7 +6,7 @@
#define V8_HEAP_WEAK_OBJECT_WORKLISTS_H_
#include "src/common/globals.h"
#include "src/heap/base/worklist.h"
#include "src/heap/worklist.h"
#include "src/objects/heap-object.h"
#include "src/objects/js-weak-refs.h"
@@ -64,31 +64,14 @@ class TransitionArray;
class WeakObjects {
public:
template <typename Type>
using WeakObjectWorklist = ::heap::base::Worklist<Type, 64>;
class Local {
public:
explicit Local(WeakObjects* weak_objects);
bool IsLocalAndGlobalEmpty();
void Publish();
#define DECLARE_WORKLIST(Type, name, _) WeakObjectWorklist<Type>::Local name;
WEAK_OBJECT_WORKLISTS(DECLARE_WORKLIST)
#undef DECLARE_WORKLIST
private:
// Dummy field used for terminating the initializer list
// in the constructor.
bool end_of_initializer_list_;
};
void Clear();
void UpdateAfterScavenge();
using WeakObjectWorklist = Worklist<Type, 64>;
#define DECLARE_WORKLIST(Type, name, _) WeakObjectWorklist<Type> name;
WEAK_OBJECT_WORKLISTS(DECLARE_WORKLIST)
#undef DECLARE_WORKLIST
void UpdateAfterScavenge();
private:
#define DECLARE_UPDATE_METHODS(Type, _, Name) \
void Update##Name(WeakObjectWorklist<Type>&);
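Both files above lean on a second-order macro: WEAK_OBJECT_WORKLISTS(F) applies F to one (Type, snake_name, CamelName) triple per worklist, so the fields, emptiness predicates, Publish/Clear loops, and Update declarations are all generated from a single list. A reduced, self-contained illustration of the technique, with a two-entry demo list standing in for the real eleven-entry one:

#include <vector>

struct TransitionArray {};  // dummy payload types for the demo
struct Ephemeron {};

#define DEMO_WEAK_OBJECT_WORKLISTS(F)                     \
  F(TransitionArray, transition_arrays, TransitionArrays) \
  F(Ephemeron, next_ephemerons, NextEphemerons)

struct WeakObjectsDemo {
  // One field per list entry, declared by expanding the macro.
#define DECLARE_WORKLIST(Type, name, _) std::vector<Type> name;
  DEMO_WEAK_OBJECT_WORKLISTS(DECLARE_WORKLIST)
#undef DECLARE_WORKLIST

  // One statement per entry, generated the same way.
  void Clear() {
#define INVOKE_CLEAR(Type, name, _) name.clear();
    DEMO_WEAK_OBJECT_WORKLISTS(INVOKE_CLEAR)
#undef INVOKE_CLEAR
  }
};

Adding a new weak-object category then requires only one new line in the list macro.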
@@ -911,11 +911,17 @@ TEST(JSWeakRefScavengedInWorklist) {
// Do marking. This puts the WeakRef above into the js_weak_refs worklist
// since its target isn't marked.
CHECK(
heap->mark_compact_collector()->weak_objects()->js_weak_refs.IsEmpty());
heap::SimulateIncrementalMarking(heap, true);
CHECK(!heap->mark_compact_collector()
->weak_objects()
->js_weak_refs.IsEmpty());
}
// Now collect both weak_ref and its target. The worklist should be empty.
CcTest::CollectGarbage(NEW_SPACE);
CHECK(heap->mark_compact_collector()->weak_objects()->js_weak_refs.IsEmpty());
// The mark-compactor shouldn't see zapped WeakRefs in the worklist.
CcTest::CollectAllGarbage();
@@ -950,16 +956,22 @@ TEST(JSWeakRefTenuredInWorklist) {
// Do marking. This puts the WeakRef above into the js_weak_refs worklist
// since its target isn't marked.
CHECK(heap->mark_compact_collector()->weak_objects()->js_weak_refs.IsEmpty());
heap::SimulateIncrementalMarking(heap, true);
CHECK(
!heap->mark_compact_collector()->weak_objects()->js_weak_refs.IsEmpty());
// Now collect weak_ref's target. We still have a Handle to weak_ref, so it is
// moved and remains on the worklist.
CcTest::CollectGarbage(NEW_SPACE);
JSWeakRef new_weak_ref_location = *weak_ref;
CHECK_NE(old_weak_ref_location, new_weak_ref_location);
CHECK(
!heap->mark_compact_collector()->weak_objects()->js_weak_refs.IsEmpty());
// The mark-compactor should see the moved WeakRef in the worklist.
CcTest::CollectAllGarbage();
CHECK(heap->mark_compact_collector()->weak_objects()->js_weak_refs.IsEmpty());
CHECK(weak_ref->target().IsUndefined(isolate));
}