Commit bc0a32be authored by Omer Katz, committed by Commit Bot

cppgc: Update weak container handling

Ports the updates to weak container handling made in Blink in [1].
These changes were needed to resolve timeouts on Android.
See [1] for more details.

[1] https://chromium-review.googlesource.com/c/chromium/src/+/2516363

Bug: chromium:1056170
Change-Id: I2b2c451d494438a37886a3c2bc73481bc9228664
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2538211
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#71202}
parent a9f49138
@@ -5,6 +5,8 @@
 #ifndef V8_HEAP_CPPGC_MARKING_STATE_H_
 #define V8_HEAP_CPPGC_MARKING_STATE_H_
 
+#include <algorithm>
+
 #include "include/cppgc/trace-trait.h"
 #include "src/heap/cppgc/compaction-worklists.h"
 #include "src/heap/cppgc/globals.h"
@@ -317,11 +319,28 @@ class MutatorMarkingState : public MarkingStateBase {
                                              WeakCallback, const void*);
   inline bool IsMarkedWeakContainer(HeapObjectHeader&);
 
+ private:
+  // Weak containers are strongly retraced during conservative stack scanning.
+  // Stack scanning happens once per GC at the start of the atomic pause.
+  // Because the visitor is not retained between GCs, there is no need to clear
+  // the set at the end of GC.
+  class RecentlyRetracedWeakContainers {
+    static constexpr size_t kMaxCacheSize = 8;
+
+   public:
+    inline bool Contains(const HeapObjectHeader*);
+    inline void Insert(const HeapObjectHeader*);
+
+   private:
+    std::vector<const HeapObjectHeader*> recently_retraced_cache_;
+    size_t last_used_index_ = -1;
+  } recently_retraced_weak_containers_;
 };
 
 void MutatorMarkingState::PushMarkedWeakContainer(HeapObjectHeader& header) {
   DCHECK(weak_containers_worklist_.Contains(&header));
-  weak_containers_worklist_.Erase(&header);
+  recently_retraced_weak_containers_.Insert(&header);
   PushMarked(
       header,
       {header.Payload(),
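
Note: the hunk above replaces the erase from weak_containers_worklist_ with an insertion into the small RecentlyRetracedWeakContainers cache declared above (its Contains/Insert bodies follow in the next hunk). The standalone sketch below, with made-up names and no relation to the V8 sources, only demonstrates the resulting round-robin eviction behavior of such a fixed-size cache.

// Standalone sketch, illustrative only (hypothetical names, not the V8 code):
// a fixed-capacity cache with the same Contains/Insert shape as
// RecentlyRetracedWeakContainers, overwriting its oldest slot once full.
#include <algorithm>
#include <cstddef>
#include <cstdio>
#include <vector>

class RoundRobinCache {
  static constexpr std::size_t kMaxCacheSize = 8;

 public:
  bool Contains(const void* entry) const {
    return std::find(cache_.begin(), cache_.end(), entry) != cache_.end();
  }

  void Insert(const void* entry) {
    // Advance the slot index, wrapping around so the oldest entry is
    // overwritten once kMaxCacheSize entries have been inserted.
    last_used_index_ = (last_used_index_ + 1) % kMaxCacheSize;
    if (cache_.size() <= last_used_index_)
      cache_.push_back(entry);
    else
      cache_[last_used_index_] = entry;
  }

 private:
  std::vector<const void*> cache_;
  std::size_t last_used_index_ = -1;  // Wraps to 0 on the first Insert.
};

int main() {
  static int objects[10];
  RoundRobinCache cache;
  for (int& object : objects) cache.Insert(&object);
  // objects[0] and objects[1] were evicted by the 9th and 10th insertions.
  std::printf("first still cached: %d, last still cached: %d\n",
              cache.Contains(&objects[0]), cache.Contains(&objects[9]));
}
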
@@ -354,11 +373,29 @@ void MutatorMarkingState::InvokeWeakRootsCallbackIfNeeded(
 }
 
 bool MutatorMarkingState::IsMarkedWeakContainer(HeapObjectHeader& header) {
-  const bool result = weak_containers_worklist_.Contains(&header);
+  const bool result = weak_containers_worklist_.Contains(&header) &&
+                      !recently_retraced_weak_containers_.Contains(&header);
   DCHECK_IMPLIES(result, header.IsMarked());
   DCHECK_IMPLIES(result, !header.IsInConstruction());
   return result;
 }
 
+bool MutatorMarkingState::RecentlyRetracedWeakContainers::Contains(
+    const HeapObjectHeader* header) {
+  return std::find(recently_retraced_cache_.begin(),
+                   recently_retraced_cache_.end(),
+                   header) != recently_retraced_cache_.end();
+}
+
+void MutatorMarkingState::RecentlyRetracedWeakContainers::Insert(
+    const HeapObjectHeader* header) {
+  last_used_index_ = (last_used_index_ + 1) % kMaxCacheSize;
+  if (recently_retraced_cache_.size() <= last_used_index_)
+    recently_retraced_cache_.push_back(header);
+  else
+    recently_retraced_cache_[last_used_index_] = header;
+}
+
 class ConcurrentMarkingState : public MarkingStateBase {
  public:
   ConcurrentMarkingState(HeapBase& heap, MarkingWorklists& marking_worklists,
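One subtlety in the Insert implementation above: last_used_index_ is a size_t initialized to -1, i.e. SIZE_MAX, so the unsigned wrap-around in (last_used_index_ + 1) % kMaxCacheSize lands on slot 0 for the very first insertion and then cycles through the eight slots. A tiny standalone check, illustrative only:

#include <cstddef>
#include <cstdio>

int main() {
  constexpr std::size_t kMaxCacheSize = 8;
  std::size_t last_used_index = -1;  // Converts to SIZE_MAX.
  for (int i = 0; i < 10; ++i) {
    // Unsigned overflow is well defined: SIZE_MAX + 1 == 0, so the first
    // iteration lands on slot 0 and later ones cycle 1, 2, ..., 7, 0, 1.
    last_used_index = (last_used_index + 1) % kMaxCacheSize;
    std::printf("insert %d -> slot %zu\n", i, last_used_index);
  }
}
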
@@ -22,8 +22,6 @@ class MarkingWorklists {
     template <AccessMode = AccessMode::kNonAtomic>
     void Push(HeapObjectHeader*);
     template <AccessMode = AccessMode::kNonAtomic>
-    void Erase(HeapObjectHeader*);
-    template <AccessMode = AccessMode::kNonAtomic>
     bool Contains(HeapObjectHeader*);
     template <AccessMode = AccessMode::kNonAtomic>
     std::unordered_set<HeapObjectHeader*> Extract();
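
With Erase gone, Push, Contains, and Extract remain as the operations declared on this set-backed worklist. A bare-bones sketch of that shape, hypothetical and heavily simplified (the real class is the nested MarkingWorklists::ExternalMarkingWorklist, is templated over AccessMode per method, and guards the set with a lock, as the next hunk shows):

#include <unordered_set>

class HeapObjectHeader;  // Opaque here; only pointer identity is used.

// Hypothetical sketch of a set-backed worklist; locking omitted.
class ExternalMarkingWorklistSketch {
 public:
  void Push(HeapObjectHeader* object) { objects_.insert(object); }

  bool Contains(HeapObjectHeader* object) const {
    return objects_.count(object) > 0;
  }

  // One plausible Extract: hand the whole set to the caller and leave this
  // worklist empty.
  std::unordered_set<HeapObjectHeader*> Extract() {
    std::unordered_set<HeapObjectHeader*> extracted;
    extracted.swap(objects_);
    return extracted;
  }

 private:
  std::unordered_set<HeapObjectHeader*> objects_;
};
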
@@ -150,14 +148,6 @@ void MarkingWorklists::ExternalMarkingWorklist::Push(HeapObjectHeader* object) {
   objects_.insert(object);
 }
 
 template <AccessMode mode>
-void MarkingWorklists::ExternalMarkingWorklist::Erase(
-    HeapObjectHeader* object) {
-  DCHECK_NOT_NULL(object);
-  ConditionalMutexGuard<mode> guard(&lock_);
-  objects_.erase(object);
-}
-
-template <AccessMode mode>
 bool MarkingWorklists::ExternalMarkingWorklist::Contains(
     HeapObjectHeader* object) {
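The deleted Erase body above also shows the locking pattern these methods use: a ConditionalMutexGuard<mode> over lock_. The guard itself is not part of this diff; as a guess at the general pattern (names and details below are assumptions, not the V8 implementation), a guard that takes the mutex only for atomic access could look like this:

#include <mutex>

enum class AccessMode { kNonAtomic, kAtomic };

// Hypothetical sketch of a conditional mutex guard: a guess at the pattern
// behind ConditionalMutexGuard<mode>, not the V8 helper itself.
template <AccessMode mode>
class ConditionalMutexGuardSketch {
 public:
  explicit ConditionalMutexGuardSketch(std::mutex* mutex) : mutex_(mutex) {
    if (mode == AccessMode::kAtomic) mutex_->lock();
  }
  ~ConditionalMutexGuardSketch() {
    if (mode == AccessMode::kAtomic) mutex_->unlock();
  }
  ConditionalMutexGuardSketch(const ConditionalMutexGuardSketch&) = delete;
  ConditionalMutexGuardSketch& operator=(const ConditionalMutexGuardSketch&) =
      delete;

 private:
  std::mutex* const mutex_;
};

// Usage mirroring the deleted Erase: lock only in atomic mode, then mutate.
//   template <AccessMode mode>
//   void Erase(HeapObjectHeader* object) {
//     ConditionalMutexGuardSketch<mode> guard(&lock_);
//     objects_.erase(object);
//   }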