Commit b813b0b0 authored by Michael Lippautz, committed by Commit Bot

[heap] Page: RecursiveMutex -> Mutex

All use cases of the RecursiveMutex have been removed.

Bug: v8:6923
Change-Id: I25aeee2447db185dbaacf96ab06a660834a408b7
Reviewed-on: https://chromium-review.googlesource.com/735345
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#48908}
parent a32eabe1
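The distinction matters for re-entrancy: a recursive mutex may be re-acquired by the thread that already holds it, while relocking a plain mutex on the same thread deadlocks. The switch below is therefore only safe once no code path locks a page's mutex while already holding it, which the commit message asserts. A minimal sketch of the difference, using the standard-library equivalents std::mutex and std::recursive_mutex rather than V8's base:: types:

#include <mutex>

std::recursive_mutex recursive_mu;  // stand-in for base::RecursiveMutex
std::mutex plain_mu;                // stand-in for base::Mutex

void NestedLockIsFine() {
  std::lock_guard<std::recursive_mutex> outer(recursive_mu);
  // A recursive mutex keeps a per-thread ownership count, so the
  // holding thread may lock it again without deadlocking.
  std::lock_guard<std::recursive_mutex> inner(recursive_mu);
}

void NestedLockIsNot() {
  std::lock_guard<std::mutex> outer(plain_mu);
  // Relocking a plain mutex on the owning thread is undefined
  // behavior (in practice, a deadlock), hence the prerequisite
  // that all recursive-locking call paths were removed first:
  // std::lock_guard<std::mutex> inner(plain_mu);  // would deadlock
}

int main() {
  NestedLockIsFine();
  NestedLockIsNot();
}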
@@ -19,7 +19,7 @@ void ArrayBufferTracker::RegisterNew(Heap* heap, JSArrayBuffer* buffer) {
const size_t length = NumberToSize(buffer->byte_length());
Page* page = Page::FromAddress(buffer->address());
{
- base::LockGuard<base::RecursiveMutex> guard(page->mutex());
+ base::LockGuard<base::Mutex> guard(page->mutex());
LocalArrayBufferTracker* tracker = page->local_tracker();
if (tracker == nullptr) {
page->AllocateLocalTracker();
@@ -40,7 +40,7 @@ void ArrayBufferTracker::Unregister(Heap* heap, JSArrayBuffer* buffer) {
Page* page = Page::FromAddress(buffer->address());
const size_t length = NumberToSize(buffer->byte_length());
{
- base::LockGuard<base::RecursiveMutex> guard(page->mutex());
+ base::LockGuard<base::Mutex> guard(page->mutex());
LocalArrayBufferTracker* tracker = page->local_tracker();
DCHECK_NOT_NULL(tracker);
tracker->Remove(buffer, length);
@@ -31,7 +31,7 @@ void LocalArrayBufferTracker::Process(Callback callback) {
DCHECK_NOT_NULL(new_buffer);
Page* target_page = Page::FromAddress(new_buffer->address());
{
- base::LockGuard<base::RecursiveMutex> guard(target_page->mutex());
+ base::LockGuard<base::Mutex> guard(target_page->mutex());
LocalArrayBufferTracker* tracker = target_page->local_tracker();
if (tracker == nullptr) {
target_page->AllocateLocalTracker();
@@ -111,7 +111,7 @@ bool ArrayBufferTracker::ProcessBuffers(Page* page, ProcessingMode mode) {
bool ArrayBufferTracker::IsTracked(JSArrayBuffer* buffer) {
Page* page = Page::FromAddress(buffer->address());
{
- base::LockGuard<base::RecursiveMutex> guard(page->mutex());
+ base::LockGuard<base::Mutex> guard(page->mutex());
LocalArrayBufferTracker* tracker = page->local_tracker();
if (tracker == nullptr) return false;
return tracker->IsTracked(buffer);
@@ -4678,7 +4678,7 @@ void CollectSlots(MemoryChunk* chunk, Address start, Address end,
void Heap::VerifyRememberedSetFor(HeapObject* object) {
MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
- base::LockGuard<base::RecursiveMutex> lock_guard(chunk->mutex());
+ base::LockGuard<base::Mutex> lock_guard(chunk->mutex());
Address start = object->address();
Address end = start + object->Size();
std::set<Address> old_to_new;
@@ -2111,7 +2111,7 @@ class PageMarkingItem : public MarkingItem {
virtual ~PageMarkingItem() { global_slots_->Increment(slots_); }
void Process(YoungGenerationMarkingTask* task) override {
- base::LockGuard<base::RecursiveMutex> guard(chunk_->mutex());
+ base::LockGuard<base::Mutex> guard(chunk_->mutex());
MarkUntypedPointers(task);
MarkTypedPointers(task);
}
@@ -3909,7 +3909,7 @@ class RememberedSetUpdatingItem : public UpdatingItem {
virtual ~RememberedSetUpdatingItem() {}
void Process() override {
- base::LockGuard<base::RecursiveMutex> guard(chunk_->mutex());
+ base::LockGuard<base::Mutex> guard(chunk_->mutex());
UpdateUntypedPointers();
UpdateTypedPointers();
}
@@ -4441,7 +4441,7 @@ int MarkCompactCollector::Sweeper::ParallelSweepPage(Page* page,
int max_freed = 0;
{
- base::LockGuard<base::RecursiveMutex> guard(page->mutex());
+ base::LockGuard<base::Mutex> guard(page->mutex());
// If this page was already swept in the meantime, we can return here.
if (page->SweepingDone()) return 0;
@@ -598,7 +598,7 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
chunk->concurrent_sweeping_state().SetValue(kSweepingDone);
chunk->page_protection_change_mutex_ = new base::Mutex();
chunk->write_unprotect_counter_ = 0;
- chunk->mutex_ = new base::RecursiveMutex();
+ chunk->mutex_ = new base::Mutex();
chunk->allocated_bytes_ = chunk->area_size();
chunk->wasted_memory_ = 0;
chunk->young_generation_bitmap_ = nullptr;
@@ -364,7 +364,7 @@ class MemoryChunk {
+ kPointerSize // InvalidatedSlots* invalidated_slots_
+ kPointerSize // SkipList* skip_list_
+ kPointerSize // AtomicValue high_water_mark_
- + kPointerSize // base::RecursiveMutex* mutex_
+ + kPointerSize // base::Mutex* mutex_
+ kPointerSize // base::AtomicWord concurrent_sweeping_
+ kPointerSize // base::Mutex* page_protection_change_mutex_
+ kPointerSize // uintptr_t write_unprotect_counter_
@@ -427,7 +427,7 @@ class MemoryChunk {
return reinterpret_cast<Address>(const_cast<MemoryChunk*>(this));
}
- base::RecursiveMutex* mutex() { return mutex_; }
+ base::Mutex* mutex() { return mutex_; }
bool Contains(Address addr) {
return addr >= area_start() && addr < area_end();
@@ -686,7 +686,7 @@ class MemoryChunk {
// count highest number of bytes ever allocated on the page.
base::AtomicValue<intptr_t> high_water_mark_;
- base::RecursiveMutex* mutex_;
+ base::Mutex* mutex_;
base::AtomicValue<ConcurrentSweepingState> concurrent_sweeping_;