Commit 0b8d4bda authored by Dan Elphick, committed by Commit Bot

[heap] Factor out read-only-spaces.h from spaces.h

Moves ReadOnlyPage, ReadOnlyArtifacts, ReadOnlySpace and
SharedReadOnlySpace out of spaces.h and into read-only-spaces.h, as well
as creating a corresponding .cc file.

Bug: v8:10473
Change-Id: I9d8b49d61ed643fd6e16919d571a909ab6fce407
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2171197
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Dan Elphick <delphick@chromium.org>
Cr-Commit-Position: refs/heads/master@{#67531}
parent 567a9bdf
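For context, the practical effect for downstream code is that the read-only space types now come from a dedicated header rather than spaces.h. A minimal sketch of a consumer after this change, assuming it needs the full ReadOnlySpace definition (the helper function is illustrative only and not part of this CL; files that only name the type can keep a forward declaration instead, as mark-compact.h does below):

#include "src/heap/read-only-spaces.h"

namespace v8 {
namespace internal {

// Illustrative helper: query whether the read-only space has been sealed yet.
inline bool IsSealed(const ReadOnlySpace& space) { return !space.writable(); }

}  // namespace internal
}  // namespace v8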
......@@ -2460,6 +2460,8 @@ v8_source_set("v8_base_without_compiler") {
"src/heap/read-only-heap-inl.h",
"src/heap/read-only-heap.cc",
"src/heap/read-only-heap.h",
"src/heap/read-only-spaces.cc",
"src/heap/read-only-spaces.h",
"src/heap/remembered-set.h",
"src/heap/safepoint.cc",
"src/heap/safepoint.h",
......
......@@ -23,6 +23,7 @@
// leak heap internals to users of this interface!
#include "src/execution/isolate-data.h"
#include "src/execution/isolate.h"
#include "src/heap/read-only-spaces.h"
#include "src/heap/spaces-inl.h"
#include "src/objects/allocation-site-inl.h"
#include "src/objects/api-callbacks-inl.h"
......
......@@ -30,6 +30,7 @@
#include "src/heap/object-stats.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/read-only-heap.h"
#include "src/heap/read-only-spaces.h"
#include "src/heap/spaces-inl.h"
#include "src/heap/sweeper.h"
#include "src/heap/worklist.h"
......
......@@ -24,6 +24,7 @@ class EvacuationJobTraits;
class HeapObjectVisitor;
class ItemParallelJob;
class MigrationObserver;
class ReadOnlySpace;
class RecordMigratedSlotVisitor;
class UpdatingItem;
class YoungGenerationMarkingVisitor;
......
......@@ -11,7 +11,7 @@
#include "src/base/platform/mutex.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/heap/spaces.h"
#include "src/heap/read-only-spaces.h"
#include "src/objects/heap-object-inl.h"
#include "src/objects/objects-inl.h"
#include "src/objects/smi.h"
......
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/read-only-spaces.h"
#include "src/base/lsan.h"
#include "src/execution/isolate.h"
#include "src/heap/combined-heap.h"
#include "src/heap/heap-inl.h"
#include "src/heap/read-only-heap.h"
#include "src/objects/objects-inl.h"
#include "src/objects/string.h"
namespace v8 {
namespace internal {
// -----------------------------------------------------------------------------
// ReadOnlySpace implementation
ReadOnlySpace::ReadOnlySpace(Heap* heap)
: PagedSpace(heap, RO_SPACE, NOT_EXECUTABLE, FreeList::CreateFreeList()),
is_string_padding_cleared_(heap->isolate()->initialized_from_snapshot()) {
}
ReadOnlyArtifacts::~ReadOnlyArtifacts() {
v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
MemoryChunk* next_chunk;
for (MemoryChunk* chunk = pages_.front(); chunk != nullptr;
chunk = next_chunk) {
void* chunk_address = reinterpret_cast<void*>(chunk->address());
page_allocator->SetPermissions(chunk_address, chunk->size(),
PageAllocator::kReadWrite);
next_chunk = chunk->list_node().next();
size_t size = RoundUp(chunk->size(), page_allocator->AllocatePageSize());
CHECK(page_allocator->FreePages(chunk_address, size));
}
}
void ReadOnlyArtifacts::set_read_only_heap(
std::unique_ptr<ReadOnlyHeap> read_only_heap) {
read_only_heap_ = std::move(read_only_heap);
}
SharedReadOnlySpace::~SharedReadOnlySpace() {
// Clear the memory chunk list before the space is deleted, so that the
// inherited destructors don't try to destroy the MemoryChunks themselves.
memory_chunk_list_ = heap::List<MemoryChunk>();
}
SharedReadOnlySpace::SharedReadOnlySpace(
Heap* heap, std::shared_ptr<ReadOnlyArtifacts> artifacts)
: ReadOnlySpace(heap) {
artifacts->pages().ShallowCopyTo(&memory_chunk_list_);
is_marked_read_only_ = true;
accounting_stats_ = artifacts->accounting_stats();
}
void ReadOnlySpace::DetachPagesAndAddToArtifacts(
std::shared_ptr<ReadOnlyArtifacts> artifacts) {
Heap* heap = ReadOnlySpace::heap();
Seal(SealMode::kDetachFromHeapAndForget);
artifacts->set_accounting_stats(accounting_stats_);
artifacts->TransferPages(std::move(memory_chunk_list_));
artifacts->set_shared_read_only_space(
std::make_unique<SharedReadOnlySpace>(heap, artifacts));
heap->ReplaceReadOnlySpace(artifacts->shared_read_only_space());
}
void ReadOnlyPage::MakeHeaderRelocatable() {
ReleaseAllocatedMemoryNeededForWritableChunk();
// Detached read-only space needs to have a valid marking bitmap and free list
// categories. Instruct Lsan to ignore them if required.
LSAN_IGNORE_OBJECT(categories_);
for (int i = kFirstCategory; i < free_list()->number_of_categories(); i++) {
LSAN_IGNORE_OBJECT(categories_[i]);
}
LSAN_IGNORE_OBJECT(marking_bitmap_);
heap_ = nullptr;
owner_ = nullptr;
}
void ReadOnlySpace::SetPermissionsForPages(MemoryAllocator* memory_allocator,
PageAllocator::Permission access) {
for (Page* p : *this) {
// Read-only pages don't have a valid reservation object, so we get the
// proper page allocator manually.
v8::PageAllocator* page_allocator =
memory_allocator->page_allocator(p->executable());
CHECK(SetPermissions(page_allocator, p->address(), p->size(), access));
}
}
// After we have booted, we have created a map which represents free space
// on the heap. If there was already a free list then the elements on it
// were created with the wrong FreeSpaceMap (normally nullptr), so we need to
// fix them.
void ReadOnlySpace::RepairFreeListsAfterDeserialization() {
free_list_->RepairLists(heap());
// Each page may have a small free space that is not tracked by a free list.
// Those free spaces still contain null as their map pointer.
// Overwrite them with new fillers.
for (Page* page : *this) {
int size = static_cast<int>(page->wasted_memory());
if (size == 0) {
// If there is no wasted memory then all free space is in the free list.
continue;
}
Address start = page->HighWaterMark();
Address end = page->area_end();
if (start < end - size) {
// A region at the high watermark is already in the free list.
HeapObject filler = HeapObject::FromAddress(start);
CHECK(filler.IsFreeSpaceOrFiller());
start += filler.Size();
}
CHECK_EQ(size, static_cast<int>(end - start));
heap()->CreateFillerObjectAt(start, size, ClearRecordedSlots::kNo);
}
}
void ReadOnlySpace::ClearStringPaddingIfNeeded() {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
// TODO(ulan): Revisit this once third-party heap supports iteration.
return;
}
if (is_string_padding_cleared_) return;
ReadOnlyHeapObjectIterator iterator(this);
for (HeapObject o = iterator.Next(); !o.is_null(); o = iterator.Next()) {
if (o.IsSeqOneByteString()) {
SeqOneByteString::cast(o).clear_padding();
} else if (o.IsSeqTwoByteString()) {
SeqTwoByteString::cast(o).clear_padding();
}
}
is_string_padding_cleared_ = true;
}
void ReadOnlySpace::Seal(SealMode ro_mode) {
DCHECK(!is_marked_read_only_);
FreeLinearAllocationArea();
is_marked_read_only_ = true;
auto* memory_allocator = heap()->memory_allocator();
if (ro_mode == SealMode::kDetachFromHeapAndForget) {
DetachFromHeap();
for (Page* p : *this) {
memory_allocator->UnregisterMemory(p);
static_cast<ReadOnlyPage*>(p)->MakeHeaderRelocatable();
}
}
SetPermissionsForPages(memory_allocator, PageAllocator::kRead);
}
void ReadOnlySpace::Unseal() {
DCHECK(is_marked_read_only_);
if (HasPages()) {
SetPermissionsForPages(heap()->memory_allocator(),
PageAllocator::kReadWrite);
}
is_marked_read_only_ = false;
}
} // namespace internal
} // namespace v8
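DetachPagesAndAddToArtifacts above is the hand-over point between an isolate's own ReadOnlySpace and the shared one. A hedged sketch of the calling side, assuming |heap| already has a populated read-only space (the function name is hypothetical; the real wiring lives in ReadOnlyHeap, not in this CL):

// Hypothetical caller, for illustration only.
void ShareReadOnlySpace(Heap* heap) {
  auto artifacts = std::make_shared<ReadOnlyArtifacts>();
  // Seals the space, moves its pages into |artifacts| and installs a
  // SharedReadOnlySpace on |heap| in their place.
  heap->read_only_space()->DetachPagesAndAddToArtifacts(artifacts);
  // Later isolates construct their own SharedReadOnlySpace from the same
  // |artifacts|, sharing the sealed pages without copying them.
}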
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_READ_ONLY_SPACES_H_
#define V8_HEAP_READ_ONLY_SPACES_H_
#include <memory>
#include <utility>
#include "include/v8-platform.h"
#include "src/base/macros.h"
#include "src/heap/list.h"
#include "src/heap/spaces.h"
namespace v8 {
namespace internal {
class ReadOnlyHeap;
class ReadOnlyPage : public Page {
public:
// Clears any pointers in the header that point out of the page that would
// otherwise make the header non-relocatable.
void MakeHeaderRelocatable();
private:
friend class ReadOnlySpace;
};
// -----------------------------------------------------------------------------
// Artifacts used to construct a new SharedReadOnlySpace
class ReadOnlyArtifacts {
public:
~ReadOnlyArtifacts();
void set_accounting_stats(const AllocationStats& stats) { stats_ = stats; }
void set_shared_read_only_space(
std::unique_ptr<SharedReadOnlySpace> shared_space) {
shared_read_only_space_ = std::move(shared_space);
}
SharedReadOnlySpace* shared_read_only_space() {
return shared_read_only_space_.get();
}
heap::List<MemoryChunk>& pages() { return pages_; }
void TransferPages(heap::List<MemoryChunk>&& pages) {
pages_ = std::move(pages);
}
const AllocationStats& accounting_stats() const { return stats_; }
void set_read_only_heap(std::unique_ptr<ReadOnlyHeap> read_only_heap);
ReadOnlyHeap* read_only_heap() { return read_only_heap_.get(); }
private:
heap::List<MemoryChunk> pages_;
AllocationStats stats_;
std::unique_ptr<SharedReadOnlySpace> shared_read_only_space_;
std::unique_ptr<ReadOnlyHeap> read_only_heap_;
};
// -----------------------------------------------------------------------------
// Read Only space for all Immortal Immovable and Immutable objects
class ReadOnlySpace : public PagedSpace {
public:
explicit ReadOnlySpace(Heap* heap);
// Detach the pages and add them to the artifacts for use in creating a
// SharedReadOnlySpace.
void DetachPagesAndAddToArtifacts(
std::shared_ptr<ReadOnlyArtifacts> artifacts);
~ReadOnlySpace() override { Unseal(); }
bool writable() const { return !is_marked_read_only_; }
bool Contains(Address a) = delete;
bool Contains(Object o) = delete;
V8_EXPORT_PRIVATE void ClearStringPaddingIfNeeded();
enum class SealMode { kDetachFromHeapAndForget, kDoNotDetachFromHeap };
// Seal the space by marking it read-only, optionally detaching it
// from the heap and forgetting it for memory bookkeeping purposes (e.g. to
// prevent the space's memory from registering as leaked).
void Seal(SealMode ro_mode);
// During boot the free_space_map is created, and afterwards we may need
// to write it into the free list nodes that were already created.
void RepairFreeListsAfterDeserialization();
size_t Available() override { return 0; }
protected:
void SetPermissionsForPages(MemoryAllocator* memory_allocator,
PageAllocator::Permission access);
bool is_marked_read_only_ = false;
private:
// Unseal the space after it has been sealed, by making it writable.
// TODO(v8:7464): Only possible if the space hasn't been detached.
void Unseal();
//
// String padding must be cleared just before serialization and therefore the
// string padding in the space will already have been cleared if the space was
// deserialized.
bool is_string_padding_cleared_;
};
class SharedReadOnlySpace : public ReadOnlySpace {
public:
SharedReadOnlySpace(Heap* heap, std::shared_ptr<ReadOnlyArtifacts> artifacts);
~SharedReadOnlySpace() override;
};
} // namespace internal
} // namespace v8
#endif // V8_HEAP_READ_ONLY_SPACES_H_
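On the consuming side, this header is all a second heap needs in order to alias the already-sealed pages. A hedged sketch, assuming the artifacts were populated as shown in the .cc above (the wrapper function is illustrative only):

// Illustrative only: create a SharedReadOnlySpace for another heap from
// previously detached artifacts. The constructor shallow-copies the page
// list and accounting stats, so no pages are duplicated.
std::unique_ptr<SharedReadOnlySpace> MakeSharedReadOnlySpace(
    Heap* heap, std::shared_ptr<ReadOnlyArtifacts> artifacts) {
  return std::make_unique<SharedReadOnlySpace>(heap, std::move(artifacts));
}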
......@@ -9,7 +9,6 @@
#include <utility>
#include "src/base/bits.h"
#include "src/base/lsan.h"
#include "src/base/macros.h"
#include "src/base/optional.h"
#include "src/base/platform/semaphore.h"
......@@ -3997,155 +3996,5 @@ void MapSpace::SortFreeList() {
void MapSpace::VerifyObject(HeapObject object) { CHECK(object.IsMap()); }
#endif
// -----------------------------------------------------------------------------
// ReadOnlySpace implementation
ReadOnlySpace::ReadOnlySpace(Heap* heap)
: PagedSpace(heap, RO_SPACE, NOT_EXECUTABLE, FreeList::CreateFreeList()),
is_string_padding_cleared_(heap->isolate()->initialized_from_snapshot()) {
}
ReadOnlyArtifacts::~ReadOnlyArtifacts() {
v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
MemoryChunk* next_chunk;
for (MemoryChunk* chunk = pages_.front(); chunk != nullptr;
chunk = next_chunk) {
void* chunk_address = reinterpret_cast<void*>(chunk->address());
page_allocator->SetPermissions(chunk_address, chunk->size(),
PageAllocator::kReadWrite);
next_chunk = chunk->list_node().next();
size_t size = RoundUp(chunk->size(), page_allocator->AllocatePageSize());
CHECK(page_allocator->FreePages(chunk_address, size));
}
}
void ReadOnlyArtifacts::set_read_only_heap(
std::unique_ptr<ReadOnlyHeap> read_only_heap) {
read_only_heap_ = std::move(read_only_heap);
}
SharedReadOnlySpace::~SharedReadOnlySpace() {
// Clear the memory chunk list before the space is deleted, so that the
// inherited destructors don't try to destroy the MemoryChunks themselves.
memory_chunk_list_ = heap::List<MemoryChunk>();
}
SharedReadOnlySpace::SharedReadOnlySpace(
Heap* heap, std::shared_ptr<ReadOnlyArtifacts> artifacts)
: ReadOnlySpace(heap) {
artifacts->pages().ShallowCopyTo(&memory_chunk_list_);
is_marked_read_only_ = true;
accounting_stats_ = artifacts->accounting_stats();
}
void ReadOnlySpace::DetachPagesAndAddToArtifacts(
std::shared_ptr<ReadOnlyArtifacts> artifacts) {
Heap* heap = ReadOnlySpace::heap();
Seal(SealMode::kDetachFromHeapAndForget);
artifacts->set_accounting_stats(accounting_stats_);
artifacts->TransferPages(std::move(memory_chunk_list_));
artifacts->set_shared_read_only_space(
std::make_unique<SharedReadOnlySpace>(heap, artifacts));
heap->ReplaceReadOnlySpace(artifacts->shared_read_only_space());
}
void ReadOnlyPage::MakeHeaderRelocatable() {
ReleaseAllocatedMemoryNeededForWritableChunk();
// Detached read-only space needs to have a valid marking bitmap and free list
// categories. Instruct Lsan to ignore them if required.
LSAN_IGNORE_OBJECT(categories_);
for (int i = kFirstCategory; i < free_list()->number_of_categories(); i++) {
LSAN_IGNORE_OBJECT(categories_[i]);
}
LSAN_IGNORE_OBJECT(marking_bitmap_);
heap_ = nullptr;
owner_ = nullptr;
}
void ReadOnlySpace::SetPermissionsForPages(MemoryAllocator* memory_allocator,
PageAllocator::Permission access) {
for (Page* p : *this) {
// Read-only pages don't have a valid reservation object, so we get the
// proper page allocator manually.
v8::PageAllocator* page_allocator =
memory_allocator->page_allocator(p->executable());
CHECK(SetPermissions(page_allocator, p->address(), p->size(), access));
}
}
// After we have booted, we have created a map which represents free space
// on the heap. If there was already a free list then the elements on it
// were created with the wrong FreeSpaceMap (normally nullptr), so we need to
// fix them.
void ReadOnlySpace::RepairFreeListsAfterDeserialization() {
free_list_->RepairLists(heap());
// Each page may have a small free space that is not tracked by a free list.
// Those free spaces still contain null as their map pointer.
// Overwrite them with new fillers.
for (Page* page : *this) {
int size = static_cast<int>(page->wasted_memory());
if (size == 0) {
// If there is no wasted memory then all free space is in the free list.
continue;
}
Address start = page->HighWaterMark();
Address end = page->area_end();
if (start < end - size) {
// A region at the high watermark is already in the free list.
HeapObject filler = HeapObject::FromAddress(start);
CHECK(filler.IsFreeSpaceOrFiller());
start += filler.Size();
}
CHECK_EQ(size, static_cast<int>(end - start));
heap()->CreateFillerObjectAt(start, size, ClearRecordedSlots::kNo);
}
}
void ReadOnlySpace::ClearStringPaddingIfNeeded() {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
// TODO(ulan): Revisit this once third-party heap supports iteration.
return;
}
if (is_string_padding_cleared_) return;
ReadOnlyHeapObjectIterator iterator(this);
for (HeapObject o = iterator.Next(); !o.is_null(); o = iterator.Next()) {
if (o.IsSeqOneByteString()) {
SeqOneByteString::cast(o).clear_padding();
} else if (o.IsSeqTwoByteString()) {
SeqTwoByteString::cast(o).clear_padding();
}
}
is_string_padding_cleared_ = true;
}
void ReadOnlySpace::Seal(SealMode ro_mode) {
DCHECK(!is_marked_read_only_);
FreeLinearAllocationArea();
is_marked_read_only_ = true;
auto* memory_allocator = heap()->memory_allocator();
if (ro_mode == SealMode::kDetachFromHeapAndForget) {
DetachFromHeap();
for (Page* p : *this) {
memory_allocator->UnregisterMemory(p);
static_cast<ReadOnlyPage*>(p)->MakeHeaderRelocatable();
}
}
SetPermissionsForPages(memory_allocator, PageAllocator::kRead);
}
void ReadOnlySpace::Unseal() {
DCHECK(is_marked_read_only_);
if (HasPages()) {
SetPermissionsForPages(heap()->memory_allocator(),
PageAllocator::kReadWrite);
}
is_marked_read_only_ = false;
}
} // namespace internal
} // namespace v8
......@@ -1117,16 +1117,6 @@ class Page : public MemoryChunk {
friend class MemoryAllocator;
};
class ReadOnlyPage : public Page {
public:
// Clears any pointers in the header that point out of the page that would
// otherwise make the header non-relocatable.
void MakeHeaderRelocatable();
private:
friend class ReadOnlySpace;
};
// Validate our estimates on the header size.
STATIC_ASSERT(sizeof(BasicMemoryChunk) <= BasicMemoryChunk::kHeaderSize);
STATIC_ASSERT(sizeof(MemoryChunk) <= MemoryChunk::kHeaderSize);
......@@ -3208,97 +3198,6 @@ class V8_EXPORT_PRIVATE OffThreadSpace : public LocalSpace {
void RefillFreeList() override;
};
// -----------------------------------------------------------------------------
// Artifacts used to construct a new SharedReadOnlySpace
class ReadOnlyArtifacts {
public:
~ReadOnlyArtifacts();
void set_accounting_stats(const AllocationStats& stats) { stats_ = stats; }
void set_shared_read_only_space(
std::unique_ptr<SharedReadOnlySpace> shared_space) {
shared_read_only_space_ = std::move(shared_space);
}
SharedReadOnlySpace* shared_read_only_space() {
return shared_read_only_space_.get();
}
heap::List<MemoryChunk>& pages() { return pages_; }
void TransferPages(heap::List<MemoryChunk>&& pages) {
pages_ = std::move(pages);
}
const AllocationStats& accounting_stats() const { return stats_; }
void set_read_only_heap(std::unique_ptr<ReadOnlyHeap> read_only_heap);
ReadOnlyHeap* read_only_heap() { return read_only_heap_.get(); }
private:
heap::List<MemoryChunk> pages_;
AllocationStats stats_;
std::unique_ptr<SharedReadOnlySpace> shared_read_only_space_;
std::unique_ptr<ReadOnlyHeap> read_only_heap_;
};
// -----------------------------------------------------------------------------
// Read Only space for all Immortal Immovable and Immutable objects
class ReadOnlySpace : public PagedSpace {
public:
explicit ReadOnlySpace(Heap* heap);
// Detach the pages and add them to the artifacts for use in creating a
// SharedReadOnlySpace.
void DetachPagesAndAddToArtifacts(
std::shared_ptr<ReadOnlyArtifacts> artifacts);
~ReadOnlySpace() override { Unseal(); }
bool writable() const { return !is_marked_read_only_; }
bool Contains(Address a) = delete;
bool Contains(Object o) = delete;
V8_EXPORT_PRIVATE void ClearStringPaddingIfNeeded();
enum class SealMode { kDetachFromHeapAndForget, kDoNotDetachFromHeap };
// Seal the space by marking it read-only, optionally detaching it
// from the heap and forgetting it for memory bookkeeping purposes (e.g. to
// prevent the space's memory from registering as leaked).
void Seal(SealMode ro_mode);
// During boot the free_space_map is created, and afterwards we may need
// to write it into the free list nodes that were already created.
void RepairFreeListsAfterDeserialization();
size_t Available() override { return 0; }
protected:
void SetPermissionsForPages(MemoryAllocator* memory_allocator,
PageAllocator::Permission access);
bool is_marked_read_only_ = false;
private:
// Unseal the space after it has been sealed, by making it writable.
// TODO(v8:7464): Only possible if the space hasn't been detached.
void Unseal();
//
// String padding must be cleared just before serialization and therefore the
// string padding in the space will already have been cleared if the space was
// deserialized.
bool is_string_padding_cleared_;
};
class SharedReadOnlySpace : public ReadOnlySpace {
public:
SharedReadOnlySpace(Heap* heap, std::shared_ptr<ReadOnlyArtifacts> artifacts);
~SharedReadOnlySpace() override;
};
// Iterates over the chunks (pages and large object pages) that can contain
// pointers to new space or to evacuation candidates.
class OldGenerationMemoryChunkIterator {
......
......@@ -4,6 +4,7 @@
#include "src/api/api-inl.h"
#include "src/flags/flags.h"
#include "src/heap/read-only-spaces.h"
#include "src/heap/spaces.h"
#include "test/cctest/cctest.h"
#include "tools/debug_helper/debug-helper.h"
......