Commit 9c8f8fad authored by Dominik Inführ's avatar Dominik Inführ Committed by Commit Bot

[heap] Remove MemoryChunk::FromAnyPointerAddress

This function was only used for the write barrier since the store
buffer only stored slots and needed a way to get to the object's start.
Now that we insert into the remembered set directly from the write
barrier this isn't an issue anymore: the write barrier knows the
object start.

Change-Id: I701465ea40b7c4ee20404ecbcf3750e5fa6fd219
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1876049
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#64518}
parent dfc21ed6
......@@ -27,23 +27,12 @@ STATIC_ASSERT(BasicMemoryChunk::kHeapOffset ==
// Initializes the header of a freshly reserved chunk. |size| is the total
// chunk size; |area_start|/|area_end| delimit the allocatable payload.
BasicMemoryChunk::BasicMemoryChunk(size_t size, Address area_start,
                                   Address area_end) {
  const Address chunk_address = reinterpret_cast<Address>(this);
  marking_bitmap_ = static_cast<Bitmap*>(calloc(1, Bitmap::kSize));
  size_ = size;
  // The sentinel is the tagged pointer of the chunk base — a value that can
  // never appear as a field inside an object, so HasHeaderSentinel() can use
  // it to recognize a chunk header.
  header_sentinel_ = HeapObject::FromAddress(chunk_address).ptr();
  DCHECK(HasHeaderSentinel(area_start));
  area_start_ = area_start;
  area_end_ = area_end;
}
// static
// Returns true iff |slot_addr| lies past the header of its chunk AND that
// chunk's sentinel slot still holds the tagged pointer of the chunk base
// (i.e. |slot_addr| belongs to a chunk whose header is intact).
bool BasicMemoryChunk::HasHeaderSentinel(Address slot_addr) {
  const Address chunk_base = BaseAddress(slot_addr);
  // Slots inside the header itself never count as sentinel hits; the
  // short-circuit also keeps the load from happening in that case.
  return slot_addr >= chunk_base + kHeaderSize &&
         HeapObject::FromAddress(chunk_base) ==
             ObjectSlot(chunk_base + kHeaderSentinelOffset).Relaxed_Load();
}
void BasicMemoryChunk::ReleaseMarkingBitmap() {
DCHECK_NOT_NULL(marking_bitmap_);
free(marking_bitmap_);
......
......@@ -167,18 +167,13 @@ class BasicMemoryChunk {
return addr >= area_start() && addr <= area_end();
}
V8_EXPORT_PRIVATE static bool HasHeaderSentinel(Address slot_addr);
void ReleaseMarkingBitmap();
static const intptr_t kSizeOffset = 0;
static const intptr_t kFlagsOffset = kSizeOffset + kSizetSize;
static const intptr_t kMarkBitmapOffset = kFlagsOffset + kUIntptrSize;
static const intptr_t kHeapOffset = kMarkBitmapOffset + kSystemPointerSize;
static const intptr_t kHeaderSentinelOffset =
kHeapOffset + kSystemPointerSize;
static const intptr_t kAreaStartOffset =
kHeaderSentinelOffset + kSystemPointerSize;
static const intptr_t kAreaStartOffset = kHeapOffset + kSystemPointerSize;
static const intptr_t kAreaEndOffset = kAreaStartOffset + kSystemPointerSize;
static const intptr_t kOldToNewSlotSetOffset =
kAreaEndOffset + kSystemPointerSize;
......@@ -188,7 +183,6 @@ class BasicMemoryChunk {
+ kUIntptrSize // uintptr_t flags_
+ kSystemPointerSize // Bitmap* marking_bitmap_
+ kSystemPointerSize // Heap* heap_
+ kSystemPointerSize // Address header_sentinel_
+ kSystemPointerSize // Address area_start_
+ kSystemPointerSize // Address area_end_
+ kSystemPointerSize * NUMBER_OF_REMEMBERED_SET_TYPES; // SlotSet* array
......@@ -207,12 +201,6 @@ class BasicMemoryChunk {
// layout under C++11.
Heap* heap_;
// This is used to distinguish the memory chunk header from the interior of a
// large page. The memory chunk header stores here an impossible tagged
pointer: the tagged pointer of the page start. A field in a large object is
// guaranteed to not contain such a pointer.
Address header_sentinel_;
// Start and end of allocatable memory on this chunk.
Address area_start_;
Address area_end_;
......@@ -237,8 +225,6 @@ class BasicMemoryChunkValidator {
offsetof(BasicMemoryChunk, marking_bitmap_));
STATIC_ASSERT(BasicMemoryChunk::kHeapOffset ==
offsetof(BasicMemoryChunk, heap_));
STATIC_ASSERT(BasicMemoryChunk::kHeaderSentinelOffset ==
offsetof(BasicMemoryChunk, header_sentinel_));
STATIC_ASSERT(BasicMemoryChunk::kOldToNewSlotSetOffset ==
offsetof(BasicMemoryChunk, slot_set_));
};
......
......@@ -6148,8 +6148,6 @@ Code Heap::GcSafeFindCodeForInnerPointer(Address inner_pointer) {
return GcSafeCastToCode(large_page->GetObject(), inner_pointer);
}
DCHECK(code_space()->Contains(inner_pointer));
// Iterate through the page until we reach the end or find an object starting
// after the inner pointer.
Page* page = Page::FromAddress(inner_pointer);
......
......@@ -160,10 +160,6 @@ bool NewSpace::ToSpaceContainsSlow(Address a) {
bool NewSpace::ToSpaceContains(Object o) { return to_space_.Contains(o); }
bool NewSpace::FromSpaceContains(Object o) { return from_space_.Contains(o); }
// Returns whether |addr| points into memory owned by this space. Accepts any
// interior pointer; the chunk lookup walks back to the chunk header first.
bool PagedSpace::Contains(Address addr) {
  MemoryChunk* chunk = MemoryChunk::FromAnyPointerAddress(addr);
  return chunk->owner() == this;
}
bool PagedSpace::Contains(Object o) {
if (!o.IsHeapObject()) return false;
return Page::FromAddress(o.ptr())->owner() == this;
......@@ -201,13 +197,6 @@ bool PagedSpace::TryFreeLast(HeapObject object, int object_size) {
return false;
}
// Maps an arbitrary interior pointer — possibly into the middle of a
// multi-page large object — back to its owning MemoryChunk. Steps backwards
// one page boundary at a time until a page with an intact header sentinel
// is found.
MemoryChunk* MemoryChunk::FromAnyPointerAddress(Address addr) {
  Address cursor = addr;
  for (; !HasHeaderSentinel(cursor); cursor = BaseAddress(cursor) - 1) {
    // Interior of a large page: retreat to just before this page's base so
    // the next iteration inspects the preceding page.
  }
  return FromAddress(cursor);
}
void MemoryChunk::IncrementExternalBackingStoreBytes(
ExternalBackingStoreType type, size_t amount) {
base::CheckedIncrement(&external_backing_store_bytes_[type], amount);
......
......@@ -692,7 +692,6 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
MemoryChunk* chunk = FromAddress(base);
DCHECK_EQ(base, chunk->address());
new (chunk) BasicMemoryChunk(size, area_start, area_end);
DCHECK(HasHeaderSentinel(area_start));
chunk->heap_ = heap;
chunk->set_owner(owner);
......@@ -803,15 +802,6 @@ LargePage* LargePage::Initialize(Heap* heap, MemoryChunk* chunk,
MSAN_ALLOCATED_UNINITIALIZED_MEMORY(chunk->area_start(), chunk->area_size());
// Initialize the sentinel value for each page boundary since the mutator
// may initialize the object starting from its end.
Address sentinel = chunk->address() + MemoryChunk::kHeaderSentinelOffset +
MemoryChunk::kPageSize;
while (sentinel < chunk->area_end()) {
*reinterpret_cast<intptr_t*>(sentinel) = kNullAddress;
sentinel += MemoryChunk::kPageSize;
}
LargePage* page = static_cast<LargePage*>(chunk);
page->SetFlag(MemoryChunk::LARGE_PAGE);
page->list_node().Initialize();
......
......@@ -641,8 +641,6 @@ class MemoryChunk : public BasicMemoryChunk {
void SetOldGenerationPageFlags(bool is_marking);
void SetYoungGenerationPageFlags(bool is_marking);
static inline MemoryChunk* FromAnyPointerAddress(Address addr);
static inline void UpdateHighWaterMark(Address mark) {
if (mark == kNullAddress) return;
// Need to subtract one from the mark because when a chunk is full the
......
......@@ -6350,7 +6350,7 @@ HEAP_TEST(Regress5831) {
for (int i = 0; i < kMaxIterations; i++) {
Handle<Code> code = GenerateDummyImmovableCode(isolate);
array = FixedArray::SetAndGrow(isolate, array, i, code);
CHECK(heap->code_space()->Contains(code->address()) ||
CHECK(heap->code_space()->Contains(*code) ||
heap->code_lo_space()->Contains(*code));
if (heap->code_lo_space()->Contains(*code)) {
overflowed_into_lospace = true;
......
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment