Commit 2b79eefe authored by Dominik Inführ, committed by V8 LUCI CQ

Reland^2: [heap] Store size with invalidated object

This is a reland of commit 23b2d571

When updating pointers during a full GC, a page might not have been
swept yet. In that case the page may still contain invalid objects and
slots recorded in free memory. Updating tagged slots in free memory is
harmless, even though not strictly necessary.

However, the GC also needs to compute the size of a potentially dead
invalid object in order to check whether a slot lies within that
object. Since the object is dead, its map might be dead as well, which
makes size computation impossible for such objects. This CL therefore
caches the size of invalid objects. A follow-up CL will also check the
marking bit of invalid objects.
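
As a standalone sketch of the idea (plain C++, not V8 code; all names
here are made up for illustration): invalidated objects are tracked as
(start address, cached size) pairs in an ordered map, so checking
whether a slot falls inside a dead object never has to load the dead
object's map.

#include <cassert>  // used by the follow-up sketch below
#include <cstdint>
#include <map>

using Address = std::uintptr_t;

// Start address -> cached size in bytes, mirroring the diff's
// InvalidatedSlots = std::map<HeapObject, int, Object::Comparer>.
static std::map<Address, int> invalidated_objects;

void RegisterInvalidatedObject(Address start, int size) {
  // Re-registering an object overwrites the cached size.
  invalidated_objects.insert_or_assign(start, size);
}

// Returns true if the slot lies inside a registered invalidated object.
bool SlotInInvalidatedObject(Address slot) {
  auto it = invalidated_objects.upper_bound(slot);
  if (it == invalidated_objects.begin()) return false;
  --it;  // Now it->first <= slot.
  return slot < it->first + static_cast<Address>(it->second);
}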

Reasons for the reverts:

Revert #2: In-object slack tracking on JSObjects didn't update the
cached size of invalidated objects. The fix was to stop invalidating
recorded slots on JSObjects altogether, which avoids the problem
entirely (see https://crrev.com/c/3620274).

Revert #1: Not all size changes go through NotifyObjectLayoutChange,
so https://crrev.com/c/3607992 introduced NotifyObjectSizeChange as a
single bottleneck for object size changes and right-trimming. That
method is now used to update the size of invalidated objects.
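
Continuing the sketch above (still hypothetical, not V8's actual code):
routing every in-place shrink through one bottleneck function is what
keeps the cached size consistent with the object's real size.

// All shrinking operations (right-trimming, in-object slack tracking)
// funnel through here, so the cached size can never go stale.
void NotifyObjectSizeChange(Address object, int old_size, int new_size) {
  assert(new_size <= old_size);  // Objects only ever shrink in place.
  if (new_size == old_size) return;
  // Update the cached size before the freed tail is reused, so the
  // slot filter's range check matches the new object boundary.
  auto it = invalidated_objects.find(object);
  if (it != invalidated_objects.end()) it->second = new_size;
  // ... then fill [object + new_size, object + old_size) with a filler ...
}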

Bug: v8:12578, chromium:1316289
Change-Id: I0478d04601c0270ddb39419ca6cf98719951eb4d
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3623542
Reviewed-by: Jakob Linke <jgruber@chromium.org>
Reviewed-by: Patrick Thier <pthier@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#80344}
parent 39d0c5e7
@@ -13,6 +13,7 @@
#include "src/diagnostics/disasm.h"
#include "src/execution/frames.h"
#include "src/execution/isolate.h"
#include "src/heap/heap.h"
#include "src/numbers/conversions.h"
#include "src/objects/arguments.h"
#include "src/objects/heap-number-inl.h"
@@ -1850,7 +1851,9 @@ void TranslatedState::InitializeJSObjectAt(
CHECK_GE(children_count, 2);
// Notify the concurrent marker about the layout change.
isolate()->heap()->NotifyObjectLayoutChange(*object_storage, no_gc);
isolate()->heap()->NotifyObjectLayoutChange(
*object_storage, no_gc, InvalidateRecordedSlots::kYes,
slot->GetChildrenCount() * kTaggedSize);
// Fill the property array field.
{
@@ -1902,7 +1905,9 @@ void TranslatedState::InitializeObjectWithTaggedFieldsAt(
}
// Notify the concurrent marker about the layout change.
isolate()->heap()->NotifyObjectLayoutChange(*object_storage, no_gc);
isolate()->heap()->NotifyObjectLayoutChange(
*object_storage, no_gc, InvalidateRecordedSlots::kYes,
slot->GetChildrenCount() * kTaggedSize);
// Write the fields to the object.
for (int i = 1; i < children_count; i++) {
@@ -3532,8 +3532,9 @@ FixedArrayBase Heap::LeftTrimFixedArray(FixedArrayBase object,
#ifdef DEBUG
if (MayContainRecordedSlots(object)) {
MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
DCHECK(!chunk->RegisteredObjectWithInvalidatedSlots<OLD_TO_OLD>(object));
DCHECK(!chunk->RegisteredObjectWithInvalidatedSlots<OLD_TO_NEW>(object));
DCHECK(!chunk->RegisteredObjectWithInvalidatedSlots<OLD_TO_OLD>(object));
DCHECK(!chunk->RegisteredObjectWithInvalidatedSlots<OLD_TO_SHARED>(object));
}
#endif
@@ -3950,22 +3951,24 @@ void Heap::FinalizeIncrementalMarkingIncrementally(
void Heap::NotifyObjectLayoutChange(
HeapObject object, const DisallowGarbageCollection&,
InvalidateRecordedSlots invalidate_recorded_slots) {
InvalidateRecordedSlots invalidate_recorded_slots, int new_size) {
DCHECK_IMPLIES(invalidate_recorded_slots == InvalidateRecordedSlots::kYes,
new_size > 0);
if (incremental_marking()->IsMarking()) {
incremental_marking()->MarkBlackAndVisitObjectDueToLayoutChange(object);
if (incremental_marking()->IsCompacting() &&
invalidate_recorded_slots == InvalidateRecordedSlots::kYes &&
MayContainRecordedSlots(object)) {
MemoryChunk::FromHeapObject(object)
->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(object);
->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(object, new_size);
}
}
if (invalidate_recorded_slots == InvalidateRecordedSlots::kYes &&
MayContainRecordedSlots(object)) {
MemoryChunk::FromHeapObject(object)
->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(object);
->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(object, new_size);
MemoryChunk::FromHeapObject(object)
->RegisterObjectWithInvalidatedSlots<OLD_TO_SHARED>(object);
->RegisterObjectWithInvalidatedSlots<OLD_TO_SHARED>(object, new_size);
}
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
@@ -3980,6 +3983,8 @@ void Heap::NotifyObjectSizeChange(HeapObject object, int old_size, int new_size,
DCHECK_LE(new_size, old_size);
if (new_size == old_size) return;
UpdateInvalidatedObjectSize(object, new_size);
const bool is_background = LocalHeap::Current() != nullptr;
DCHECK_IMPLIES(is_background,
clear_recorded_slots == ClearRecordedSlots::kNo);
@@ -3996,6 +4001,20 @@ void Heap::NotifyObjectSizeChange(HeapObject object, int old_size, int new_size,
clear_recorded_slots, verify_no_slots_recorded);
}
void Heap::UpdateInvalidatedObjectSize(HeapObject object, int new_size) {
if (!MayContainRecordedSlots(object)) return;
if (incremental_marking()->IsCompacting()) {
MemoryChunk::FromHeapObject(object)
->UpdateInvalidatedObjectSize<OLD_TO_OLD>(object, new_size);
}
MemoryChunk::FromHeapObject(object)->UpdateInvalidatedObjectSize<OLD_TO_NEW>(
object, new_size);
MemoryChunk::FromHeapObject(object)
->UpdateInvalidatedObjectSize<OLD_TO_SHARED>(object, new_size);
}
#ifdef VERIFY_HEAP
// Helper class for collecting slot addresses.
class SlotCollectingVisitor final : public ObjectVisitor {
@@ -4665,11 +4684,35 @@ void Heap::Verify() {
if (new_lo_space_) new_lo_space_->Verify(isolate());
isolate()->string_table()->VerifyIfOwnedBy(isolate());
VerifyInvalidatedObjectSize();
#if DEBUG
VerifyCommittedPhysicalMemory();
#endif // DEBUG
}
namespace {
void VerifyInvalidatedSlots(InvalidatedSlots* invalidated_slots) {
if (!invalidated_slots) return;
for (std::pair<HeapObject, int> object_and_size : *invalidated_slots) {
HeapObject object = object_and_size.first;
int size = object_and_size.second;
CHECK_EQ(object.Size(), size);
}
}
} // namespace
void Heap::VerifyInvalidatedObjectSize() {
OldGenerationMemoryChunkIterator chunk_iterator(this);
MemoryChunk* chunk;
while ((chunk = chunk_iterator.next()) != nullptr) {
VerifyInvalidatedSlots(chunk->invalidated_slots<OLD_TO_NEW>());
VerifyInvalidatedSlots(chunk->invalidated_slots<OLD_TO_OLD>());
VerifyInvalidatedSlots(chunk->invalidated_slots<OLD_TO_SHARED>());
}
}
void Heap::VerifyReadOnlyHeap() {
CHECK(!read_only_space_->writable());
read_only_space_->Verify(isolate());
@@ -1127,8 +1127,7 @@ class Heap {
// manually.
void NotifyObjectLayoutChange(
HeapObject object, const DisallowGarbageCollection&,
InvalidateRecordedSlots invalidate_recorded_slots =
InvalidateRecordedSlots::kYes);
InvalidateRecordedSlots invalidate_recorded_slots, int new_size = 0);
// The runtime uses this function to inform the GC of object size changes. The
// GC will fill this area with a filler object and might clear recorded slots
@@ -1603,6 +1602,9 @@ class Heap {
// created.
void VerifyReadOnlyHeap();
void VerifyRememberedSetFor(HeapObject object);
// Verify that the cached size of each invalidated object is up-to-date.
void VerifyInvalidatedObjectSize();
#endif
#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
@@ -1819,6 +1821,9 @@ class Heap {
V8_EXPORT_PRIVATE void ZapCodeObject(Address start_address,
int size_in_bytes);
// Updates invalidated object size in all remembered sets.
void UpdateInvalidatedObjectSize(HeapObject object, int new_size);
enum class VerifyNoSlotsRecorded { kYes, kNo };
// This method is used by the sweeper on free memory ranges to make the page
@@ -28,22 +28,18 @@ bool InvalidatedSlotsFilter::IsValid(Address slot) {
NextInvalidatedObject();
}
HeapObject invalidated_object = HeapObject::FromAddress(invalidated_start_);
if (invalidated_size_ == 0) {
DCHECK(MarkCompactCollector::IsMapOrForwarded(invalidated_object.map()));
invalidated_size_ = invalidated_object.Size();
}
int offset = static_cast<int>(slot - invalidated_start_);
// OLD_TO_OLD can have slots in map word unlike other remembered sets.
DCHECK_GE(offset, 0);
DCHECK_IMPLIES(remembered_set_type_ != OLD_TO_OLD, offset > 0);
if (offset < invalidated_size_)
return offset == 0 ||
invalidated_object.IsValidSlot(invalidated_object.map(), offset);
if (offset < invalidated_size_) {
if (offset == 0) return true;
HeapObject invalidated_object = HeapObject::FromAddress(invalidated_start_);
DCHECK(MarkCompactCollector::IsMapOrForwarded(invalidated_object.map()));
return invalidated_object.IsValidSlot(invalidated_object.map(), offset);
}
NextInvalidatedObject();
return true;
@@ -51,12 +47,14 @@ bool InvalidatedSlotsFilter::IsValid(Address slot) {
void InvalidatedSlotsFilter::NextInvalidatedObject() {
invalidated_start_ = next_invalidated_start_;
invalidated_size_ = 0;
invalidated_size_ = next_invalidated_size_;
if (iterator_ == iterator_end_) {
next_invalidated_start_ = sentinel_;
next_invalidated_size_ = 0;
} else {
next_invalidated_start_ = iterator_->address();
next_invalidated_start_ = iterator_->first.address();
next_invalidated_size_ = iterator_->second;
iterator_++;
}
}
@@ -87,7 +85,7 @@ void InvalidatedSlotsCleanup::Free(Address free_start, Address free_end) {
void InvalidatedSlotsCleanup::NextInvalidatedObject() {
if (iterator_ != iterator_end_) {
invalidated_start_ = iterator_->address();
invalidated_start_ = iterator_->first.address();
} else {
invalidated_start_ = sentinel_;
}
@@ -21,7 +21,7 @@ namespace internal {
// that potentially invalidates slots recorded concurrently. The second part
// of each element is the size of the corresponding object before the layout
// change.
using InvalidatedSlots = std::set<HeapObject, Object::Comparer>;
using InvalidatedSlots = std::map<HeapObject, int, Object::Comparer>;
// This class provides IsValid predicate that takes into account the set
// of invalidated objects in the given memory chunk.
@@ -45,9 +45,10 @@ class V8_EXPORT_PRIVATE InvalidatedSlotsFilter {
InvalidatedSlots::const_iterator iterator_;
InvalidatedSlots::const_iterator iterator_end_;
Address sentinel_;
Address invalidated_start_;
Address next_invalidated_start_;
int invalidated_size_;
Address invalidated_start_{kNullAddress};
Address next_invalidated_start_{kNullAddress};
int invalidated_size_{0};
int next_invalidated_size_{0};
InvalidatedSlots empty_;
#ifdef DEBUG
Address last_slot_;
@@ -370,14 +370,17 @@ void MemoryChunk::ReleaseInvalidatedSlots() {
}
template V8_EXPORT_PRIVATE void
MemoryChunk::RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(HeapObject object);
MemoryChunk::RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(HeapObject object,
int new_size);
template V8_EXPORT_PRIVATE void
MemoryChunk::RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(HeapObject object);
MemoryChunk::RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(HeapObject object,
int new_size);
template V8_EXPORT_PRIVATE void MemoryChunk::RegisterObjectWithInvalidatedSlots<
OLD_TO_SHARED>(HeapObject object);
OLD_TO_SHARED>(HeapObject object, int new_size);
template <RememberedSetType type>
void MemoryChunk::RegisterObjectWithInvalidatedSlots(HeapObject object) {
void MemoryChunk::RegisterObjectWithInvalidatedSlots(HeapObject object,
int new_size) {
DCHECK(!object.IsJSReceiver());
bool skip_slot_recording;
@@ -406,13 +409,43 @@ void MemoryChunk::RegisterObjectWithInvalidatedSlots(HeapObject object) {
AllocateInvalidatedSlots<type>();
}
invalidated_slots<type>()->insert(object);
DCHECK_GT(new_size, 0);
InvalidatedSlots& invalidated_slots = *this->invalidated_slots<type>();
DCHECK_IMPLIES(invalidated_slots[object] > 0,
new_size <= invalidated_slots[object]);
invalidated_slots.insert_or_assign(object, new_size);
}
template V8_EXPORT_PRIVATE void
MemoryChunk::UpdateInvalidatedObjectSize<OLD_TO_NEW>(HeapObject object,
int new_size);
template V8_EXPORT_PRIVATE void
MemoryChunk::UpdateInvalidatedObjectSize<OLD_TO_OLD>(HeapObject object,
int new_size);
template V8_EXPORT_PRIVATE void
MemoryChunk::UpdateInvalidatedObjectSize<OLD_TO_SHARED>(HeapObject object,
int new_size);
template <RememberedSetType type>
void MemoryChunk::UpdateInvalidatedObjectSize(HeapObject object, int new_size) {
DCHECK_GT(new_size, 0);
if (invalidated_slots<type>() == nullptr) return;
InvalidatedSlots& invalidated_slots = *this->invalidated_slots<type>();
DCHECK_IMPLIES(invalidated_slots[object] > 0,
new_size <= invalidated_slots[object]);
if (invalidated_slots.count(object) > 0) {
invalidated_slots.insert_or_assign(object, new_size);
}
}
template bool MemoryChunk::RegisteredObjectWithInvalidatedSlots<OLD_TO_NEW>(
HeapObject object);
template bool MemoryChunk::RegisteredObjectWithInvalidatedSlots<OLD_TO_OLD>(
HeapObject object);
template bool MemoryChunk::RegisteredObjectWithInvalidatedSlots<OLD_TO_SHARED>(
HeapObject object);
template <RememberedSetType type>
bool MemoryChunk::RegisteredObjectWithInvalidatedSlots(HeapObject object) {
@@ -143,7 +143,11 @@ class MemoryChunk : public BasicMemoryChunk {
template <RememberedSetType type>
void ReleaseInvalidatedSlots();
template <RememberedSetType type>
V8_EXPORT_PRIVATE void RegisterObjectWithInvalidatedSlots(HeapObject object);
V8_EXPORT_PRIVATE void RegisterObjectWithInvalidatedSlots(HeapObject object,
int new_size);
template <RememberedSetType type>
V8_EXPORT_PRIVATE void UpdateInvalidatedObjectSize(HeapObject object,
int new_size);
template <RememberedSetType type>
bool RegisteredObjectWithInvalidatedSlots(HeapObject object);
template <RememberedSetType type>
@@ -395,11 +395,6 @@ bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
bool is_internalized = this->IsInternalizedString();
bool has_pointers = StringShape(*this).IsIndirect();
if (has_pointers) {
isolate->heap()->NotifyObjectLayoutChange(*this, no_gc,
InvalidateRecordedSlots::kYes);
}
base::SharedMutexGuard<base::kExclusive> shared_mutex_guard(
isolate->internalized_string_access());
// Morph the string to an external string by replacing the map and
@@ -423,6 +418,12 @@ bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
// Byte size of the external String object.
int new_size = this->SizeFromMap(new_map);
if (has_pointers) {
isolate->heap()->NotifyObjectLayoutChange(
*this, no_gc, InvalidateRecordedSlots::kYes, new_size);
}
if (!isolate->heap()->IsLargeObject(*this)) {
isolate->heap()->NotifyObjectSizeChange(
*this, size, new_size,
@@ -479,11 +480,6 @@ bool String::MakeExternal(v8::String::ExternalOneByteStringResource* resource) {
bool is_internalized = this->IsInternalizedString();
bool has_pointers = StringShape(*this).IsIndirect();
if (has_pointers) {
isolate->heap()->NotifyObjectLayoutChange(*this, no_gc,
InvalidateRecordedSlots::kYes);
}
base::SharedMutexGuard<base::kExclusive> shared_mutex_guard(
isolate->internalized_string_access());
// Morph the string to an external string by replacing the map and
@@ -508,6 +504,11 @@ bool String::MakeExternal(v8::String::ExternalOneByteStringResource* resource) {
// Byte size of the external String object.
int new_size = this->SizeFromMap(new_map);
if (has_pointers) {
isolate->heap()->NotifyObjectLayoutChange(
*this, no_gc, InvalidateRecordedSlots::kYes, new_size);
}
isolate->heap()->NotifyObjectSizeChange(
*this, size, new_size,
has_pointers ? ClearRecordedSlots::kYes : ClearRecordedSlots::kNo);
@@ -234,13 +234,15 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithInvalidatedSlots) {
}
}
// First object is going to be evacuated.
HeapObject front_object = *compaction_page_handles.front();
to_be_aborted_page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(
*compaction_page_handles.front());
front_object, front_object.Size());
// Last object is NOT going to be evacuated.
// This happens since not all objects fit on the only other page in the
// old space and the GC isn't allowed to allocate another page.
HeapObject back_object = *compaction_page_handles.back();
to_be_aborted_page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(
*compaction_page_handles.back());
back_object, back_object.Size());
to_be_aborted_page->SetFlag(
MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
@@ -71,7 +71,9 @@ HEAP_TEST(InvalidatedSlotsSomeInvalidatedRanges) {
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
// Register every second byte array as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i += 2) {
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
ByteArray byte_array = byte_arrays[i];
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_array,
byte_array.Size());
}
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
for (size_t i = 0; i < byte_arrays.size(); i++) {
@@ -96,7 +98,9 @@ HEAP_TEST(InvalidatedSlotsAllInvalidatedRanges) {
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
// Register all the byte arrays as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i++) {
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
ByteArray byte_array = byte_arrays[i];
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_array,
byte_array.Size());
}
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
for (size_t i = 0; i < byte_arrays.size(); i++) {
@@ -117,16 +121,18 @@ HEAP_TEST(InvalidatedSlotsAfterTrimming) {
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
// Register all the byte arrays as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i++) {
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(
byte_arrays[i], ByteArray::kHeaderSize);
}
// Trim byte arrays and check that the slots outside the byte arrays are
// considered invalid if the old space page was swept.
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
for (size_t i = 0; i < byte_arrays.size(); i++) {
ByteArray byte_array = byte_arrays[i];
Address start = byte_array.address() + ByteArray::kHeaderSize;
Address end = byte_array.address() + byte_array.Size();
heap->RightTrimFixedArray(byte_array, byte_array.length());
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
for (Address addr = start; addr < end; addr += kTaggedSize) {
CHECK_EQ(filter.IsValid(addr), page->SweepingDone());
}
@@ -144,7 +150,9 @@ HEAP_TEST(InvalidatedSlotsEvacuationCandidate) {
// This should be a no-op because the page is marked as an evacuation
// candidate.
for (size_t i = 0; i < byte_arrays.size(); i++) {
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
ByteArray byte_array = byte_arrays[i];
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_array,
byte_array.Size());
}
// All slots must still be valid.
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
@@ -168,7 +176,9 @@ HEAP_TEST(InvalidatedSlotsResetObjectRegression) {
heap->RightTrimFixedArray(byte_arrays[0], byte_arrays[0].length() - 8);
// Register all the byte arrays as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i++) {
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
ByteArray byte_array = byte_arrays[i];
page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_array,
byte_array.Size());
}
// All slots must still be invalid.
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
@@ -361,7 +371,9 @@ HEAP_TEST(InvalidatedSlotsCleanupFull) {
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
// Register all byte arrays as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i++) {
page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(byte_arrays[i]);
ByteArray byte_array = byte_arrays[i];
page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(byte_array,
byte_array.Size());
}
// Mark full page as free
@@ -380,7 +392,9 @@ HEAP_TEST(InvalidatedSlotsCleanupEachObject) {
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
// Register all byte arrays as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i++) {
page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(byte_arrays[i]);
ByteArray byte_array = byte_arrays[i];
page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(byte_array,
byte_array.Size());
}
// Mark each object as free on page
@@ -407,7 +421,8 @@ HEAP_TEST(InvalidatedSlotsCleanupRightTrim) {
ByteArray& invalidated = byte_arrays[1];
heap->RightTrimFixedArray(invalidated, invalidated.length() - 8);
page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(invalidated);
page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(invalidated,
invalidated.Size());
// Free memory at end of invalidated object
InvalidatedSlotsCleanup cleanup = InvalidatedSlotsCleanup::OldToNew(page);