Commit 2480d148 authored by Wenyu Zhao, committed by V8 LUCI CQ

Prevent memory chunk access for TPH

Bug: v8:11641
Change-Id: I675b6968219a315a4b6f4bf1899d81931b5b4e0e
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2875316
Auto-Submit: Wenyu Zhao <wenyu.zhao@anu.edu.au>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#74405}
parent 4ce88f56
@@ -390,6 +390,8 @@ DEFINE_NEG_IMPLICATION(enable_third_party_heap, concurrent_inlining)
 DEFINE_NEG_IMPLICATION(enable_third_party_heap,
                        finalize_streaming_on_background)
 DEFINE_NEG_IMPLICATION(enable_third_party_heap, use_marking_progress_bar)
+DEFINE_NEG_IMPLICATION(enable_third_party_heap, move_object_start)
+DEFINE_NEG_IMPLICATION(enable_third_party_heap, concurrent_marking)
 DEFINE_BOOL_READONLY(enable_third_party_heap, V8_ENABLE_THIRD_PARTY_HEAP_BOOL,
                      "Use third-party heap")
......
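For context: a negative implication in V8's flag machinery forces the dependent flag off whenever the premise flag is on, so `--enable-third-party-heap` now also disables object-start moving and concurrent marking. A minimal standalone sketch of that behavior (a simplified model, not V8's actual macro expansion):

```cpp
#include <cstdio>

// Simplified model of DEFINE_NEG_IMPLICATION: when the premise flag is
// on, each dependent flag is forced off before the VM starts.
struct Flags {
  bool enable_third_party_heap = true;
  bool move_object_start = true;
  bool concurrent_marking = true;
};

void EnforceImplications(Flags& f) {
  if (f.enable_third_party_heap) {
    f.move_object_start = false;   // NEG_IMPLICATION(..., move_object_start)
    f.concurrent_marking = false;  // NEG_IMPLICATION(..., concurrent_marking)
  }
}

int main() {
  Flags f;
  EnforceImplications(f);
  std::printf("move_object_start=%d concurrent_marking=%d\n",
              f.move_object_start, f.concurrent_marking);
  return 0;
}
```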
@@ -299,11 +299,13 @@ class BasicMemoryChunk {
   // Only works if the pointer is in the first kPageSize of the MemoryChunk.
   static BasicMemoryChunk* FromAddress(Address a) {
+    DCHECK(!V8_ENABLE_THIRD_PARTY_HEAP_BOOL);
     return reinterpret_cast<BasicMemoryChunk*>(BaseAddress(a));
   }
 
   // Only works if the object is in the first kPageSize of the MemoryChunk.
   static BasicMemoryChunk* FromHeapObject(HeapObject o) {
+    DCHECK(!V8_ENABLE_THIRD_PARTY_HEAP_BOOL);
     return reinterpret_cast<BasicMemoryChunk*>(BaseAddress(o.ptr()));
   }
......
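The DCHECKs above guard the chunk-base computation, which only holds for pages laid out by V8's own allocator. A rough sketch of the masking that `BaseAddress` performs, assuming a power-of-two chunk alignment (the 2^18 = 256 KiB value here is illustrative, not taken from this patch):

```cpp
#include <cstdint>

// Illustrative chunk alignment; V8 derives the real value from
// kPageSizeBits in its build configuration.
constexpr uintptr_t kChunkAlignment = uintptr_t{1} << 18;
constexpr uintptr_t kChunkAlignmentMask = kChunkAlignment - 1;

// Clearing the low alignment bits maps any interior pointer within the
// first kPageSize of a chunk back to the chunk's base address. Under a
// third-party heap, pages need not be laid out this way, hence the DCHECK.
inline uintptr_t BaseAddress(uintptr_t a) { return a & ~kChunkAlignmentMask; }
```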
@@ -382,12 +382,14 @@ void Heap::RegisterExternalString(String string) {
 void Heap::FinalizeExternalString(String string) {
   DCHECK(string.IsExternalString());
-  Page* page = Page::FromHeapObject(string);
   ExternalString ext_string = ExternalString::cast(string);
 
-  page->DecrementExternalBackingStoreBytes(
-      ExternalBackingStoreType::kExternalString,
-      ext_string.ExternalPayloadSize());
+  if (!FLAG_enable_third_party_heap) {
+    Page* page = Page::FromHeapObject(string);
+    page->DecrementExternalBackingStoreBytes(
+        ExternalBackingStoreType::kExternalString,
+        ext_string.ExternalPayloadSize());
+  }
 
   ext_string.DisposeResource(isolate());
 }
......
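The shape of this change is the recurring pattern in the CL: page-level accounting is skipped when the third-party heap owns the pages, while the external resource is still released. A hedged, self-contained sketch of that control flow (the globals and stub bodies here are stand-ins, not V8 code):

```cpp
#include <cstddef>
#include <cstdio>

// Stand-ins for FLAG_enable_third_party_heap and the per-page counter.
static bool flag_enable_third_party_heap = false;
static size_t external_backing_store_bytes = 1024;

// Stub for Page::DecrementExternalBackingStoreBytes: only V8-managed
// pages carry per-page external backing-store counters.
void DecrementExternalBackingStoreBytes(size_t n) {
  external_backing_store_bytes -= n;
}

void DisposeResource() { std::puts("resource disposed"); }

void FinalizeExternalString(size_t payload_size) {
  if (!flag_enable_third_party_heap) {
    DecrementExternalBackingStoreBytes(payload_size);  // skip page math under TPH
  }
  DisposeResource();  // runs under both heaps
}

int main() {
  FinalizeExternalString(512);
  std::printf("remaining bytes: %zu\n", external_backing_store_bytes);
  return 0;
}
```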
@@ -2622,6 +2622,8 @@ bool Heap::ExternalStringTable::Contains(String string) {
 void Heap::UpdateExternalString(String string, size_t old_payload,
                                 size_t new_payload) {
   DCHECK(string.IsExternalString());
+  if (FLAG_enable_third_party_heap) return;
+
   Page* page = Page::FromHeapObject(string);
 
   if (old_payload > new_payload) {
@@ -6131,14 +6133,14 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
   bool SkipObject(HeapObject object) override {
     if (object.IsFreeSpaceOrFiller()) return true;
-    BasicMemoryChunk* chunk = BasicMemoryChunk::FromHeapObject(object);
+    Address chunk = object.ptr() & ~kLogicalChunkAlignmentMask;
     if (reachable_.count(chunk) == 0) return true;
     return reachable_[chunk]->count(object) == 0;
   }
 
  private:
   bool MarkAsReachable(HeapObject object) {
-    BasicMemoryChunk* chunk = BasicMemoryChunk::FromHeapObject(object);
+    Address chunk = object.ptr() & ~kLogicalChunkAlignmentMask;
     if (reachable_.count(chunk) == 0) {
       reachable_[chunk] = new std::unordered_set<HeapObject, Object::Hasher>();
     }
@@ -6147,6 +6149,12 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
     return true;
   }
 
+  static constexpr intptr_t kLogicalChunkAlignment =
+      (static_cast<uintptr_t>(1) << kPageSizeBits);
+
+  static constexpr intptr_t kLogicalChunkAlignmentMask =
+      kLogicalChunkAlignment - 1;
+
   class MarkingVisitor : public ObjectVisitor, public RootVisitor {
    public:
     explicit MarkingVisitor(UnreachableObjectsFilter* filter)
@@ -6229,8 +6237,7 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
   Heap* heap_;
   DISALLOW_GARBAGE_COLLECTION(no_gc_)
-  std::unordered_map<BasicMemoryChunk*,
-                     std::unordered_set<HeapObject, Object::Hasher>*>
+  std::unordered_map<Address, std::unordered_set<HeapObject, Object::Hasher>*>
       reachable_;
 };
......
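The data-structure change above re-keys `reachable_` from `BasicMemoryChunk*` to a plain address masked to a "logical chunk", so the filter groups objects by address range without ever dereferencing a chunk header that a third-party heap may not provide. A self-contained sketch of the new bucketing (`Address` and the alignment constant are stand-ins mirroring the patch; kPageSizeBits is taken as 18 purely for illustration):

```cpp
#include <cstdint>
#include <unordered_map>
#include <unordered_set>

using Address = uintptr_t;

// Mirrors the constants added to UnreachableObjectsFilter.
constexpr intptr_t kLogicalChunkAlignment = intptr_t{1} << 18;
constexpr intptr_t kLogicalChunkAlignmentMask = kLogicalChunkAlignment - 1;

class ReachabilityBuckets {
 public:
  void MarkAsReachable(Address object) {
    reachable_[ChunkOf(object)].insert(object);
  }
  bool IsReachable(Address object) const {
    auto it = reachable_.find(ChunkOf(object));
    return it != reachable_.end() && it->second.count(object) > 0;
  }

 private:
  // Pure address arithmetic: no MemoryChunk header is ever touched.
  static Address ChunkOf(Address object) {
    return object & ~kLogicalChunkAlignmentMask;
  }
  std::unordered_map<Address, std::unordered_set<Address>> reachable_;
};
```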
@@ -32,6 +32,7 @@ class LargePage : public MemoryChunk {
   static const int kMaxCodePageSize = 512 * MB;
 
   static LargePage* FromHeapObject(HeapObject o) {
+    DCHECK(!V8_ENABLE_THIRD_PARTY_HEAP_BOOL);
     return static_cast<LargePage*>(MemoryChunk::FromHeapObject(o));
   }
......
@@ -221,9 +221,11 @@ class Page : public MemoryChunk {
   // from [page_addr .. page_addr + kPageSize[. This only works if the object
   // is in fact in a page.
   static Page* FromAddress(Address addr) {
+    DCHECK(!V8_ENABLE_THIRD_PARTY_HEAP_BOOL);
     return reinterpret_cast<Page*>(addr & ~kPageAlignmentMask);
   }
   static Page* FromHeapObject(HeapObject o) {
+    DCHECK(!V8_ENABLE_THIRD_PARTY_HEAP_BOOL);
     return reinterpret_cast<Page*>(o.ptr() & ~kAlignmentMask);
   }
@@ -232,6 +234,7 @@ class Page : public MemoryChunk {
   // we subtract a hole word. The valid address ranges from
   // [page_addr + area_start_ .. page_addr + kPageSize + kTaggedSize].
   static Page* FromAllocationAreaAddress(Address address) {
+    DCHECK(!V8_ENABLE_THIRD_PARTY_HEAP_BOOL);
     return Page::FromAddress(address - kTaggedSize);
   }
......
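These guards are debug-only: DCHECK compiles to a no-op in release builds, so `FromAddress` keeps its single mask instruction in shipping configurations while debug builds trap any page arithmetic attempted under the third-party heap. A small sketch of the idea (the assert-based DCHECK stand-in and the 18-bit mask are assumptions for illustration):

```cpp
#include <cassert>
#include <cstdint>

// Debug-only check, loosely modeled on V8's DCHECK; it disappears
// entirely unless DEBUG is defined.
#ifdef DEBUG
#define MY_DCHECK(cond) assert(cond)
#else
#define MY_DCHECK(cond) ((void)0)
#endif

constexpr bool kThirdPartyHeap = false;  // stand-in for V8_ENABLE_THIRD_PARTY_HEAP_BOOL
constexpr uintptr_t kPageAlignmentMask = (uintptr_t{1} << 18) - 1;  // illustrative

uintptr_t PageFromAddress(uintptr_t addr) {
  MY_DCHECK(!kThirdPartyHeap);  // page math is invalid when TPH owns allocation
  return addr & ~kPageAlignmentMask;
}
```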
@@ -70,6 +70,7 @@ TEST(WeakReferencesBasic) {
 }
 
 TEST(WeakReferencesOldToOld) {
+  if (FLAG_enable_third_party_heap) return;
   // Like WeakReferencesBasic, but the updated weak slot is in the old space,
   // and referring to an old space object.
   ManualGCScope manual_gc_scope;
......
@@ -3881,6 +3881,7 @@ TEST(ContextIsolation) {
 // Tests that when a native context that's being filtered is moved, we continue
 // to track its execution.
 TEST(ContextFilterMovedNativeContext) {
+  if (i::FLAG_enable_third_party_heap) return;
   i::FLAG_allow_natives_syntax = true;
   i::FLAG_manual_evacuation_candidates_selection = true;
   LocalContext env;
......
@@ -1837,6 +1837,7 @@ TEST(NativeSnapshotObjectId) {
 }
 
 TEST(NativeSnapshotObjectIdMoving) {
+  if (i::FLAG_enable_third_party_heap) return;
   // Required to allow moving specific objects.
   i::FLAG_manual_evacuation_candidates_selection = true;
......
@@ -286,6 +286,7 @@ bool SequentialUnmapperTest::old_flag_;
 // See v8:5945.
 TEST_F(SequentialUnmapperTest, UnmapOnTeardownAfterAlreadyFreeingPooled) {
+  if (FLAG_enable_third_party_heap) return;
   Page* page = allocator()->AllocatePage(
       MemoryChunkLayout::AllocatableMemoryInDataPage(),
       static_cast<PagedSpace*>(heap()->old_space()),
@@ -314,6 +315,7 @@ TEST_F(SequentialUnmapperTest, UnmapOnTeardownAfterAlreadyFreeingPooled) {
 // See v8:5945.
 TEST_F(SequentialUnmapperTest, UnmapOnTeardown) {
+  if (FLAG_enable_third_party_heap) return;
   Page* page = allocator()->AllocatePage(
       MemoryChunkLayout::AllocatableMemoryInDataPage(),
       static_cast<PagedSpace*>(heap()->old_space()),
......