Commit 8e8fe475 authored by Steve Blackburn, committed by Commit Bot

[heap] Introduce third-party heap interface

This CL adds build flags for plugging in a third-party heap implementation.
Additionally, it redirects allocation requests to the third-party heap when
the flags are on.
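
For example, an embedder could enable the new flags in args.gn; the library
name and source path below are hypothetical placeholders, not part of this CL:

  v8_enable_third_party_heap = true
  v8_third_party_heap_libs = [ "my_heap" ]
  v8_third_party_heap_files = [ "//third_party/my_heap/heap-api-impl.cc" ]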

Bug: v8:9533

Change-Id: I7ef300ca9dc2b5f498a13211611ae4b4b3df8fa0
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1928860
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#65114}
parent caf005e8
@@ -181,6 +181,15 @@ declare_args() {
# Enable lazy source positions by default.
v8_enable_lazy_source_positions = true
# Enable third-party heap library
v8_enable_third_party_heap = false
# Libraries used by the third-party heap
v8_third_party_heap_libs = []
# Source files used by the third-party heap
v8_third_party_heap_files = []
# Disable write barriers when GCs are non-incremental and
# the heap has a single generation.
v8_disable_write_barriers = false
@@ -436,6 +445,9 @@ config("features") {
if (v8_disable_write_barriers) {
defines += [ "V8_DISABLE_WRITE_BARRIERS" ]
}
if (v8_enable_third_party_heap) {
defines += [ "V8_ENABLE_THIRD_PARTY_HEAP" ]
}
if (v8_use_external_startup_data) {
defines += [ "V8_USE_EXTERNAL_STARTUP_DATA" ]
}
@@ -2940,6 +2952,10 @@ v8_source_set("v8_base_without_compiler") {
"src/zone/zone.h",
]
if (v8_enable_third_party_heap) {
sources += v8_third_party_heap_files
}
if (v8_check_header_includes) {
# This file will be generated by tools/generate-header-include-checks.py
# if the "check_v8_header_includes" gclient variable is set.
@@ -3289,13 +3305,19 @@ v8_source_set("v8_base_without_compiler") {
deps += [ ":postmortem-metadata" ]
}
libs = []
if (v8_enable_third_party_heap) {
libs += v8_third_party_heap_libs
}
# Platforms that don't have CAS support need to link the atomic library
# to implement atomic memory access.
if (v8_current_cpu == "mips" || v8_current_cpu == "mipsel" ||
v8_current_cpu == "mips64" || v8_current_cpu == "mips64el" ||
v8_current_cpu == "ppc" || v8_current_cpu == "ppc64" ||
v8_current_cpu == "s390" || v8_current_cpu == "s390x") {
libs = [ "atomic" ]
libs += [ "atomic" ]
}
}
......
@@ -23,27 +23,32 @@ inline Isolate* GetIsolateForPtrCompr(HeapObject object) {
}
V8_INLINE Heap* GetHeapFromWritableObject(HeapObject object) {
#ifdef V8_COMPRESS_POINTERS
#if defined V8_COMPRESS_POINTERS || defined V8_ENABLE_THIRD_PARTY_HEAP
return GetIsolateFromWritableObject(object)->heap();
#else
heap_internals::MemoryChunk* chunk =
heap_internals::MemoryChunk::FromHeapObject(object);
return chunk->GetHeap();
#endif // V8_COMPRESS_POINTERS
#endif // V8_COMPRESS_POINTERS || V8_ENABLE_THIRD_PARTY_HEAP
}
V8_INLINE Isolate* GetIsolateFromWritableObject(HeapObject object) {
#ifdef V8_COMPRESS_POINTERS
#ifdef V8_ENABLE_THIRD_PARTY_HEAP
return Heap::GetIsolateFromWritableObject(object);
#elif defined V8_COMPRESS_POINTERS
Isolate* isolate = Isolate::FromRoot(GetIsolateRoot(object.ptr()));
DCHECK_NOT_NULL(isolate);
return isolate;
#else
return Isolate::FromHeap(GetHeapFromWritableObject(object));
#endif // V8_COMPRESS_POINTERS
#endif // V8_COMPRESS_POINTERS, V8_ENABLE_THIRD_PARTY_HEAP
}
V8_INLINE bool GetIsolateFromHeapObject(HeapObject object, Isolate** isolate) {
#ifdef V8_COMPRESS_POINTERS
#ifdef V8_ENABLE_THIRD_PARTY_HEAP
*isolate = Heap::GetIsolateFromWritableObject(object);
return true;
#elif defined V8_COMPRESS_POINTERS
*isolate = GetIsolateFromWritableObject(object);
return true;
#else
@@ -55,7 +60,7 @@ V8_INLINE bool GetIsolateFromHeapObject(HeapObject object, Isolate** isolate) {
}
*isolate = Isolate::FromHeap(chunk->GetHeap());
return true;
#endif // V8_COMPRESS_POINTERS
#endif // V8_COMPRESS_POINTERS, V8_ENABLE_THIRD_PARTY_HEAP
}
} // namespace internal
......
@@ -313,6 +313,15 @@ DEFINE_IMPLICATION(lite_mode, jitless)
DEFINE_IMPLICATION(lite_mode, lazy_feedback_allocation)
DEFINE_IMPLICATION(lite_mode, optimize_for_size)
#ifdef V8_ENABLE_THIRD_PARTY_HEAP
#define V8_ENABLE_THIRD_PARTY_HEAP_BOOL true
#else
#define V8_ENABLE_THIRD_PARTY_HEAP_BOOL false
#endif
DEFINE_BOOL_READONLY(enable_third_party_heap, V8_ENABLE_THIRD_PARTY_HEAP_BOOL,
"Use third-party heap")
#ifdef V8_DISABLE_WRITE_BARRIERS
#define V8_DISABLE_WRITE_BARRIERS_BOOL true
#else
......
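The V8_ENABLE_THIRD_PARTY_HEAP_BOOL macro defined above lets the rest of the
code branch on an ordinary C++ constant instead of sprinkling #ifdefs: the
compiler folds away the dead branch, yet both branches are still parsed and
type-checked in every build configuration. A minimal sketch of the pattern,
with a made-up feature name rather than V8 code:

  #ifdef ENABLE_FEATURE
  #define ENABLE_FEATURE_BOOL true
  #else
  #define ENABLE_FEATURE_BOOL false
  #endif

  int Dispatch(int x) {
    // An ordinary `if` on a compile-time constant: the optimizer removes the
    // dead branch, but unlike #ifdef, the inactive code must still compile.
    if (ENABLE_FEATURE_BOOL) return x * 2;
    return x;
  }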
@@ -149,7 +149,8 @@ MaybeHandle<Code> Factory::CodeBuilder::BuildInternal(
if (is_executable_) {
DCHECK(IsAligned(code->address(), kCodeAlignment));
DCHECK_IMPLIES(
!heap->memory_allocator()->code_range().is_empty(),
!V8_ENABLE_THIRD_PARTY_HEAP_BOOL &&
!heap->memory_allocator()->code_range().is_empty(),
heap->memory_allocator()->code_range().contains(code->address()));
}
@@ -1894,7 +1895,8 @@ Map Factory::InitializeMap(Map map, InstanceType type, int instance_size,
map.set_constructor_or_backpointer(*null_value(), SKIP_WRITE_BARRIER);
map.set_instance_size(instance_size);
if (map.IsJSObjectMap()) {
DCHECK(!ReadOnlyHeap::Contains(map));
DCHECK_IMPLIES(!V8_ENABLE_THIRD_PARTY_HEAP_BOOL,
!ReadOnlyHeap::Contains(map));
map.SetInObjectPropertiesStartInWords(instance_size / kTaggedSize -
inobject_properties);
DCHECK_EQ(map.GetInObjectProperties(), inobject_properties);
@@ -2612,8 +2614,7 @@ Handle<Code> Factory::NewOffHeapTrampolineFor(Handle<Code> code,
// The trampoline code object must inherit specific flags from the original
// builtin (e.g. the safepoint-table offset). We set them manually here.
{
MemoryChunk* chunk = MemoryChunk::FromHeapObject(*result);
CodePageMemoryModificationScope code_allocation(chunk);
CodePageMemoryModificationScope code_allocation(*result);
const bool set_is_off_heap_trampoline = true;
const int stack_slots =
......
@@ -12,6 +12,7 @@
// write barrier here!
#include "src/heap/heap-write-barrier.h"
#include "src/heap/heap.h"
#include "src/heap/third-party/heap-api.h"
#include "src/base/atomic-utils.h"
#include "src/base/platform/platform.h"
@@ -194,33 +195,38 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationType type,
} else {
allocation = new_space_->AllocateRaw(size_in_bytes, alignment, origin);
}
} else if (AllocationType::kOld == type) {
} else if (AllocationType::kOld == type && !V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
if (large_object) {
allocation = lo_space_->AllocateRaw(size_in_bytes);
} else {
allocation = old_space_->AllocateRaw(size_in_bytes, alignment, origin);
}
} else if (AllocationType::kCode == type) {
if (size_in_bytes <= code_space()->AreaSize() && !large_object) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
allocation = tp_heap_->AllocateCode(size_in_bytes);
} else if (size_in_bytes <= code_space()->AreaSize() && !large_object) {
allocation = code_space_->AllocateRawUnaligned(size_in_bytes);
} else {
allocation = code_lo_space_->AllocateRaw(size_in_bytes);
}
} else if (AllocationType::kMap == type) {
} else if (AllocationType::kMap == type && !V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
allocation = map_space_->AllocateRawUnaligned(size_in_bytes);
} else if (AllocationType::kReadOnly == type) {
} else if (AllocationType::kReadOnly == type &&
!V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
DCHECK(isolate_->serializer_enabled());
DCHECK(!large_object);
DCHECK(CanAllocateInReadOnlySpace());
DCHECK_EQ(AllocationOrigin::kRuntime, origin);
allocation =
read_only_space_->AllocateRaw(size_in_bytes, alignment, origin);
} else if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
allocation = tp_heap_->Allocate(size_in_bytes);
} else {
UNREACHABLE();
}
if (allocation.To(&object)) {
if (AllocationType::kCode == type) {
if (AllocationType::kCode == type && !V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
// Unprotect the memory chunk of the object if it was not unprotected
// already.
UnprotectAndRegisterMemoryChunk(object);
@@ -356,6 +362,7 @@ bool Heap::InYoungGeneration(MaybeObject object) {
// static
bool Heap::InYoungGeneration(HeapObject heap_object) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return false;
bool result = MemoryChunk::FromHeapObject(heap_object)->InYoungGeneration();
#ifdef DEBUG
// If in the young generation, then check we're either not in the middle of
@@ -408,6 +415,9 @@ bool Heap::InOldSpace(Object object) { return old_space_->Contains(object); }
// static
Heap* Heap::FromWritableHeapObject(HeapObject obj) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
return Heap::GetIsolateFromWritableObject(obj)->heap();
}
MemoryChunk* chunk = MemoryChunk::FromHeapObject(obj);
// RO_SPACE can be shared between heaps, so we can't use RO_SPACE objects to
// find a heap. The exception is when the ReadOnlySpace is writeable, during
@@ -640,6 +650,14 @@ CodePageCollectionMemoryModificationScope::
}
}
#ifdef V8_ENABLE_THIRD_PARTY_HEAP
CodePageMemoryModificationScope::CodePageMemoryModificationScope(Code code)
: chunk_(nullptr), scope_active_(false) {}
#else
CodePageMemoryModificationScope::CodePageMemoryModificationScope(Code code)
: CodePageMemoryModificationScope(MemoryChunk::FromHeapObject(code)) {}
#endif
CodePageMemoryModificationScope::CodePageMemoryModificationScope(
MemoryChunk* chunk)
: chunk_(chunk),
......
@@ -63,6 +63,7 @@ struct MemoryChunk {
V8_INLINE bool IsMarking() const { return GetFlags() & kMarkingBit; }
V8_INLINE bool InYoungGeneration() const {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return false;
constexpr uintptr_t kYoungGenerationMask = kFromPageBit | kToPageBit;
return GetFlags() & kYoungGenerationMask;
}
@@ -142,6 +143,7 @@ inline void WriteBarrierForCode(Code host) {
inline void GenerationalBarrier(HeapObject object, ObjectSlot slot,
Object value) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
DCHECK(!HasWeakHeapObjectTag(*slot));
DCHECK(!HasWeakHeapObjectTag(value));
if (!value.IsHeapObject()) return;
@@ -151,6 +153,7 @@ inline void GenerationalBarrier(HeapObject object, ObjectSlot slot,
inline void GenerationalEphemeronKeyBarrier(EphemeronHashTable table,
ObjectSlot slot, Object value) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
DCHECK(!HasWeakHeapObjectTag(*slot));
DCHECK(!HasWeakHeapObjectTag(value));
DCHECK(value.IsHeapObject());
@@ -160,6 +163,7 @@ inline void GenerationalEphemeronKeyBarrier(EphemeronHashTable table,
inline void GenerationalBarrier(HeapObject object, MaybeObjectSlot slot,
MaybeObject value) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
HeapObject value_heap_object;
if (!value->GetHeapObject(&value_heap_object)) return;
heap_internals::GenerationalBarrierInternal(object, slot.address(),
@@ -168,6 +172,7 @@ inline void GenerationalBarrier(HeapObject object, MaybeObjectSlot slot,
inline void GenerationalBarrierForCode(Code host, RelocInfo* rinfo,
HeapObject object) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
heap_internals::MemoryChunk* object_chunk =
heap_internals::MemoryChunk::FromHeapObject(object);
if (!object_chunk->InYoungGeneration()) return;
@@ -175,6 +180,7 @@ inline void GenerationalBarrierForCode(Code host, RelocInfo* rinfo,
}
inline void MarkingBarrier(HeapObject object, ObjectSlot slot, Object value) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
DCHECK_IMPLIES(slot.address() != kNullAddress, !HasWeakHeapObjectTag(*slot));
DCHECK(!HasWeakHeapObjectTag(value));
if (!value.IsHeapObject()) return;
@@ -184,6 +190,7 @@ inline void MarkingBarrier(HeapObject object, ObjectSlot slot, Object value) {
inline void MarkingBarrier(HeapObject object, MaybeObjectSlot slot,
MaybeObject value) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
HeapObject value_heap_object;
if (!value->GetHeapObject(&value_heap_object)) return;
heap_internals::MarkingBarrierInternal(object, slot.address(),
@@ -192,6 +199,7 @@ inline void MarkingBarrier(HeapObject object, MaybeObjectSlot slot,
inline void MarkingBarrierForCode(Code host, RelocInfo* rinfo,
HeapObject object) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
DCHECK(!HasWeakHeapObjectTag(object));
heap_internals::MemoryChunk* object_chunk =
heap_internals::MemoryChunk::FromHeapObject(object);
@@ -202,6 +210,7 @@ inline void MarkingBarrierForDescriptorArray(Heap* heap, HeapObject host,
inline void MarkingBarrierForDescriptorArray(Heap* heap, HeapObject host,
HeapObject descriptor_array,
int number_of_own_descriptors) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
heap_internals::MemoryChunk* chunk =
heap_internals::MemoryChunk::FromHeapObject(descriptor_array);
if (!chunk->IsMarking()) return;
......
@@ -81,6 +81,13 @@
namespace v8 {
namespace internal {
#ifdef V8_ENABLE_THIRD_PARTY_HEAP
Isolate* Heap::GetIsolateFromWritableObject(HeapObject object) {
return reinterpret_cast<Isolate*>(
third_party_heap::Heap::GetIsolate(object.address()));
}
#endif
// These are outside the Heap class so they can be forward-declared
// in heap-write-barrier-inl.h.
bool Heap_PageFlagsAreConsistent(HeapObject object) {
@@ -1058,8 +1065,7 @@ void Heap::ProcessPretenuringFeedback() {
}
void Heap::InvalidateCodeDeoptimizationData(Code code) {
MemoryChunk* chunk = MemoryChunk::FromHeapObject(code);
CodePageMemoryModificationScope modification_scope(chunk);
CodePageMemoryModificationScope modification_scope(code);
code.set_deoptimization_data(ReadOnlyRoots(this).empty_fixed_array());
}
@@ -2859,7 +2865,8 @@ HeapObject Heap::CreateFillerObjectAt(Address addr, int size,
(size / kTaggedSize) - 2);
}
}
if (clear_slots_mode == ClearRecordedSlots::kYes) {
if (clear_slots_mode == ClearRecordedSlots::kYes &&
!V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
ClearRecordedSlotRange(addr, addr + size);
}
@@ -2884,6 +2891,12 @@ bool Heap::CanMoveObjectStart(HeapObject object) {
}
bool Heap::IsImmovable(HeapObject object) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
// TODO(steveblackburn): For now all objects are immovable.
// Will need to revisit once moving is supported.
return true;
}
MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
return chunk->NeverEvacuate() || IsLargeObject(object);
}
@@ -3849,6 +3862,9 @@ const char* Heap::GarbageCollectionReasonToString(
}
bool Heap::Contains(HeapObject value) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
return true;
}
if (ReadOnlyHeap::Contains(value)) {
return false;
}
@@ -4862,7 +4878,9 @@ HeapObject Heap::EnsureImmovableCode(HeapObject heap_object, int object_size) {
// in the first page of code space, in large object space, or (during
// snapshot creation) the containing page is marked as immovable.
DCHECK(!heap_object.is_null());
#ifndef V8_ENABLE_THIRD_PARTY_HEAP
DCHECK(code_space_->Contains(heap_object));
#endif
DCHECK_GE(object_size, 0);
if (!Heap::IsImmovable(heap_object)) {
if (isolate()->serializer_enabled() ||
@@ -4966,6 +4984,11 @@ void Heap::SetUp() {
allocation_timeout_ = NextAllocationTimeout();
#endif
#ifdef V8_ENABLE_THIRD_PARTY_HEAP
tp_heap_ =
third_party_heap::Heap::New(reinterpret_cast<v8::Isolate*>(isolate()));
#endif
// Initialize heap spaces and initial maps and objects.
//
// If the heap is not yet configured (e.g. through the API), configure it.
@@ -6128,6 +6151,11 @@ Code Heap::GcSafeFindCodeForInnerPointer(Address inner_pointer) {
Code code = InstructionStream::TryLookupCode(isolate(), inner_pointer);
if (!code.is_null()) return code;
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
Address start = tp_heap_->GetObjectFromInnerPointer(inner_pointer);
return GcSafeCastToCode(HeapObject::FromAddress(start), inner_pointer);
}
// Check if the inner pointer points into a large object chunk.
LargePage* large_page = code_lo_space()->FindPage(inner_pointer);
if (large_page != nullptr) {
@@ -6357,6 +6385,9 @@ void Heap::MarkingBarrierForDescriptorArraySlow(Heap* heap, HeapObject host,
}
bool Heap::PageFlagsAreConsistent(HeapObject object) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
return true;
}
MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
heap_internals::MemoryChunk* slim_chunk =
heap_internals::MemoryChunk::FromHeapObject(object);
......
@@ -45,6 +45,10 @@ class HeapTester;
class TestMemoryAllocatorScope;
} // namespace heap
namespace third_party_heap {
class Heap;
}
class IncrementalMarking;
class BackingStore;
class JSArrayBuffer;
@@ -1365,6 +1369,8 @@ class Heap {
// Calculates the nof entries for the full sized number to string cache.
inline int MaxNumberToStringCacheSize() const;
static Isolate* GetIsolateFromWritableObject(HeapObject object);
private:
using ExternalStringTableUpdaterCallback = String (*)(Heap* heap,
FullObjectSlot pointer);
@@ -2084,6 +2090,8 @@ class Heap {
std::vector<HeapObjectAllocationTracker*> allocation_trackers_;
std::unique_ptr<third_party_heap::Heap> tp_heap_;
// Classes in "heap" can be friends.
friend class AlwaysAllocateScope;
friend class ArrayBufferCollector;
@@ -2197,6 +2205,7 @@ class CodePageCollectionMemoryModificationScope {
class CodePageMemoryModificationScope {
public:
explicit inline CodePageMemoryModificationScope(MemoryChunk* chunk);
explicit inline CodePageMemoryModificationScope(Code object);
inline ~CodePageMemoryModificationScope();
private:
......
@@ -163,6 +163,9 @@ bool NewSpace::ToSpaceContains(Object o) { return to_space_.Contains(o); }
bool NewSpace::FromSpaceContains(Object o) { return from_space_.Contains(o); }
bool PagedSpace::Contains(Address addr) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
return true;
}
return Page::FromAddress(addr)->owner() == this;
}
@@ -205,14 +208,18 @@ bool PagedSpace::TryFreeLast(HeapObject object, int object_size) {
void MemoryChunk::IncrementExternalBackingStoreBytes(
ExternalBackingStoreType type, size_t amount) {
#ifndef V8_ENABLE_THIRD_PARTY_HEAP
base::CheckedIncrement(&external_backing_store_bytes_[type], amount);
owner()->IncrementExternalBackingStoreBytes(type, amount);
#endif
}
void MemoryChunk::DecrementExternalBackingStoreBytes(
ExternalBackingStoreType type, size_t amount) {
#ifndef V8_ENABLE_THIRD_PARTY_HEAP
base::CheckedDecrement(&external_backing_store_bytes_[type], amount);
owner()->DecrementExternalBackingStoreBytes(type, amount);
#endif
}
void MemoryChunk::MoveExternalBackingStoreBytes(ExternalBackingStoreType type,
......
@@ -1535,6 +1535,7 @@ void MemoryChunk::RegisterObjectWithInvalidatedSlots(HeapObject object) {
}
void MemoryChunk::InvalidateRecordedSlots(HeapObject object) {
if (V8_DISABLE_WRITE_BARRIERS_BOOL) return;
if (heap()->incremental_marking()->IsCompacting()) {
// We cannot check slot_set_[OLD_TO_OLD] here, since the
// concurrent markers might insert slots concurrently.
@@ -3924,6 +3925,10 @@ void ReadOnlySpace::RepairFreeListsAfterDeserialization() {
}
void ReadOnlySpace::ClearStringPaddingIfNeeded() {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
// TODO(ulan): Revisit this once third-party heap supports iteration.
return;
}
if (is_string_padding_cleared_) return;
ReadOnlyHeapObjectIterator iterator(this);
......
// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_THIRD_PARTY_HEAP_API_H_
#define V8_HEAP_THIRD_PARTY_HEAP_API_H_
#include "include/v8.h"
#include "src/heap/heap.h"
namespace v8 {
namespace internal {
namespace third_party_heap {
class Heap {
public:
static std::unique_ptr<Heap> New(v8::Isolate* isolate);
static v8::Isolate* GetIsolate(Address address);
AllocationResult Allocate(size_t size_in_bytes);
AllocationResult AllocateCode(size_t size_in_bytes);
Address GetObjectFromInnerPointer(Address inner_pointer);
void CollectGarbage();
};
} // namespace third_party_heap
} // namespace internal
} // namespace v8
#endif // V8_HEAP_THIRD_PARTY_HEAP_API_H_
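
To illustrate the contract this header defines, here is a self-contained
sketch of the rough shape an implementation might take. The simplified
Address and AllocationResult types and the StubHeap name are stand-ins for
illustration only, not V8's types:

  #include <cstddef>
  #include <cstdint>
  #include <cstdlib>
  #include <memory>

  using Address = std::uintptr_t;  // stand-in for V8's Address
  struct AllocationResult {        // simplified stand-in for V8's type
    Address address;
    bool ok;
  };

  class StubHeap {
   public:
    static std::unique_ptr<StubHeap> New() {
      return std::make_unique<StubHeap>();
    }
    AllocationResult Allocate(std::size_t size_in_bytes) {
      // A real heap would manage its own pages; malloc is a placeholder.
      void* raw = std::malloc(size_in_bytes);
      return {reinterpret_cast<Address>(raw), raw != nullptr};
    }
    AllocationResult AllocateCode(std::size_t size_in_bytes) {
      // A real implementation must hand out executable memory for code.
      return Allocate(size_in_bytes);
    }
    void CollectGarbage() {}  // No-op: this stub never reclaims memory.
  };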