Commit 76968a2f authored by Jakob Kummerow, committed by Commit Bot

[ubsan] Introduce ObjectPtr and port PropertyArray

This CL gives a first look at the new way to represent tagged object
pointers in C++.
It adds infrastructure in Handles and the garbage collector to deal
with the new object type, and ports a first class to the new world.

Design overview: https://goo.gl/Ph4CGz
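
For orientation, a condensed sketch of the new representation (abridged from the
ObjectPtr class this CL adds in src/objects/heap-object.h; see the full diff
below): tagged object pointers become small value types wrapping an Address
instead of pointers to fake C++ objects, which is the kind of undefined
behaviour the [ubsan] effort is removing.

  // Sketch only; the real class lives in src/objects/heap-object.h below.
  class ObjectPtr {
   public:
    ObjectPtr() : ptr_(kNullAddress) {}
    explicit ObjectPtr(Address ptr) : ptr_(ptr) {}
    // Temporary bridge to the old representation during the migration.
    operator Object*() const { return reinterpret_cast<Object*>(ptr()); }
    // Returns the tagged "(heap) object pointer" representation.
    Address ptr() const { return ptr_; }
   private:
    Address ptr_;
  };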

Bug: v8:3770
Change-Id: I3e37fbf399612f95540cb386710a595069fb9d55
Reviewed-on: https://chromium-review.googlesource.com/c/1292673
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Jakob Kummerow <jkummerow@chromium.org>
Cr-Commit-Position: refs/heads/master@{#56964}
parent e76e44f0
@@ -880,6 +880,8 @@ action("postmortem-metadata") {
     "src/objects/data-handler-inl.h",
     "src/objects/fixed-array-inl.h",
     "src/objects/fixed-array.h",
+    "src/objects/heap-object-inl.h",
+    "src/objects/heap-object.h",
     "src/objects/js-array-inl.h",
     "src/objects/js-array.h",
     "src/objects/js-array-buffer-inl.h",
@@ -2212,6 +2214,8 @@ v8_source_set("v8_base") {
     "src/objects/frame-array.h",
     "src/objects/hash-table-inl.h",
     "src/objects/hash-table.h",
+    "src/objects/heap-object-inl.h",
+    "src/objects/heap-object.h",
     "src/objects/intl-objects.cc",
     "src/objects/intl-objects.h",
     "src/objects/js-array-buffer-inl.h",
......
@@ -190,6 +190,7 @@ struct MachineRepresentationOf<
 template <class T>
 struct is_valid_type_tag {
   static const bool value = std::is_base_of<Object, T>::value ||
+                            std::is_base_of<ObjectPtr, T>::value ||
                             std::is_base_of<UntaggedT, T>::value ||
                             std::is_base_of<MaybeObject, T>::value ||
                             std::is_same<ExternalReference, T>::value;
@@ -314,9 +315,15 @@ typedef ZoneVector<CodeAssemblerVariable*> CodeAssemblerVariableList;
 typedef std::function<void()> CodeAssemblerCallback;
+// TODO(3770): The HeapObject/HeapObjectPtr dance is temporary (while the
+// incremental transition is in progress, we want to pretend that subclasses
+// of HeapObjectPtr are also subclasses of Object/HeapObject); it can be
+// removed when the migration is complete.
 template <class T, class U>
 struct is_subtype {
-  static const bool value = std::is_base_of<U, T>::value;
+  static const bool value = std::is_base_of<U, T>::value ||
+                            (std::is_base_of<U, HeapObject>::value &&
+                             std::is_base_of<HeapObjectPtr, T>::value);
 };
 template <class T1, class T2, class U>
 struct is_subtype<UnionT<T1, T2>, U> {
@@ -395,6 +402,7 @@ struct types_have_common_values<MaybeObject, T> {
 // TNode<T> is an SSA value with the static type tag T, which is one of the
 // following:
 // - a subclass of internal::Object represents a tagged type
+// - a subclass of internal::ObjectPtr represents a tagged type
 // - a subclass of internal::UntaggedT represents an untagged type
 // - ExternalReference
 // - PairT<T1, T2> for an operation returning two values, with types T1
@@ -630,7 +638,8 @@ class V8_EXPORT_PRIVATE CodeAssembler {
     static_assert(types_have_common_values<A, PreviousType>::value,
                   "Incompatible types: this cast can never succeed.");
-    static_assert(std::is_convertible<TNode<A>, TNode<Object>>::value,
+    static_assert(std::is_convertible<TNode<A>, TNode<Object>>::value ||
+                      std::is_convertible<TNode<A>, TNode<ObjectPtr>>::value,
                   "Coercion to untagged values cannot be "
                   "checked.");
     static_assert(
......
@@ -1545,6 +1545,10 @@ V8_INLINE static bool HasWeakHeapObjectTag(const Object* value) {
                                  kWeakHeapObjectTag);
 }
+V8_INLINE static bool HasWeakHeapObjectTag(const Address value) {
+  return (value & kHeapObjectTagMask) == kWeakHeapObjectTag;
+}
 V8_INLINE static bool IsClearedWeakHeapObject(const MaybeObject* value) {
   return reinterpret_cast<intptr_t>(value) == kClearedWeakHeapObject;
 }
......
@@ -17,10 +17,16 @@ HandleBase::HandleBase(Address object, Isolate* isolate)
 // Allocate a new handle for the object, do not canonicalize.
 template <typename T>
+template <typename T1, typename>
 Handle<T> Handle<T>::New(T* object, Isolate* isolate) {
   return Handle(reinterpret_cast<T**>(
       HandleScope::CreateHandle(isolate, reinterpret_cast<Address>(object))));
 }
+template <typename T>
+template <typename T1, typename>
+Handle<T> Handle<T>::New(T object, Isolate* isolate) {
+  return Handle(HandleScope::CreateHandle(isolate, object.ptr()));
+}
 template <typename T>
 template <typename S>
@@ -38,14 +44,27 @@ HandleScope::HandleScope(Isolate* isolate) {
 }
 template <typename T>
+template <typename T1, typename>
 Handle<T>::Handle(T* object, Isolate* isolate)
     : HandleBase(reinterpret_cast<Address>(object), isolate) {}
 template <typename T>
+template <typename T1, typename>
+Handle<T>::Handle(T object, Isolate* isolate)
+    : HandleBase(object.ptr(), isolate) {}
+template <typename T, typename = typename std::enable_if<
+                          std::is_base_of<Object, T>::value>::type>
 V8_INLINE Handle<T> handle(T* object, Isolate* isolate) {
   return Handle<T>(object, isolate);
 }
+template <typename T, typename = typename std::enable_if<
+                          std::is_base_of<ObjectPtr, T>::value>::type>
+V8_INLINE Handle<T> handle(T object, Isolate* isolate) {
+  return Handle<T>(object, isolate);
+}
 template <typename T>
 inline std::ostream& operator<<(std::ostream& os, Handle<T> handle) {
   return os << Brief(*handle);
......
@@ -12,6 +12,11 @@
 #include "src/base/macros.h"
 #include "src/checks.h"
 #include "src/globals.h"
+// TODO(3770): The objects.h include is required to make the
+// std::enable_if<std::is_base_of<...>> conditions below work. Once the
+// migration is complete, we should be able to get by with just forward
+// declarations.
+#include "src/objects.h"
 #include "src/zone/zone.h"
 namespace v8 {
@@ -23,7 +28,7 @@ class HandleScopeImplementer;
 class Isolate;
 template <typename T>
 class MaybeHandle;
-class Object;
+class ObjectPtr;
 // ----------------------------------------------------------------------------
 // Base class for Handle instantiations. Don't use directly.
@@ -105,14 +110,29 @@ class Handle final : public HandleBase {
   }
   V8_INLINE explicit Handle(Address* location) : HandleBase(location) {
     // Type check:
-    static_assert(std::is_convertible<T*, Object*>::value,
+    static_assert(std::is_convertible<T*, Object*>::value ||
+                      std::is_convertible<T, ObjectPtr>::value,
                   "static type violation");
   }
+  // Here and below: for object types T that still derive from Object,
+  // enable the overloads that consume/produce a T*; for types already
+  // ported to deriving from ObjectPtr, use non-pointer T values.
+  // TODO(3770): The T* versions should disappear eventually.
+  template <typename T1 = T, typename = typename std::enable_if<
+                                 std::is_base_of<Object, T1>::value>::type>
   V8_INLINE Handle(T* object, Isolate* isolate);
+  template <typename T1 = T, typename = typename std::enable_if<
+                                 std::is_base_of<ObjectPtr, T1>::value>::type>
+  V8_INLINE Handle(T object, Isolate* isolate);
   // Allocate a new handle for the object, do not canonicalize.
+  template <typename T1 = T, typename = typename std::enable_if<
+                                 std::is_base_of<Object, T1>::value>::type>
   V8_INLINE static Handle<T> New(T* object, Isolate* isolate);
+  template <typename T1 = T, typename = typename std::enable_if<
+                                 std::is_base_of<ObjectPtr, T1>::value>::type>
+  V8_INLINE static Handle<T> New(T object, Isolate* isolate);
   // Constructor for handling automatic up casting.
   // Ex. Handle<JSFunction> can be passed when Handle<Object> is expected.
@@ -120,12 +140,34 @@ class Handle final : public HandleBase {
                             std::is_convertible<S*, T*>::value>::type>
   V8_INLINE Handle(Handle<S> handle) : HandleBase(handle) {}
-  V8_INLINE T* operator->() const { return operator*(); }
+  // The NeverReadOnlySpaceObject special-case is needed for the
+  // ContextFromNeverReadOnlySpaceObject helper function in api.cc.
+  template <typename T1 = T,
+            typename = typename std::enable_if<
+                std::is_base_of<Object, T1>::value ||
+                std::is_base_of<NeverReadOnlySpaceObject, T1>::value>::type>
+  V8_INLINE T* operator->() const {
+    return operator*();
+  }
+  template <typename T1 = T, typename = typename std::enable_if<
+                                 std::is_base_of<ObjectPtr, T1>::value>::type>
+  V8_INLINE T operator->() const {
+    return operator*();
+  }
   // Provides the C++ dereference operator.
+  template <typename T1 = T,
+            typename = typename std::enable_if<
+                std::is_base_of<Object, T1>::value ||
+                std::is_base_of<NeverReadOnlySpaceObject, T1>::value>::type>
   V8_INLINE T* operator*() const {
     return reinterpret_cast<T*>(HandleBase::operator*());
   }
+  template <typename T1 = T, typename = typename std::enable_if<
+                                 std::is_base_of<ObjectPtr, T1>::value>::type>
+  V8_INLINE T operator*() const {
+    return T::cast(ObjectPtr(HandleBase::operator*()));
+  }
   // Returns the address to where the raw pointer is stored.
   V8_INLINE T** location() const {
......
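
To illustrate how the dual overloads added to handles.h above get selected
during the transition, a hypothetical call site (variable names are
illustrative; PropertyArray derives from ObjectPtr after this CL, while
JSObject still derives from Object):

  // Given some Handle<JSObject> receiver:
  PropertyArray props = receiver->property_array();   // ported type: passed by value
  Handle<PropertyArray> h1 = handle(props, isolate);  // ObjectPtr-constrained overload
  JSObject* raw = *receiver;                          // unported type: still a raw pointer
  Handle<JSObject> h2 = handle(raw, isolate);         // Object-constrained overload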
@@ -92,11 +92,18 @@ class ConcurrentMarkingVisitor final
         task_id_(task_id),
         embedder_tracing_enabled_(embedder_tracing_enabled) {}
-  template <typename T>
+  template <typename T, typename = typename std::enable_if<
+                            std::is_base_of<Object, T>::value>::type>
   static V8_INLINE T* Cast(HeapObject* object) {
     return T::cast(object);
   }
+  template <typename T, typename = typename std::enable_if<
+                            std::is_base_of<ObjectPtr, T>::value>::type>
+  static V8_INLINE T Cast(HeapObject* object) {
+    return T::cast(object);
+  }
   bool ShouldVisit(HeapObject* object) {
     return marking_state_.GreyToBlack(object);
   }
......
@@ -1934,7 +1934,7 @@ Handle<JSObject> Factory::CopyJSObjectWithAllocationSite(
   // Update properties if necessary.
   if (source->HasFastProperties()) {
-    PropertyArray* properties = source->property_array();
+    PropertyArray properties = source->property_array();
     if (properties->length() > 0) {
       // TODO(gsathya): Do not copy hash code.
       Handle<PropertyArray> prop = CopyArrayWithMap(
@@ -1952,12 +1952,12 @@ Handle<JSObject> Factory::CopyJSObjectWithAllocationSite(
 namespace {
 template <typename T>
-void initialize_length(T* array, int length) {
+void initialize_length(Handle<T> array, int length) {
   array->set_length(length);
 }
 template <>
-void initialize_length<PropertyArray>(PropertyArray* array, int length) {
+void initialize_length<PropertyArray>(Handle<PropertyArray> array, int length) {
   array->initialize_length(length);
 }
@@ -1969,7 +1969,7 @@ Handle<T> Factory::CopyArrayWithMap(Handle<T> src, Handle<Map> map) {
   HeapObject* obj = AllocateRawFixedArray(len, NOT_TENURED);
   obj->set_map_after_allocation(*map, SKIP_WRITE_BARRIER);
-  T* result = T::cast(obj);
+  Handle<T> result(T::cast(obj), isolate());
   DisallowHeapAllocation no_gc;
   WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
@@ -1983,7 +1983,7 @@ Handle<T> Factory::CopyArrayWithMap(Handle<T> src, Handle<Map> map) {
     initialize_length(result, len);
     for (int i = 0; i < len; i++) result->set(i, src->get(i), mode);
   }
-  return Handle<T>(result, isolate());
+  return result;
 }
 template <typename T>
@@ -1996,7 +1996,7 @@ Handle<T> Factory::CopyArrayAndGrow(Handle<T> src, int grow_by,
   HeapObject* obj = AllocateRawFixedArray(new_len, pretenure);
   obj->set_map_after_allocation(src->map(), SKIP_WRITE_BARRIER);
-  T* result = T::cast(obj);
+  Handle<T> result(T::cast(obj), isolate());
   initialize_length(result, new_len);
   // Copy the content.
@@ -2004,7 +2004,7 @@ Handle<T> Factory::CopyArrayAndGrow(Handle<T> src, int grow_by,
   WriteBarrierMode mode = obj->GetWriteBarrierMode(no_gc);
   for (int i = 0; i < old_len; i++) result->set(i, src->get(i), mode);
   MemsetPointer(result->data_start() + old_len, *undefined_value(), grow_by);
-  return Handle<T>(result, isolate());
+  return result;
 }
 Handle<FixedArray> Factory::CopyFixedArrayWithMap(Handle<FixedArray> array,
......
@@ -373,6 +373,21 @@ bool Heap::InNewSpace(HeapObject* heap_object) {
   return result;
 }
+// static
+bool Heap::InNewSpace(HeapObjectPtr heap_object) {
+  bool result = MemoryChunk::FromHeapObject(heap_object)->InNewSpace();
+#ifdef DEBUG
+  // If in NEW_SPACE, then check we're either not in the middle of GC or the
+  // object is in to-space.
+  if (result) {
+    // If the object is in NEW_SPACE, then it's not in RO_SPACE so this is safe.
+    Heap* heap = Heap::FromWritableHeapObject(&heap_object);
+    DCHECK(heap->gc_state_ != NOT_IN_GC || InToSpace(heap_object));
+  }
+#endif
+  return result;
+}
 // static
 bool Heap::InFromSpace(Object* object) {
   DCHECK(!HasWeakHeapObjectTag(object));
@@ -408,6 +423,11 @@ bool Heap::InToSpace(HeapObject* heap_object) {
   return MemoryChunk::FromHeapObject(heap_object)->IsFlagSet(Page::IN_TO_SPACE);
 }
+// static
+bool Heap::InToSpace(HeapObjectPtr heap_object) {
+  return MemoryChunk::FromHeapObject(heap_object)->IsFlagSet(Page::IN_TO_SPACE);
+}
 bool Heap::InOldSpace(Object* object) { return old_space_->Contains(object); }
 bool Heap::InReadOnlySpace(Object* object) {
@@ -435,6 +455,19 @@ Heap* Heap::FromWritableHeapObject(const HeapObject* obj) {
   return heap;
 }
+// static
+Heap* Heap::FromWritableHeapObject(const HeapObjectPtr* obj) {
+  MemoryChunk* chunk = MemoryChunk::FromHeapObject(*obj);
+  // RO_SPACE can be shared between heaps, so we can't use RO_SPACE objects to
+  // find a heap. The exception is when the ReadOnlySpace is writeable, during
+  // bootstrapping, so explicitly allow this case.
+  SLOW_DCHECK(chunk->owner()->identity() != RO_SPACE ||
+              static_cast<ReadOnlySpace*>(chunk->owner())->writable());
+  Heap* heap = chunk->heap();
+  SLOW_DCHECK(heap != nullptr);
+  return heap;
+}
 bool Heap::ShouldBePromoted(Address old_address) {
   Page* page = Page::FromAddress(old_address);
   Address age_mark = new_space_->age_mark();
......
@@ -12,6 +12,7 @@
 #include "src/globals.h"
 #include "src/objects-inl.h"
+#include "src/objects/heap-object.h"
 #include "src/objects/maybe-object-inl.h"
 #include "src/objects/slots.h"
@@ -102,6 +103,16 @@ inline void GenerationalBarrier(HeapObject* object, MaybeObjectSlot slot,
                                               value_heap_object);
 }
+inline void GenerationalBarrier(HeapObjectPtr* object, ObjectSlot slot,
+                                Object* value) {
+  DCHECK(!HasWeakHeapObjectTag(*slot));
+  DCHECK(!HasWeakHeapObjectTag(value));
+  if (!value->IsHeapObject()) return;
+  heap_internals::GenerationalBarrierInternal(
+      reinterpret_cast<HeapObject*>(object->ptr()), slot.address(),
+      HeapObject::cast(value));
+}
 inline void GenerationalBarrierForElements(Heap* heap, FixedArray* array,
                                            int offset, int length) {
   heap_internals::MemoryChunk* array_chunk =
@@ -135,6 +146,16 @@ inline void MarkingBarrier(HeapObject* object, MaybeObjectSlot slot,
                                        value_heap_object);
 }
+inline void MarkingBarrier(HeapObjectPtr* object, ObjectSlot slot,
+                           Object* value) {
+  DCHECK_IMPLIES(slot.address() != kNullAddress, !HasWeakHeapObjectTag(*slot));
+  DCHECK(!HasWeakHeapObjectTag(value));
+  if (!value->IsHeapObject()) return;
+  heap_internals::MarkingBarrierInternal(
+      reinterpret_cast<HeapObject*>(object->ptr()), slot.address(),
+      HeapObject::cast(value));
+}
 inline void MarkingBarrierForElements(Heap* heap, HeapObject* object) {
   heap_internals::MemoryChunk* object_chunk =
       heap_internals::MemoryChunk::FromHeapObject(object);
......
@@ -5,6 +5,8 @@
 #ifndef V8_HEAP_HEAP_WRITE_BARRIER_H_
 #define V8_HEAP_HEAP_WRITE_BARRIER_H_
+#include "include/v8-internal.h"
 namespace v8 {
 namespace internal {
@@ -12,6 +14,7 @@ class Code;
 class FixedArray;
 class Heap;
 class HeapObject;
+class HeapObjectPtr;
 class MaybeObject;
 class MaybeObjectSlot;
 class Object;
@@ -36,6 +39,11 @@ void WriteBarrierForCode(Code* host);
 void GenerationalBarrier(HeapObject* object, ObjectSlot slot, Object* value);
 void GenerationalBarrier(HeapObject* object, MaybeObjectSlot slot,
                          MaybeObject* value);
+// This takes a HeapObjectPtr* (as opposed to a plain HeapObjectPtr)
+// to keep the WRITE_BARRIER macro syntax-compatible to the HeapObject*
+// version above.
+// TODO(3770): This should probably take a HeapObjectPtr eventually.
+void GenerationalBarrier(HeapObjectPtr* object, ObjectSlot slot, Object* value);
 void GenerationalBarrierForElements(Heap* heap, FixedArray* array, int offset,
                                     int length);
 void GenerationalBarrierForCode(Code* host, RelocInfo* rinfo,
@@ -45,6 +53,11 @@ void GenerationalBarrierForCode(Code* host, RelocInfo* rinfo,
 void MarkingBarrier(HeapObject* object, ObjectSlot slot, Object* value);
 void MarkingBarrier(HeapObject* object, MaybeObjectSlot slot,
                     MaybeObject* value);
+// This takes a HeapObjectPtr* (as opposed to a plain HeapObjectPtr)
+// to keep the WRITE_BARRIER macro syntax-compatible to the HeapObject*
+// version above.
+// TODO(3770): This should probably take a HeapObjectPtr eventually.
+void MarkingBarrier(HeapObjectPtr* object, ObjectSlot slot, Object* value);
 void MarkingBarrierForElements(Heap* heap, HeapObject* object);
 void MarkingBarrierForCode(Code* host, RelocInfo* rinfo, HeapObject* object);
......
@@ -61,6 +61,7 @@ class GCIdleTimeHeapState;
 class GCTracer;
 class HeapController;
 class HeapObjectAllocationTracker;
+class HeapObjectPtr;
 class HeapObjectsFilter;
 class HeapStats;
 class HistogramTimer;
@@ -904,12 +905,14 @@ class Heap {
   static inline bool InNewSpace(Object* object);
   static inline bool InNewSpace(MaybeObject* object);
   static inline bool InNewSpace(HeapObject* heap_object);
+  static inline bool InNewSpace(HeapObjectPtr heap_object);
   static inline bool InFromSpace(Object* object);
   static inline bool InFromSpace(MaybeObject* object);
   static inline bool InFromSpace(HeapObject* heap_object);
   static inline bool InToSpace(Object* object);
   static inline bool InToSpace(MaybeObject* object);
   static inline bool InToSpace(HeapObject* heap_object);
+  static inline bool InToSpace(HeapObjectPtr heap_object);
   // Returns whether the object resides in old space.
   inline bool InOldSpace(Object* object);
@@ -935,6 +938,11 @@ class Heap {
   // Find the heap which owns this HeapObject. Should never be called for
   // objects in RO space.
   static inline Heap* FromWritableHeapObject(const HeapObject* obj);
+  // This takes a HeapObjectPtr* (as opposed to a plain HeapObjectPtr)
+  // to keep the WRITE_BARRIER macro syntax-compatible to the HeapObject*
+  // version above.
+  // TODO(3770): This should probably take a HeapObjectPtr eventually.
+  static inline Heap* FromWritableHeapObject(const HeapObjectPtr* obj);
   // ===========================================================================
   // Object statistics tracking. ===============================================
......
@@ -473,7 +473,7 @@ void ObjectStatsCollectorImpl::RecordVirtualAllocationSiteDetails(
   if (boilerplate->HasFastProperties()) {
     // We'll mis-classify the empty_property_array here. Given that there is a
     // single instance, this is negligible.
-    PropertyArray* properties = boilerplate->property_array();
+    PropertyArray properties = boilerplate->property_array();
     RecordSimpleVirtualObjectStats(
         site, properties, ObjectStats::BOILERPLATE_PROPERTY_ARRAY_TYPE);
   } else {
@@ -535,7 +535,7 @@ void ObjectStatsCollectorImpl::RecordVirtualJSObjectDetails(JSObject* object) {
   // Properties.
   if (object->HasFastProperties()) {
-    PropertyArray* properties = object->property_array();
+    PropertyArray properties = object->property_array();
     CHECK_EQ(PROPERTY_ARRAY_TYPE, properties->map()->instance_type());
   } else {
     NameDictionary* properties = object->property_dictionary();
......
@@ -20,11 +20,17 @@ namespace v8 {
 namespace internal {
 template <typename ResultType, typename ConcreteVisitor>
-template <typename T>
+template <typename T, typename>
 T* HeapVisitor<ResultType, ConcreteVisitor>::Cast(HeapObject* object) {
   return T::cast(object);
 }
+template <typename ResultType, typename ConcreteVisitor>
+template <typename T, typename>
+T HeapVisitor<ResultType, ConcreteVisitor>::Cast(HeapObject* object) {
+  return T::cast(object);
+}
 template <typename ResultType, typename ConcreteVisitor>
 ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit(HeapObject* object) {
   return Visit(object->map(), object);
@@ -35,10 +41,10 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit(Map* map,
                                                            HeapObject* object) {
   ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
   switch (map->visitor_id()) {
-#define CASE(type) \
-  case kVisit##type: \
-    return visitor->Visit##type(map, \
-                                ConcreteVisitor::template Cast<type>(object));
+#define CASE(TypeName, Type) \
+  case kVisit##TypeName: \
+    return visitor->Visit##TypeName( \
+        map, ConcreteVisitor::template Cast<TypeName>(object));
     TYPED_VISITOR_ID_LIST(CASE)
 #undef CASE
     case kVisitShortcutCandidate:
@@ -77,10 +83,10 @@ void HeapVisitor<ResultType, ConcreteVisitor>::VisitMapPointer(HeapObject* host,
   static_cast<ConcreteVisitor*>(this)->VisitPointer(host, map);
 }
-#define VISIT(type) \
+#define VISIT(TypeName, Type) \
   template <typename ResultType, typename ConcreteVisitor> \
-  ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit##type( \
-      Map* map, type* object) { \
+  ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit##TypeName( \
+      Map* map, Type object) { \
     ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this); \
     if (!visitor->ShouldVisit(object)) return ResultType(); \
     if (!visitor->AllowDefaultJSObjectVisit()) { \
@@ -88,10 +94,10 @@ void HeapVisitor<ResultType, ConcreteVisitor>::VisitMapPointer(HeapObject* host,
                   "Implement custom visitor for new JSObject subclass in " \
                   "concurrent marker"); \
     } \
-    int size = type::BodyDescriptor::SizeOf(map, object); \
+    int size = TypeName::BodyDescriptor::SizeOf(map, object); \
    if (visitor->ShouldVisitMapPointer()) \
       visitor->VisitMapPointer(object, object->map_slot()); \
-    type::BodyDescriptor::IterateBody(map, object, size, visitor); \
+    TypeName::BodyDescriptor::IterateBody(map, object, size, visitor); \
     return static_cast<ResultType>(size); \
   }
 TYPED_VISITOR_ID_LIST(VISIT)
......
@@ -31,45 +31,45 @@ class UncompiledDataWithPreParsedScope;
 class WasmInstanceObject;
 #define TYPED_VISITOR_ID_LIST(V) \
-  V(AllocationSite) \
-  V(BigInt) \
-  V(ByteArray) \
-  V(BytecodeArray) \
-  V(Cell) \
-  V(Code) \
-  V(CodeDataContainer) \
-  V(ConsString) \
-  V(DataHandler) \
-  V(EphemeronHashTable) \
-  V(FeedbackCell) \
-  V(FeedbackVector) \
-  V(FixedArray) \
-  V(FixedDoubleArray) \
-  V(FixedFloat64Array) \
-  V(FixedTypedArrayBase) \
-  V(JSArrayBuffer) \
-  V(JSDataView) \
-  V(JSObject) \
-  V(JSTypedArray) \
-  V(JSWeakCollection) \
-  V(Map) \
-  V(Oddball) \
-  V(PreParsedScopeData) \
-  V(PropertyArray) \
-  V(PropertyCell) \
-  V(PrototypeInfo) \
-  V(SeqOneByteString) \
-  V(SeqTwoByteString) \
-  V(SharedFunctionInfo) \
-  V(SlicedString) \
-  V(SmallOrderedHashMap) \
-  V(SmallOrderedHashSet) \
-  V(Symbol) \
-  V(ThinString) \
-  V(TransitionArray) \
-  V(UncompiledDataWithoutPreParsedScope) \
-  V(UncompiledDataWithPreParsedScope) \
-  V(WasmInstanceObject)
+  V(AllocationSite, AllocationSite*) \
+  V(BigInt, BigInt*) \
+  V(ByteArray, ByteArray*) \
+  V(BytecodeArray, BytecodeArray*) \
+  V(Cell, Cell*) \
+  V(Code, Code*) \
+  V(CodeDataContainer, CodeDataContainer*) \
+  V(ConsString, ConsString*) \
+  V(DataHandler, DataHandler*) \
+  V(EphemeronHashTable, EphemeronHashTable*) \
+  V(FeedbackCell, FeedbackCell*) \
+  V(FeedbackVector, FeedbackVector*) \
+  V(FixedArray, FixedArray*) \
+  V(FixedDoubleArray, FixedDoubleArray*) \
+  V(FixedFloat64Array, FixedFloat64Array*) \
+  V(FixedTypedArrayBase, FixedTypedArrayBase*) \
+  V(JSArrayBuffer, JSArrayBuffer*) \
+  V(JSDataView, JSDataView*) \
+  V(JSObject, JSObject*) \
+  V(JSTypedArray, JSTypedArray*) \
+  V(JSWeakCollection, JSWeakCollection*) \
+  V(Map, Map*) \
+  V(Oddball, Oddball*) \
+  V(PreParsedScopeData, PreParsedScopeData*) \
+  V(PropertyArray, PropertyArray) \
+  V(PropertyCell, PropertyCell*) \
+  V(PrototypeInfo, PrototypeInfo*) \
+  V(SeqOneByteString, SeqOneByteString*) \
+  V(SeqTwoByteString, SeqTwoByteString*) \
+  V(SharedFunctionInfo, SharedFunctionInfo*) \
+  V(SlicedString, SlicedString*) \
+  V(SmallOrderedHashMap, SmallOrderedHashMap*) \
+  V(SmallOrderedHashSet, SmallOrderedHashSet*) \
+  V(Symbol, Symbol*) \
+  V(ThinString, ThinString*) \
+  V(TransitionArray, TransitionArray*) \
+  V(UncompiledDataWithoutPreParsedScope, UncompiledDataWithoutPreParsedScope*) \
+  V(UncompiledDataWithPreParsedScope, UncompiledDataWithPreParsedScope*) \
+  V(WasmInstanceObject, WasmInstanceObject*)
 // The base class for visitors that need to dispatch on object type. The default
 // behavior of all visit functions is to iterate body of the given object using
@@ -101,7 +101,8 @@ class HeapVisitor : public ObjectVisitor {
   // in default Visit implemention for subclasses of JSObject.
   V8_INLINE bool AllowDefaultJSObjectVisit() { return true; }
-#define VISIT(type) V8_INLINE ResultType Visit##type(Map* map, type* object);
+#define VISIT(TypeName, Type) \
+  V8_INLINE ResultType Visit##TypeName(Map* map, Type object);
   TYPED_VISITOR_ID_LIST(VISIT)
 #undef VISIT
   V8_INLINE ResultType VisitShortcutCandidate(Map* map, ConsString* object);
@@ -113,8 +114,13 @@ class HeapVisitor : public ObjectVisitor {
   V8_INLINE ResultType VisitFreeSpace(Map* map, FreeSpace* object);
   V8_INLINE ResultType VisitWeakArray(Map* map, HeapObject* object);
-  template <typename T>
+  template <typename T, typename = typename std::enable_if<
+                            std::is_base_of<Object, T>::value>::type>
   static V8_INLINE T* Cast(HeapObject* object);
+  template <typename T, typename = typename std::enable_if<
+                            std::is_base_of<ObjectPtr, T>::value>::type>
+  static V8_INLINE T Cast(HeapObject* object);
 };
 template <typename ConcreteVisitor>
......
@@ -3651,7 +3651,7 @@ void LargeObjectSpace::Verify(Isolate* isolate) {
         }
       }
     } else if (object->IsPropertyArray()) {
-      PropertyArray* array = PropertyArray::cast(object);
+      PropertyArray array = PropertyArray::cast(object);
      for (int j = 0; j < array->length(); j++) {
         Object* property = array->get(j);
         if (property->IsHeapObject()) {
......
@@ -25,6 +25,7 @@
 #include "src/heap/invalidated-slots.h"
 #include "src/heap/marking.h"
 #include "src/objects.h"
+#include "src/objects/heap-object.h"
 #include "src/objects/map.h"
 #include "src/utils.h"
@@ -409,6 +410,10 @@ class MemoryChunk {
     return reinterpret_cast<MemoryChunk*>(reinterpret_cast<Address>(o) &
                                           ~kAlignmentMask);
   }
+  // Only works if the object is in the first kPageSize of the MemoryChunk.
+  static MemoryChunk* FromHeapObject(const HeapObjectPtr o) {
+    return reinterpret_cast<MemoryChunk*>(o.ptr() & ~kAlignmentMask);
+  }
   void SetOldGenerationPageFlags(bool is_marking);
   void SetYoungGenerationPageFlags(bool is_marking);
......
@@ -7,6 +7,7 @@
 #include "src/base/functional.h"
 #include "src/handles.h"
+#include "src/objects/heap-object.h"
 namespace v8 {
 namespace internal {
@@ -103,6 +104,7 @@ class IdentityMap : public IdentityMapBase {
   V* Get(Object* key) {
     return reinterpret_cast<V*>(GetEntry(reinterpret_cast<Address>(key)));
   }
+  V* Get(ObjectPtr key) { return reinterpret_cast<V*>(GetEntry(key.ptr())); }
   // Searches this map for the given key using the object's address
   // as the identity, returning:
@@ -112,12 +114,18 @@ class IdentityMap : public IdentityMapBase {
   V* Find(Object* key) const {
     return reinterpret_cast<V*>(FindEntry(reinterpret_cast<Address>(key)));
   }
+  V* Find(ObjectPtr key) const {
+    return reinterpret_cast<V*>(FindEntry(key.ptr()));
+  }
   // Set the value for the given key.
   void Set(Handle<Object> key, V v) { Set(*key, v); }
   void Set(Object* key, V v) {
     *(reinterpret_cast<V*>(GetEntry(reinterpret_cast<Address>(key)))) = v;
   }
+  void Set(ObjectPtr key, V v) {
+    *(reinterpret_cast<V*>(GetEntry(key.ptr()))) = v;
+  }
   bool Delete(Handle<Object> key, V* deleted_value) {
     return Delete(*key, deleted_value);
@@ -130,6 +138,14 @@ class IdentityMap : public IdentityMapBase {
     }
     return deleted_something;
   }
+  bool Delete(ObjectPtr key, V* deleted_value) {
+    void* v = nullptr;
+    bool deleted_something = DeleteEntry(key.ptr(), &v);
+    if (deleted_value != nullptr && deleted_something) {
+      *deleted_value = *reinterpret_cast<V*>(&v);
+    }
+    return deleted_something;
+  }
   // Removes all elements from the map.
   void Clear() { IdentityMapBase::Clear(); }
......
@@ -687,14 +687,14 @@ void WeakArrayList::WeakArrayListVerify(Isolate* isolate) {
 void PropertyArray::PropertyArrayVerify(Isolate* isolate) {
   if (length() == 0) {
-    CHECK_EQ(this, ReadOnlyRoots(isolate).empty_property_array());
+    CHECK_EQ(*this, ReadOnlyRoots(isolate).empty_property_array());
     return;
   }
   // There are no empty PropertyArrays.
   CHECK_LT(0, length());
   for (int i = 0; i < length(); i++) {
     Object* e = get(i);
-    VerifyPointer(isolate, e);
+    Object::VerifyPointer(isolate, e);
   }
 }
......
@@ -724,10 +724,18 @@ MaybeHandle<Object> Object::SetElement(Isolate* isolate, Handle<Object> object,
   return value;
 }
+ObjectSlot HeapObject::RawField(int byte_offset) const {
+  return ObjectSlot(FIELD_ADDR(this, byte_offset));
+}
 ObjectSlot HeapObject::RawField(const HeapObject* obj, int byte_offset) {
   return ObjectSlot(FIELD_ADDR(obj, byte_offset));
 }
+MaybeObjectSlot HeapObject::RawMaybeWeakField(int byte_offset) const {
+  return MaybeObjectSlot(FIELD_ADDR(this, byte_offset));
+}
 MaybeObjectSlot HeapObject::RawMaybeWeakField(HeapObject* obj,
                                               int byte_offset) {
   return MaybeObjectSlot(FIELD_ADDR(obj, byte_offset));
@@ -1551,7 +1559,7 @@ int HeapObject::SizeFromMap(Map* map) const {
   }
   if (instance_type == PROPERTY_ARRAY_TYPE) {
     return PropertyArray::SizeFor(
-        reinterpret_cast<const PropertyArray*>(this)->synchronized_length());
+        PropertyArray::cast(this)->synchronized_length());
   }
   if (instance_type == SMALL_ORDERED_HASH_MAP_TYPE) {
     return SmallOrderedHashMap::SizeFor(
......
@@ -89,6 +89,19 @@ void HeapObject::PrintHeader(std::ostream& os, const char* id) {  // NOLINT
   if (!IsMap()) os << "\n - map: " << Brief(map());
 }
+void HeapObjectPtr::PrintHeader(std::ostream& os, const char* id) {  // NOLINT
+  os << reinterpret_cast<void*>(ptr()) << ": [";
+  if (id != nullptr) {
+    os << id;
+  } else {
+    os << map()->instance_type();
+  }
+  os << "]";
+  MemoryChunk* chunk = MemoryChunk::FromAddress(ptr());
+  if (chunk->owner()->identity() == OLD_SPACE) os << " in OldSpace";
+  if (!IsMap()) os << "\n - map: " << Brief(map());
+}
 void HeapObject::HeapObjectPrint(std::ostream& os) {  // NOLINT
   InstanceType instance_type = map()->instance_type();
@@ -979,7 +992,7 @@ void ObjectBoilerplateDescription::ObjectBoilerplateDescriptionPrint(
 }
 void PropertyArray::PropertyArrayPrint(std::ostream& os) {  // NOLINT
-  HeapObject::PrintHeader(os, "PropertyArray");
+  PrintHeader(os, "PropertyArray");
   os << "\n - length: " << length();
   os << "\n - hash: " << Hash();
   PrintFixedArrayElements(os, this);
......
@@ -1066,6 +1066,10 @@ class Object {
   // Type testing.
   bool IsObject() const { return true; }
+  // Syntax compatibility with ObjectPtr, so the same macros can consume
+  // arguments of either type.
+  Address ptr() const { return reinterpret_cast<Address>(this); }
 #define IS_TYPE_FUNCTION_DECL(Type) V8_INLINE bool Is##Type() const;
   OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
   HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
@@ -1711,7 +1715,9 @@ class HeapObject: public Object {
   // Does no checking, and is safe to use during GC, while maps are invalid.
   // Does not invoke write barrier, so should only be assigned to
   // during marking GC.
+  inline ObjectSlot RawField(int byte_offset) const;
   static inline ObjectSlot RawField(const HeapObject* obj, int offset);
+  inline MaybeObjectSlot RawMaybeWeakField(int byte_offset) const;
   static inline MaybeObjectSlot RawMaybeWeakField(HeapObject* obj, int offset);
   DECL_CAST(HeapObject)
......
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_OBJECTS_HEAP_OBJECT_INL_H_
#define V8_OBJECTS_HEAP_OBJECT_INL_H_
#include "src/objects/heap-object.h"
#include "src/heap/heap-write-barrier-inl.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
namespace v8 {
namespace internal {
OBJECT_CONSTRUCTORS_IMPL(HeapObjectPtr, ObjectPtr)
#define TYPE_CHECK_FORWARDER(Type) \
bool HeapObjectPtr::Is##Type() const { \
return reinterpret_cast<HeapObject*>(ptr())->Is##Type(); \
}
HEAP_OBJECT_TYPE_LIST(TYPE_CHECK_FORWARDER)
#undef TYPE_CHECK_FORWARDER
Map* HeapObjectPtr::map() const {
return Map::cast(READ_FIELD(this, kMapOffset));
}
ObjectSlot HeapObjectPtr::map_slot() {
return ObjectSlot(FIELD_ADDR(this, kMapOffset));
}
WriteBarrierMode HeapObjectPtr::GetWriteBarrierMode(
const DisallowHeapAllocation& promise) {
Heap* heap = Heap::FromWritableHeapObject(this);
if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
if (Heap::InNewSpace(*this)) return SKIP_WRITE_BARRIER;
return UPDATE_WRITE_BARRIER;
}
ObjectSlot HeapObjectPtr::RawField(int byte_offset) const {
return ObjectSlot(FIELD_ADDR(this, byte_offset));
}
} // namespace internal
} // namespace v8
#include "src/objects/object-macros-undef.h"
#endif // V8_OBJECTS_HEAP_OBJECT_INL_H_
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_OBJECTS_HEAP_OBJECT_H_
#define V8_OBJECTS_HEAP_OBJECT_H_
#include "src/globals.h"
#include "src/objects.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
namespace v8 {
namespace internal {
// This is the new way to represent the Object class. It is temporarily
// separate to allow an incremental transition.
// For a design overview, see https://goo.gl/Ph4CGz.
class ObjectPtr {
public:
ObjectPtr() : ptr_(kNullAddress) {}
explicit ObjectPtr(Address ptr) : ptr_(ptr) {}
// Enable incremental transition.
operator Object*() const { return reinterpret_cast<Object*>(ptr()); }
bool operator==(const ObjectPtr other) const {
return this->ptr() == other.ptr();
}
bool operator!=(const ObjectPtr other) const {
return this->ptr() != other.ptr();
}
// Returns the tagged "(heap) object pointer" representation of this object.
Address ptr() const { return ptr_; }
private:
Address ptr_;
};
// Replacement for HeapObject; temporarily separate for incremental transition:
class HeapObjectPtr : public ObjectPtr {
public:
inline Map* map() const;
inline ObjectSlot map_slot();
inline WriteBarrierMode GetWriteBarrierMode(
const DisallowHeapAllocation& promise);
// Enable incremental transition.
operator HeapObject*() { return reinterpret_cast<HeapObject*>(ptr()); }
operator const HeapObject*() const {
return reinterpret_cast<const HeapObject*>(ptr());
}
bool IsHeapObjectPtr() const { return true; }
#define IS_TYPE_FUNCTION_DECL(Type) V8_INLINE bool Is##Type() const;
HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
#undef IS_TYPE_FUNCTION_DECL
// Untagged aligned address.
inline Address address() const { return ptr() - kHeapObjectTag; }
inline ObjectSlot RawField(int byte_offset) const;
#ifdef OBJECT_PRINT
void PrintHeader(std::ostream& os, const char* id); // NOLINT
#endif
static const int kMapOffset = HeapObject::kMapOffset;
OBJECT_CONSTRUCTORS(HeapObjectPtr)
};
} // namespace internal
} // namespace v8
#include "src/objects/object-macros-undef.h"
#endif // V8_OBJECTS_HEAP_OBJECT_H_
@@ -803,7 +803,7 @@ NameDictionary* JSReceiver::property_dictionary() const {
 // TODO(gsathya): Pass isolate directly to this function and access
 // the heap from this.
-PropertyArray* JSReceiver::property_array() const {
+PropertyArray JSReceiver::property_array() const {
   DCHECK(HasFastProperties());
   Object* prop = raw_properties_or_hash();
......
@@ -28,7 +28,7 @@ class JSReceiver : public HeapObject, public NeverReadOnlySpaceObject {
   // exists. Otherwise, returns an empty_property_array when there's a
   // Smi (hash code) or an empty_fixed_array for a fast properties
   // map.
-  inline PropertyArray* property_array() const;
+  inline PropertyArray property_array() const;
   // Gets slow properties for non-global objects.
   inline NameDictionary* property_dictionary() const;
......
@@ -4,12 +4,15 @@
 // PRESUBMIT_INTENTIONALLY_MISSING_INCLUDE_GUARD
+#undef OBJECT_CONSTRUCTORS
 #undef DECL_PRIMITIVE_ACCESSORS
 #undef DECL_BOOLEAN_ACCESSORS
 #undef DECL_INT_ACCESSORS
 #undef DECL_ACCESSORS
 #undef DECL_CAST
+#undef DECL_CAST2
 #undef CAST_ACCESSOR
+#undef CAST_ACCESSOR2
 #undef INT_ACCESSORS
 #undef ACCESSORS_CHECKED2
 #undef ACCESSORS_CHECKED
@@ -36,6 +39,7 @@
 #undef RELEASE_WRITE_FIELD
 #undef RELAXED_WRITE_FIELD
 #undef WRITE_BARRIER
+#undef WEAK_WRITE_BARRIER
 #undef CONDITIONAL_WRITE_BARRIER
 #undef CONDITIONAL_WEAK_WRITE_BARRIER
 #undef READ_DOUBLE_FIELD
......
@@ -16,6 +16,21 @@
 #include <src/v8memory.h>
+// Since this changes visibility, it should always be last in a class
+// definition.
+#define OBJECT_CONSTRUCTORS(Type) \
+ public: \
+  Type(); \
+  Type* operator->() { return this; } \
+  const Type* operator->() const { return this; } \
+ \
+ protected: \
+  explicit Type(Address ptr);
+#define OBJECT_CONSTRUCTORS_IMPL(Type, Super) \
+  inline Type::Type() : Super() {} \
+  inline Type::Type(Address ptr) : Super(ptr) { SLOW_DCHECK(Is##Type()); }
 #define DECL_PRIMITIVE_ACCESSORS(name, type) \
   inline type name() const; \
   inline void set_##name(type value);
@@ -43,6 +58,13 @@
   V8_INLINE static type* cast(Object* object); \
   V8_INLINE static const type* cast(const Object* object);
+// TODO(3770): Replacement for the above, temporarily separate for
+// incremental transition.
+#define DECL_CAST2(Type) \
+  V8_INLINE static Type cast(Object* object); \
+  V8_INLINE static const Type cast(const Object* object); \
+  V8_INLINE static Type cast(ObjectPtr object);
 #define CAST_ACCESSOR(type) \
   type* type::cast(Object* object) { \
     SLOW_DCHECK(object->Is##type()); \
@@ -53,6 +75,13 @@
     return reinterpret_cast<const type*>(object); \
   }
+// TODO(3770): Replacement for the above, temporarily separate for
+// incremental transition.
+#define CAST_ACCESSOR2(Type) \
+  Type Type::cast(Object* object) { return Type(object->ptr()); } \
+  const Type Type::cast(const Object* object) { return Type(object->ptr()); } \
+  Type Type::cast(ObjectPtr object) { return Type(object.ptr()); }
 #define INT_ACCESSORS(holder, name, offset) \
   int holder::name() const { return READ_INT_FIELD(this, offset); } \
   void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
@@ -196,8 +225,7 @@
     return InstanceTypeChecker::Is##type(map()->instance_type()); \
   }
-#define FIELD_ADDR(p, offset) \
-  (reinterpret_cast<Address>(p) + offset - kHeapObjectTag)
+#define FIELD_ADDR(p, offset) ((p)->ptr() + offset - kHeapObjectTag)
 #define READ_FIELD(p, offset) \
   (*reinterpret_cast<Object* const*>(FIELD_ADDR(p, offset)))
@@ -246,17 +274,15 @@
 #define WRITE_BARRIER(object, offset, value) \
   do { \
     DCHECK_NOT_NULL(Heap::FromWritableHeapObject(object)); \
-    MarkingBarrier(object, HeapObject::RawField(object, offset), value); \
-    GenerationalBarrier(object, HeapObject::RawField(object, offset), value); \
+    MarkingBarrier(object, (object)->RawField(offset), value); \
+    GenerationalBarrier(object, (object)->RawField(offset), value); \
  } while (false)
 #define WEAK_WRITE_BARRIER(object, offset, value) \
   do { \
     DCHECK_NOT_NULL(Heap::FromWritableHeapObject(object)); \
-    MarkingBarrier(object, HeapObject::RawMaybeWeakField(object, offset), \
-                   value); \
-    GenerationalBarrier(object, HeapObject::RawMaybeWeakField(object, offset), \
-                        value); \
+    MarkingBarrier(object, (object)->RawMaybeWeakField(offset), value); \
+    GenerationalBarrier(object, (object)->RawMaybeWeakField(offset), value); \
   } while (false)
 #define CONDITIONAL_WRITE_BARRIER(object, offset, value, mode) \
@@ -264,10 +290,9 @@
     DCHECK_NOT_NULL(Heap::FromWritableHeapObject(object)); \
     if (mode != SKIP_WRITE_BARRIER) { \
       if (mode == UPDATE_WRITE_BARRIER) { \
-        MarkingBarrier(object, HeapObject::RawField(object, offset), value); \
+        MarkingBarrier(object, (object)->RawField(offset), value); \
       } \
-      GenerationalBarrier(object, HeapObject::RawField(object, offset), \
-                          value); \
+      GenerationalBarrier(object, (object)->RawField(offset), value); \
     } \
   } while (false)
@@ -276,11 +301,9 @@
     DCHECK_NOT_NULL(Heap::FromWritableHeapObject(object)); \
     if (mode != SKIP_WRITE_BARRIER) { \
       if (mode == UPDATE_WRITE_BARRIER) { \
-        MarkingBarrier(object, HeapObject::RawMaybeWeakField(object, offset), \
-                       value); \
+        MarkingBarrier(object, (object)->RawMaybeWeakField(offset), value); \
      } \
-      GenerationalBarrier( \
-          object, HeapObject::RawMaybeWeakField(object, offset), value); \
+      GenerationalBarrier(object, (object)->RawMaybeWeakField(offset), value); \
    } \
   } while (false)
......
@@ -8,6 +8,7 @@
 #include "src/objects/property-array.h"
 #include "src/heap/heap-write-barrier-inl.h"
+#include "src/objects/heap-object-inl.h"
 // Has to be the last include (doesn't have include guards):
 #include "src/objects/object-macros.h"
@@ -15,7 +16,8 @@
 namespace v8 {
 namespace internal {
-CAST_ACCESSOR(PropertyArray)
+OBJECT_CONSTRUCTORS_IMPL(PropertyArray, HeapObjectPtr)
+CAST_ACCESSOR2(PropertyArray)
 Object* PropertyArray::get(int index) const {
   DCHECK_GE(index, 0);
@@ -40,9 +42,7 @@ void PropertyArray::set(int index, Object* value, WriteBarrierMode mode) {
   CONDITIONAL_WRITE_BARRIER(this, offset, value, mode);
 }
-ObjectSlot PropertyArray::data_start() {
-  return HeapObject::RawField(this, kHeaderSize);
-}
+ObjectSlot PropertyArray::data_start() { return RawField(kHeaderSize); }
 int PropertyArray::length() const {
   Object* value_obj = READ_FIELD(this, kLengthAndHashOffset);
......
@@ -5,7 +5,7 @@
 #ifndef V8_OBJECTS_PROPERTY_ARRAY_H_
 #define V8_OBJECTS_PROPERTY_ARRAY_H_
-#include "src/objects.h"
+#include "src/objects/heap-object.h"
 // Has to be the last include (doesn't have include guards):
 #include "src/objects/object-macros.h"
@@ -13,7 +13,7 @@
 namespace v8 {
 namespace internal {
-class PropertyArray : public HeapObject {
+class PropertyArray : public HeapObjectPtr {
  public:
   // [length]: length of the array.
   inline int length() const;
@@ -42,7 +42,7 @@ class PropertyArray : public HeapObject {
     return kHeaderSize + length * kPointerSize;
   }
-  DECL_CAST(PropertyArray)
+  DECL_CAST2(PropertyArray)
   DECL_PRINTER(PropertyArray)
   DECL_VERIFIER(PropertyArray)
@@ -61,8 +61,7 @@ class PropertyArray : public HeapObject {
   static const int kNoHashSentinel = 0;
- private:
-  DISALLOW_IMPLICIT_CONSTRUCTORS(PropertyArray);
+  OBJECT_CONSTRUCTORS(PropertyArray);
 };
 }  // namespace internal
......
@@ -174,7 +174,7 @@ class RootVisitor;
   V(Map*, self_reference_marker_map, SelfReferenceMarkerMap) \
   /* Canonical empty values */ \
   V(EnumCache*, empty_enum_cache, EmptyEnumCache) \
-  V(PropertyArray*, empty_property_array, EmptyPropertyArray) \
+  V(PropertyArray, empty_property_array, EmptyPropertyArray) \
   V(ByteArray*, empty_byte_array, EmptyByteArray) \
   V(ObjectBoilerplateDescription*, empty_object_boilerplate_description, \
     EmptyObjectBoilerplateDescription) \
......