Commit 537f73e9 authored by Michael Starzinger, committed by Commit Bot

[heap] Move {next_code_link} into the {CodeDataContainer}.

This moves the {Code::next_code_link} field into the data container, making
it possible to mutate the field even when {Code} objects are protected. It
also introduces a dedicated body descriptor for the container, allowing
tagged fields to be part of the container.

R=ulan@chromium.org
BUG=v8:6792

Change-Id: I56a9d53e8bb35aeb0a7036e3abf3ebee1ba2928d
Reviewed-on: https://chromium-review.googlesource.com/738184
Commit-Queue: Michael Starzinger <mstarzinger@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Cr-Commit-Position: refs/heads/master@{#48966}
parent 11d3178b
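
For orientation before the diff: a minimal, compilable sketch of the indirection this change introduces. All types below are simplified stand-ins, not V8's real definitions; only the field and accessor names mirror the ones touched in the hunks that follow.

// Sketch only: once Code objects live on write-protected pages, the GC can
// no longer write the weak list link into the Code object itself. The link
// therefore moves into the always-writable CodeDataContainer, reached via a
// pointer in the Code object that is written once at allocation time.
struct Object {};  // simplified stand-in

struct CodeDataContainer {
  Object* next_code_link = nullptr;  // mutable even while Code is protected
};

struct Code {
  // Immutable after allocation, so maintaining the list never requires
  // write access to the Code object's page.
  CodeDataContainer* code_data_container = nullptr;

  Object* next_code_link() const { return code_data_container->next_code_link; }
  void set_next_code_link(Object* v) { code_data_container->next_code_link = v; }
};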
@@ -1779,6 +1779,7 @@ Handle<JSObject> Factory::NewExternal(void* value) {
 Handle<CodeDataContainer> Factory::NewCodeDataContainer(int flags) {
   Handle<CodeDataContainer> data_container =
       New<CodeDataContainer>(code_data_container_map(), OLD_SPACE);
+  data_container->set_next_code_link(*undefined_value(), SKIP_WRITE_BARRIER);
   data_container->set_kind_specific_flags(flags);
   data_container->clear_padding();
   return data_container;
@@ -1844,7 +1845,6 @@ Handle<Code> Factory::NewCode(
   code->set_has_tagged_params(true);
   code->set_deoptimization_data(*deopt_data);
   code->set_stub_key(0);
-  code->set_next_code_link(*undefined_value(), SKIP_WRITE_BARRIER);
   code->set_handler_table(*handler_table);
   code->set_source_position_table(*source_position_table);
   code->set_protected_instructions(*empty_fixed_array(), SKIP_WRITE_BARRIER);
@@ -203,6 +203,11 @@ class ConcurrentMarkingVisitor final
     return 0;
   }

+  int VisitCodeDataContainer(Map* map, CodeDataContainer* object) {
+    bailout_.Push(object);
+    return 0;
+  }
+
   // ===========================================================================
   // Objects with weak fields and/or side-effectful visitation.
   // ===========================================================================
@@ -1187,8 +1187,8 @@ class MarkCompactCollector::CustomRootBodyMarkingVisitor final
   // VisitEmbedderPointer is defined by ObjectVisitor to call VisitPointers.

-  // Skip the weak next code link in a code object.
-  void VisitNextCodeLink(Code* host, Object** p) override {}
+  // Skip the weak next code link for code objects.
+  void VisitNextCodeLink(CodeDataContainer* host, Object** p) override {}

  private:
   void MarkObject(HeapObject* host, Object* object) {
@@ -1050,7 +1050,7 @@ class MarkingVisitor final
   V8_INLINE void VisitEmbeddedPointer(Code* host, RelocInfo* rinfo) final;
   V8_INLINE void VisitCodeTarget(Code* host, RelocInfo* rinfo) final;
   // Skip weak next code link.
-  V8_INLINE void VisitNextCodeLink(Code* host, Object** p) final {}
+  V8_INLINE void VisitNextCodeLink(CodeDataContainer* host, Object** p) final {}

  private:
   // Granularity in which FixedArrays are scanned if |fixed_array_mode|
@@ -46,9 +46,10 @@ Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
         DCHECK_NOT_NULL(tail);
         WeakListVisitor<T>::SetWeakNext(tail, retained);
         if (record_slots) {
-          Object** next_slot =
-              HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
-          MarkCompactCollector::RecordSlot(tail, next_slot, retained);
+          HeapObject* slot_holder = WeakListVisitor<T>::WeakNextHolder(tail);
+          int slot_offset = WeakListVisitor<T>::WeakNextOffset();
+          Object** slot = HeapObject::RawField(slot_holder, slot_offset);
+          MarkCompactCollector::RecordSlot(slot_holder, slot, retained);
         }
       }
       // Retained object is new tail.
@@ -86,12 +87,19 @@ static void ClearWeakList(Heap* heap, Object* list) {
 template <>
 struct WeakListVisitor<Code> {
   static void SetWeakNext(Code* code, Object* next) {
-    code->set_next_code_link(next, UPDATE_WEAK_WRITE_BARRIER);
+    code->code_data_container()->set_next_code_link(next,
+                                                    UPDATE_WEAK_WRITE_BARRIER);
   }

-  static Object* WeakNext(Code* code) { return code->next_code_link(); }
+  static Object* WeakNext(Code* code) {
+    return code->code_data_container()->next_code_link();
+  }
+
+  static HeapObject* WeakNextHolder(Code* code) {
+    return code->code_data_container();
+  }

-  static int WeakNextOffset() { return Code::kNextCodeLinkOffset; }
+  static int WeakNextOffset() {
+    return CodeDataContainer::kNextCodeLinkOffset;
+  }

   static void VisitLiveObject(Heap*, Code*, WeakObjectRetainer*) {}
@@ -109,6 +117,8 @@ struct WeakListVisitor<Context> {
     return context->next_context_link();
   }

+  static HeapObject* WeakNextHolder(Context* context) { return context; }
+
   static int WeakNextOffset() {
     return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
   }
@@ -161,6 +171,8 @@ struct WeakListVisitor<AllocationSite> {
   static Object* WeakNext(AllocationSite* obj) { return obj->weak_next(); }

+  static HeapObject* WeakNextHolder(AllocationSite* obj) { return obj; }
+
   static int WeakNextOffset() { return AllocationSite::kWeakNextOffset; }

   static void VisitLiveObject(Heap*, AllocationSite*, WeakObjectRetainer*) {}
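The new WeakNextHolder() trait method is the enabling generalization in VisitWeakList above: the GC now records the weak-next slot against whichever object physically holds it, which for Code is its data container rather than the Code object itself. A rough sketch of the trait shape, again with simplified stand-in types rather than V8's real ones:

#include <cstddef>

// Simplified stand-ins; the real types are HeapObject, Code and
// CodeDataContainer in V8.
struct HeapObject {};
struct Container : HeapObject {
  HeapObject* next = nullptr;
};
struct Element : HeapObject {
  Container* container = nullptr;
};

// Trait in the style of WeakListVisitor<T>: the holder of the weak-next
// slot no longer has to be the list element itself.
struct ElementTrait {
  static HeapObject* WeakNextHolder(Element* e) { return e->container; }
  static std::size_t WeakNextOffset() { return offsetof(Container, next); }
};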
@@ -28,6 +28,7 @@ class JSRegExp;
   V(BytecodeArray) \
   V(Cell) \
   V(Code) \
+  V(CodeDataContainer) \
   V(ConsString) \
   V(FeedbackVector) \
   V(FixedArray) \
@@ -358,7 +358,7 @@ class Code::BodyDescriptor final : public BodyDescriptorBase {
                 kProtectedInstructionsOffset);
   STATIC_ASSERT(kProtectedInstructionsOffset + kPointerSize ==
                 kCodeDataContainerOffset);
-  STATIC_ASSERT(kCodeDataContainerOffset + kPointerSize == kNextCodeLinkOffset);
+  STATIC_ASSERT(kCodeDataContainerOffset + kPointerSize == kDataStart);

   static bool IsValidSlot(HeapObject* obj, int offset) {
     // Slots in code can't be invalid because we never trim code objects.
@@ -374,12 +374,8 @@ class Code::BodyDescriptor final : public BodyDescriptorBase {
         RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) |
         RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

-    IteratePointers(obj, kRelocationInfoOffset, kNextCodeLinkOffset, v);
-    v->VisitNextCodeLink(Code::cast(obj),
-                         HeapObject::RawField(obj, kNextCodeLinkOffset));
-
-    // GC does not visit data/code in the header and in the body directly.
-    STATIC_ASSERT(Code::kNextCodeLinkOffset + kPointerSize == kDataStart);
+    IteratePointers(obj, kRelocationInfoOffset, kDataStart, v);

     RelocIterator it(Code::cast(obj), mode_mask);
     Isolate* isolate = obj->GetIsolate();
@@ -399,6 +395,25 @@ class Code::BodyDescriptor final : public BodyDescriptorBase {
   }
 };

+class CodeDataContainer::BodyDescriptor final : public BodyDescriptorBase {
+ public:
+  static bool IsValidSlot(HeapObject* obj, int offset) { return true; }
+
+  template <typename ObjectVisitor>
+  static inline void IterateBody(HeapObject* obj, ObjectVisitor* v) {
+    v->VisitNextCodeLink(CodeDataContainer::cast(obj),
+                         HeapObject::RawField(obj, kNextCodeLinkOffset));
+  }
+
+  template <typename ObjectVisitor>
+  static inline void IterateBody(HeapObject* obj, int object_size,
+                                 ObjectVisitor* v) {
+    IterateBody(obj, v);
+  }
+
+  static inline int SizeOf(Map* map, HeapObject* obj) { return kSize; }
+};
+
 class SeqOneByteString::BodyDescriptor final : public BodyDescriptorBase {
  public:
   static bool IsValidSlot(HeapObject* obj, int offset) { return false; }
@@ -538,9 +553,10 @@ ReturnType BodyDescriptorApply(InstanceType type, T1 p1, T2 p2, T3 p3) {
       return Op::template apply<
           SmallOrderedHashTable<SmallOrderedHashMap>::BodyDescriptor>(p1, p2,
                                                                       p3);
+    case CODE_DATA_CONTAINER_TYPE:
+      return Op::template apply<CodeDataContainer::BodyDescriptor>(p1, p2, p3);
     case HEAP_NUMBER_TYPE:
     case MUTABLE_HEAP_NUMBER_TYPE:
-    case CODE_DATA_CONTAINER_TYPE:
     case FILLER_TYPE:
     case BYTE_ARRAY_TYPE:
     case FREE_SPACE_TYPE:
@@ -836,6 +836,9 @@ void WeakCell::WeakCellVerify() {

 void CodeDataContainer::CodeDataContainerVerify() {
   CHECK(IsCodeDataContainer());
+  VerifyObjectField(kNextCodeLinkOffset);
+  CHECK(next_code_link()->IsCode() ||
+        next_code_link()->IsUndefined(GetIsolate()));
 }

 void Code::CodeVerify() {
@@ -3191,6 +3191,9 @@ VisitorId Map::GetVisitorId(Map* map) {
     case SMALL_ORDERED_HASH_SET_TYPE:
       return kVisitSmallOrderedHashSet;

+    case CODE_DATA_CONTAINER_TYPE:
+      return kVisitCodeDataContainer;
+
     case JS_OBJECT_TYPE:
     case JS_ERROR_TYPE:
     case JS_ARGUMENTS_TYPE:
@@ -3238,7 +3241,6 @@ VisitorId Map::GetVisitorId(Map* map) {
     case FOREIGN_TYPE:
     case HEAP_NUMBER_TYPE:
     case MUTABLE_HEAP_NUMBER_TYPE:
-    case CODE_DATA_CONTAINER_TYPE:
       return kVisitDataObject;

     case BIGINT_TYPE:
@@ -160,7 +160,6 @@ CODE_ACCESSORS(source_position_table, Object, kSourcePositionTableOffset)
 CODE_ACCESSORS(protected_instructions, FixedArray, kProtectedInstructionsOffset)
 CODE_ACCESSORS(code_data_container, CodeDataContainer, kCodeDataContainerOffset)
 CODE_ACCESSORS(trap_handler_index, Smi, kTrapHandlerIndex)
-CODE_ACCESSORS(next_code_link, Object, kNextCodeLinkOffset)
 #undef CODE_ACCESSORS

 void Code::WipeOutHeader() {
@@ -170,7 +169,6 @@ void Code::WipeOutHeader() {
   WRITE_FIELD(this, kSourcePositionTableOffset, nullptr);
   WRITE_FIELD(this, kProtectedInstructionsOffset, nullptr);
   WRITE_FIELD(this, kCodeDataContainerOffset, nullptr);
-  WRITE_FIELD(this, kNextCodeLinkOffset, nullptr);
 }

 void Code::clear_padding() {
@@ -198,6 +196,14 @@ void Code::set_stub_key(uint32_t key) {
   WRITE_UINT32_FIELD(this, kStubKeyOffset, key);
 }

+Object* Code::next_code_link() const {
+  return code_data_container()->next_code_link();
+}
+
+void Code::set_next_code_link(Object* value) {
+  code_data_container()->set_next_code_link(value);
+}
+
 byte* Code::instruction_start() const {
   return const_cast<byte*>(FIELD_ADDR_CONST(this, kHeaderSize));
 }
@@ -526,6 +532,7 @@ bool Code::IsWeakObjectInOptimizedCode(Object* object) {
 }

 INT_ACCESSORS(CodeDataContainer, kind_specific_flags, kKindSpecificFlagsOffset)
+ACCESSORS(CodeDataContainer, next_code_link, Object, kNextCodeLinkOffset)

 void CodeDataContainer::clear_padding() {
   memset(address() + kUnalignedSize, 0, kSize - kUnalignedSize);
@@ -175,8 +175,9 @@ class Code : public HeapObject {
   inline void set_stub_key(uint32_t key);

   // [next_code_link]: Link for lists of optimized or deoptimized code.
-  // Note that storage for this field is overlapped with typefeedback_info.
-  DECL_ACCESSORS(next_code_link, Object)
+  // Note that this field is stored in the {CodeDataContainer} to be mutable.
+  inline Object* next_code_link() const;
+  inline void set_next_code_link(Object* value);

   // [constant_pool offset]: Offset of the constant pool.
   // Valid for FLAG_enable_embedded_constant_pool only
@@ -438,9 +439,8 @@ class Code : public HeapObject {
       kSourcePositionTableOffset + kPointerSize;
   static const int kCodeDataContainerOffset =
       kProtectedInstructionsOffset + kPointerSize;
-  static const int kNextCodeLinkOffset =
+  static const int kInstructionSizeOffset =
       kCodeDataContainerOffset + kPointerSize;
-  static const int kInstructionSizeOffset = kNextCodeLinkOffset + kPointerSize;
   static const int kFlagsOffset = kInstructionSizeOffset + kIntSize;
   static const int kSafepointTableOffsetOffset = kFlagsOffset + kIntSize;
   static const int kStubKeyOffset = kSafepointTableOffsetOffset + kIntSize;
@@ -513,6 +513,7 @@ class Code : public HeapObject {
 // field {Code::code_data_container} itself is immutable.
 class CodeDataContainer : public HeapObject {
  public:
+  DECL_ACCESSORS(next_code_link, Object)
   DECL_INT_ACCESSORS(kind_specific_flags)

   // Clear uninitialized padding space. This ensures that the snapshot content
@@ -525,10 +526,14 @@ class CodeDataContainer : public HeapObject {
   DECL_PRINTER(CodeDataContainer)
   DECL_VERIFIER(CodeDataContainer)

-  static const int kKindSpecificFlagsOffset = HeapObject::kHeaderSize;
+  static const int kNextCodeLinkOffset = HeapObject::kHeaderSize;
+  static const int kKindSpecificFlagsOffset =
+      kNextCodeLinkOffset + kPointerSize;
   static const int kUnalignedSize = kKindSpecificFlagsOffset + kIntSize;
   static const int kSize = OBJECT_POINTER_ALIGN(kUnalignedSize);

+  class BodyDescriptor;
+
  private:
   DISALLOW_IMPLICIT_CONSTRUCTORS(CodeDataContainer);
 };
@@ -23,6 +23,7 @@ namespace internal {
   V(BytecodeArray) \
   V(Cell) \
   V(Code) \
+  V(CodeDataContainer) \
   V(ConsString) \
   V(DataObject) \
   V(FeedbackVector) \
@@ -1658,7 +1658,8 @@ bool V8HeapExplorer::IsEssentialHiddenReference(Object* parent,
   if (parent->IsAllocationSite() &&
       field_offset == AllocationSite::kWeakNextOffset)
     return false;
-  if (parent->IsCode() && field_offset == Code::kNextCodeLinkOffset)
+  if (parent->IsCodeDataContainer() &&
+      field_offset == CodeDataContainer::kNextCodeLinkOffset)
     return false;
   if (parent->IsContext() &&
       field_offset == Context::OffsetOfElementAt(Context::NEXT_CONTEXT_LINK))
@@ -20,7 +20,7 @@ const char* const VisitorSynchronization::kTagNames
     [VisitorSynchronization::kNumberOfSyncTags] = {ROOT_ID_LIST(DECLARE_TAG)};
 #undef DECLARE_TAG

-void ObjectVisitor::VisitNextCodeLink(Code* host, Object** p) {
+void ObjectVisitor::VisitNextCodeLink(CodeDataContainer* host, Object** p) {
   VisitPointers(host, p, p + 1);
 }
@@ -10,6 +10,7 @@
 namespace v8 {
 namespace internal {

+class CodeDataContainer;
 class Object;

 #define ROOT_ID_LIST(V) \
@@ -91,8 +92,8 @@ class ObjectVisitor BASE_EMBEDDED {
     VisitPointers(host, p, p + 1);
   }

-  // Visit weak next_code_link in Code object.
-  virtual void VisitNextCodeLink(Code* host, Object** p);
+  // Visit the weak next code link for code objects.
+  virtual void VisitNextCodeLink(CodeDataContainer* host, Object** p);

   // To allow lazy clearing of inline caches the visitor has
   // a rich interface for iterating over Code objects..
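
Because the host parameter of VisitNextCodeLink changed type, every overriding visitor has to be updated in lockstep, as the hunks above show. A minimal sketch of what an override looks like after this change, using simplified stand-ins rather than the real declarations shown above:

// Simplified stand-ins, not the real V8 headers.
struct Object {};
struct CodeDataContainer {};

struct ObjectVisitor {
  virtual ~ObjectVisitor() = default;
  // New signature: the host is the CodeDataContainer holding the slot.
  virtual void VisitNextCodeLink(CodeDataContainer* host, Object** p) {}
};

// Visitors that treat the link as weak keep their override empty, as
// CustomRootBodyMarkingVisitor and MarkingVisitor do in the hunks above.
struct WeakLinkSkippingVisitor : ObjectVisitor {
  void VisitNextCodeLink(CodeDataContainer* host, Object** p) override {}
};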