Commit 7366f6e2 authored by Seth Brenith, committed by V8 LUCI CQ

[torque] Don't generate k(?:Start|End)Of\w+FieldsOffset constants

Torque currently generates constants like kStartOfWeakFieldsOffset and
kEndOfStrongFieldsOffset, which can be used when writing custom
BodyDescriptors. However, these offsets have some potentially confusing
behaviors:

* They don't take inheritance into account and describe only the fields
  defined by the current class itself, so there might be (for example)
  strong fields before kStartOfStrongFieldsOffset if they were defined
  by a superclass.
* kStartOfWeakFieldsOffset points to the first field defined in Torque
  using the keyword `weak`, which indicates fields with *custom*
  weakness semantics (those that should be visited with
  IterateCustomWeakPointers), not those that may contain standard weak
  pointers (visited with IterateMaybeWeakPointers). (As a follow-up, I'd
  like to also rename `weak` to `@customWeak`.)
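
To make that distinction concrete, here is a rough sketch of a
hand-written BodyDescriptor (not part of this change; the class name and
the offset constants are invented), showing which visitor helper goes
with which kind of field:

  class Example::BodyDescriptor final : public BodyDescriptorBase {
   public:
    template <typename ObjectVisitor>
    static inline void IterateBody(Map map, HeapObject obj, int object_size,
                                   ObjectVisitor* v) {
      // Plain strong tagged fields.
      IteratePointers(obj, kStrongStartOffset, kStrongEndOffset, v);
      // A field that may hold a MaybeObject (strong or weak reference);
      // note that this is *not* what the Torque keyword `weak` marks.
      IterateMaybeWeakPointer(obj, kMaybeWeakFieldOffset, v);
      // Fields declared `weak` in Torque have custom weakness semantics
      // and need the custom-weak helper.
      IterateCustomWeakPointers(obj, kCustomWeakStartOffset,
                                kCustomWeakEndOffset, v);
    }
    static inline int SizeOf(Map map, HeapObject obj) { return kSize; }
  };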

Given that these constants have very low usage and somewhat bizarre
semantics, I propose that we remove them. This change does so, and
updates the existing usages to either define the required constants
directly in C++ or not use them. I know that defining these constants in
C++ is more brittle, but I think that brittle and clear is better than
automatic and incomprehensible.
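
For reference, the hand-written replacement pattern (mirroring the
wasm-objects.h changes below) looks roughly like this sketch;
SomeTorqueClass and kFirstTaggedFieldOffset are placeholders, while
FlexibleBodyDescriptor and TQ_OBJECT_CONSTRUCTORS are existing helpers:

  class SomeTorqueClass
      : public TorqueGeneratedSomeTorqueClass<SomeTorqueClass, HeapObject> {
   public:
    // Spell out where the strong fields start instead of relying on a
    // generated kStartOfStrongFieldsOffset marker.
    static constexpr int kStartOfStrongFieldsOffset = kFirstTaggedFieldOffset;
    using BodyDescriptor = FlexibleBodyDescriptor<kStartOfStrongFieldsOffset>;

    TQ_OBJECT_CONSTRUCTORS(SomeTorqueClass)
  };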

Bug: v8:7793
Change-Id: I87f8c85ccae4027f61ac73d4e7e4e2820e92003b
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3199731
Reviewed-by: Nico Hartmann <nicohartmann@chromium.org>
Reviewed-by: Toon Verwaest <verwaest@chromium.org>
Commit-Queue: Seth Brenith <seth.brenith@microsoft.com>
Cr-Commit-Position: refs/heads/main@{#77411}
parent 32008465
@@ -1650,8 +1650,7 @@ void WasmInstanceObject::WasmInstanceObjectVerify(Isolate* isolate) {
   // Just generically check all tagged fields. Don't check the untagged fields,
   // as some of them might still contain the "undefined" value if the
   // WasmInstanceObject is not fully set up yet.
-  for (int offset = kHeaderSize; offset < kEndOfStrongFieldsOffset;
-       offset += kTaggedSize) {
+  for (uint16_t offset : kTaggedFieldOffsets) {
     VerifyObjectField(isolate, offset);
   }
 }
......
@@ -488,14 +488,14 @@ template <typename ConcreteVisitor, typename MarkingState>
 int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitMap(Map meta_map,
                                                                 Map map) {
   if (!concrete_visitor()->ShouldVisit(map)) return 0;
-  int size = Map::BodyDescriptor::SizeOf(meta_map, map);
-  size += VisitDescriptorsForMap(map);
+  int map_size = Map::BodyDescriptor::SizeOf(meta_map, map);
+  int marked_size = map_size + VisitDescriptorsForMap(map);
   // Mark the pointer fields of the Map. If there is a transitions array, it has
   // been marked already, so it is fine that one of these fields contains a
   // pointer to it.
-  Map::BodyDescriptor::IterateBody(meta_map, map, size, this);
-  return size;
+  Map::BodyDescriptor::IterateBody(meta_map, map, map_size, this);
+  return marked_size;
 }
 
 template <typename ConcreteVisitor, typename MarkingState>
......
@@ -36,7 +36,6 @@ namespace internal {
   V(WeakCell) \
   V(JSWeakCollection) \
   V(JSWeakRef) \
-  V(Map) \
   V(NativeContext) \
   V(PreparseData) \
   V(PropertyArray) \
......
@@ -472,6 +472,9 @@ class Context : public TorqueGeneratedContext<Context, HeapObject> {
   static const int kScopeInfoOffset = kElementsOffset;
   static const int kPreviousOffset = kScopeInfoOffset + kTaggedSize;
+  // All fields are tagged.
+  static const int kStartOfStrongFieldsOffset = HeapObject::kHeaderSize;
+
   /* Header size. */ \
   /* TODO(ishell): use this as header size once MIN_CONTEXT_SLOTS */ \
   /* is removed in favour of offset-based access to common fields. */ \
......
@@ -91,11 +91,10 @@ InternalIndex DescriptorArray::SearchWithCache(Isolate* isolate, Name name,
 }
 
 ObjectSlot DescriptorArray::GetFirstPointerSlot() {
-  static_assert(kEndOfStrongFieldsOffset == kStartOfWeakFieldsOffset,
-                "Weak and strong fields are continuous.");
-  static_assert(kEndOfWeakFieldsOffset == kHeaderSize,
-                "Weak fields extend up to the end of the header.");
-  return RawField(DescriptorArray::kStartOfStrongFieldsOffset);
+  static_assert(kEnumCacheOffset + kTaggedSize == kHeaderSize,
+                "The strong field for enum_cache is contiguous with the "
+                "variable-size portion.");
+  return RawField(DescriptorArray::kEnumCacheOffset);
 }
 
 ObjectSlot DescriptorArray::GetDescriptorSlot(int descriptor) {
......
@@ -144,7 +144,6 @@ class DescriptorArray
   // Constant for denoting key was not found.
   static const int kNotFound = -1;
 
-  STATIC_ASSERT(IsAligned(kStartOfWeakFieldsOffset, kTaggedSize));
   STATIC_ASSERT(IsAligned(kHeaderSize, kTaggedSize));
 
   // Garbage collection support.
@@ -164,10 +163,6 @@ class DescriptorArray
   inline ObjectSlot GetFirstPointerSlot();
   inline ObjectSlot GetDescriptorSlot(int descriptor);
 
-  static_assert(kEndOfStrongFieldsOffset == kStartOfWeakFieldsOffset,
-                "Weak fields follow strong fields.");
-  static_assert(kEndOfWeakFieldsOffset == kHeaderSize,
-                "Weak fields extend up to the end of the header.");
   static_assert(kDescriptorsOffset == kHeaderSize,
                 "Variable-size array follows header.");
 
   class BodyDescriptor;
......
@@ -170,9 +170,6 @@ VisitorId Map::GetVisitorId(Map map) {
     case FEEDBACK_METADATA_TYPE:
       return kVisitFeedbackMetadata;
 
-    case MAP_TYPE:
-      return kVisitMap;
-
     case CODE_TYPE:
       return kVisitCode;
......
@@ -52,7 +52,6 @@ enum InstanceType : uint16_t;
   V(JSTypedArray) \
   V(JSWeakRef) \
   V(JSWeakCollection) \
-  V(Map) \
   V(NativeContext) \
   V(PreparseData) \
   V(PropertyArray) \
......
@@ -34,6 +34,7 @@ bitfield struct MapBitFields3 extends uint32 {
   construction_counter: int32: 3 bit;
 }
 
+@generateBodyDescriptor
 extern class Map extends HeapObject {
   macro PrototypeInfo(): PrototypeInfo labels HasNoPrototypeInfo {
     typeswitch (this.transitions_or_prototype_info) {
@@ -69,8 +70,8 @@ extern class Map extends HeapObject {
   instance_descriptors: DescriptorArray;
   dependent_code: DependentCode;
   prototype_validity_cell: Smi|Cell;
-  weak transitions_or_prototype_info: Map|Weak<Map>|TransitionArray|
-      PrototypeInfo|Smi;
+  transitions_or_prototype_info: Map|Weak<Map>|TransitionArray|PrototypeInfo|
+      Smi;
 }
 
 @export
......
@@ -843,28 +843,6 @@ class Code::BodyDescriptor final : public BodyDescriptorBase {
   }
 };
 
-class Map::BodyDescriptor final : public BodyDescriptorBase {
- public:
-  static bool IsValidSlot(Map map, HeapObject obj, int offset) {
-    static_assert(
-        Map::kEndOfStrongFieldsOffset == Map::kStartOfWeakFieldsOffset,
-        "Leverage that weak fields directly follow strong fields for the "
-        "check below");
-    return offset >= Map::kStartOfStrongFieldsOffset &&
-           offset < Map::kEndOfWeakFieldsOffset;
-  }
-
-  template <typename ObjectVisitor>
-  static inline void IterateBody(Map map, HeapObject obj, int object_size,
-                                 ObjectVisitor* v) {
-    IteratePointers(obj, Map::kStartOfStrongFieldsOffset,
-                    Map::kEndOfStrongFieldsOffset, v);
-    IterateMaybeWeakPointer(obj, kTransitionsOrPrototypeInfoOffset, v);
-  }
-
-  static inline int SizeOf(Map map, HeapObject obj) { return Map::kSize; }
-};
-
 class DataHandler::BodyDescriptor final : public BodyDescriptorBase {
  public:
   static bool IsValidSlot(Map map, HeapObject obj, int offset) {
@@ -1171,8 +1149,6 @@ ReturnType BodyDescriptorApply(InstanceType type, T1 p1, T2 p2, T3 p3, T4 p4) {
       return Op::template apply<JSProxy::BodyDescriptor>(p1, p2, p3, p4);
     case FOREIGN_TYPE:
       return Op::template apply<Foreign::BodyDescriptor>(p1, p2, p3, p4);
-    case MAP_TYPE:
-      return Op::template apply<Map::BodyDescriptor>(p1, p2, p3, p4);
     case CODE_TYPE:
       return Op::template apply<Code::BodyDescriptor>(p1, p2, p3, p4);
     case CELL_TYPE:
......
@@ -3767,13 +3767,10 @@ class FieldOffsetsGenerator {
   }
   void Begin(FieldSectionType type) {
     DCHECK(type != FieldSectionType::kNoSection);
-    if (!IsPointerSection(type)) return;
-    WriteMarker("kStartOf" + ToString(type) + "Offset");
   }
   void End(FieldSectionType type) {
     if (!IsPointerSection(type)) return;
     completed_sections_ |= type;
-    WriteMarker("kEndOf" + ToString(type) + "Offset");
   }
 
   FieldSectionType current_section_ = FieldSectionType::kNoSection;
......
@@ -603,6 +603,7 @@ void ComputeSlotKindsHelper(std::vector<ObjectSlotKind>* slots,
   size_t offset = start_offset;
   for (const Field& field : fields) {
     size_t field_size = std::get<0>(field.GetFieldSizeInformation());
+    if (field_size == 0) continue;
     size_t slot_index = offset / TargetArchitecture::TaggedSize();
     // Rounding-up division to find the number of slots occupied by all the
     // fields up to and including the current one.
......
@@ -696,7 +696,8 @@ class WasmIndirectFunctionTable
   DECL_PRINTER(WasmIndirectFunctionTable)
 
-  STATIC_ASSERT(kStartOfStrongFieldsOffset == kManagedNativeAllocationsOffset);
+  static constexpr int kStartOfStrongFieldsOffset =
+      kManagedNativeAllocationsOffset;
   using BodyDescriptor = FlexibleBodyDescriptor<kStartOfStrongFieldsOffset>;
 
   TQ_OBJECT_CONSTRUCTORS(WasmIndirectFunctionTable)
@@ -710,6 +711,9 @@ class WasmFunctionData
   DECL_PRINTER(WasmFunctionData)
 
+  // All fields after those inherited from Foreign are tagged.
+  static constexpr int kStartOfStrongFieldsOffset = Foreign::kHeaderSize;
+
   TQ_OBJECT_CONSTRUCTORS(WasmFunctionData)
 };
@@ -728,6 +732,8 @@ class WasmExportedFunctionData
   class BodyDescriptor;
 
+  static constexpr int kEndOfStrongFieldsOffset = kHeaderSize;
+
   TQ_OBJECT_CONSTRUCTORS(WasmExportedFunctionData)
 };
@@ -745,6 +751,8 @@ class WasmJSFunctionData
   class BodyDescriptor;
 
+  static constexpr int kEndOfStrongFieldsOffset = kHeaderSize;
+
  private:
   DECL_ACCESSORS(raw_wasm_to_js_wrapper_code, CodeT)
@@ -759,6 +767,8 @@ class WasmCapiFunctionData
   class BodyDescriptor;
 
+  static constexpr int kEndOfStrongFieldsOffset = kHeaderSize;
+
   TQ_OBJECT_CONSTRUCTORS(WasmCapiFunctionData)
 };
......