Commit 571e35e8 authored by Tobias Tebbi, committed by Commit Bot

[torque] introduce const slices

With this change, there are const and mutable versions of slices, in
analogy to the const and mutable references we already have.
A const slice is a read-only view into memory; it does not mean that
nobody else has a writable view of it.

An array field in a Torque class produces const slices if it is declared
as const.

Due to limitations in the Torque type system, mutable slices are not
a subtype of const slices of the same type.
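
For illustration, a hypothetical class (invented here, not part of this
change): the const-ness of an indexed field decides which slice type its
generated accessor returns.

  extern class ExampleTable extends HeapObject {
    const capacity: Smi;
    const keys[capacity]: Smi;   // &table.keys yields a ConstSlice<Smi>.
    values[capacity]: Object;    // &table.values yields a MutableSlice<Object>.
  }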

Bug: v8:7793
Change-Id: I1ba96e1ee82bf03b5fdc824488981f2a6b5eae8a
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2560195
Reviewed-by: Seth Brenith <seth.brenith@microsoft.com>
Reviewed-by: Nico Hartmann <nicohartmann@chromium.org>
Commit-Queue: Tobias Tebbi <tebbi@chromium.org>
Cr-Commit-Position: refs/heads/master@{#71481}
parent c48cf114
@@ -97,13 +97,13 @@ macro NumberToStringSmi(x: int32, radix: int32): String labels Slow {
while (n > 0) {
const digit: int32 = n % radix;
n = n / radix;
strSeq.chars[cursor] = ToCharCode(digit);
*UnsafeConstCast(&strSeq.chars[cursor]) = ToCharCode(digit);
cursor = cursor - 1;
}
if (isNegative) {
assert(cursor == 0);
// Insert '-' to result.
strSeq.chars[0] = 45;
*UnsafeConstCast(&strSeq.chars[0]) = 45;
} else {
assert(cursor == -1);
// In sync with Factory::SmiToString: If radix = 10 and positive number,
......
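Because the chars field of sequential strings becomes const in this change
(see the string.tq hunk below), the stores above now go through
UnsafeConstCast, which turns the const &char8 produced by indexing into a
writable reference. A minimal sketch of the same pattern (hypothetical helper;
it assumes str is a freshly allocated string that is not yet visible to
anyone else):

  macro FillWithDashes(str: SeqOneByteString) {
    const chars: ConstSlice<char8> = &str.chars;
    for (let i: intptr = 0; i < chars.length; ++i) {
      // 45 is the character code of '-'; the store needs the explicit cast.
      *UnsafeConstCast(&str.chars[i]) = 45;
    }
  }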
@@ -2,6 +2,11 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Unfortunately, MutableSlice<> is currently not a subtype of ConstSlice.
// This would require struct subtyping, which is not yet supported.
type MutableSlice<T: type> extends torque_internal::Slice<T, &T>;
type ConstSlice<T: type> extends torque_internal::Slice<T, const &T>;
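
One consequence of the missing subtyping: a read-only helper cannot declare a
ConstSlice<T> parameter and still accept MutableSlice<T> arguments. A possible
workaround, sketched here with a hypothetical macro that is not part of this
change, is to make the slice type itself a generic parameter so that each
flavor gets its own instantiation:

  // Hypothetical read-only helper; SliceType is instantiated separately for
  // MutableSlice<T> and ConstSlice<T>.
  macro IsEmptySlice<SliceType: type>(slice: SliceType): bool {
    return slice.length == 0;
  }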
namespace torque_internal {
// Unsafe is a marker that we require to be passed when calling internal APIs
// that might lead to unsoundness when used incorrectly. Unsafe markers should
@@ -40,8 +45,8 @@ macro ReferenceCast<T: type, U: type>(ref:&U):&T {
}
} // namespace unsafe
struct Slice<T: type> {
macro TryAtIndex(index: intptr):&T labels OutOfBounds {
struct Slice<T: type, Reference: type> {
macro TryAtIndex(index: intptr): Reference labels OutOfBounds {
if (Convert<uintptr>(index) < Convert<uintptr>(this.length)) {
return unsafe::NewReference<T>(
this.object, this.offset + index * %SizeOf<T>());
@@ -50,40 +55,41 @@ struct Slice<T: type> {
}
}
macro AtIndex(index: intptr):&T {
macro AtIndex(index: intptr): Reference {
return this.TryAtIndex(index) otherwise unreachable;
}
macro AtIndex(index: uintptr):&T {
macro AtIndex(index: uintptr): Reference {
return this.TryAtIndex(Convert<intptr>(index)) otherwise unreachable;
}
macro AtIndex(index: constexpr int31):&T {
macro AtIndex(index: constexpr int31): Reference {
const i: intptr = Convert<intptr>(index);
return this.TryAtIndex(i) otherwise unreachable;
}
macro AtIndex(index: Smi):&T {
macro AtIndex(index: Smi): Reference {
const i: intptr = Convert<intptr>(index);
return this.TryAtIndex(i) otherwise unreachable;
}
macro Iterator(): SliceIterator<T> {
macro Iterator(): SliceIterator<T, Reference> {
const end = this.offset + this.length * %SizeOf<T>();
return SliceIterator<T>{
return SliceIterator<T, Reference>{
object: this.object,
start: this.offset,
end: end,
unsafeMarker: Unsafe {}
};
}
macro Iterator(startIndex: intptr, endIndex: intptr): SliceIterator<T> {
macro Iterator(
startIndex: intptr, endIndex: intptr): SliceIterator<T, Reference> {
check(
Convert<uintptr>(endIndex) <= Convert<uintptr>(this.length) &&
Convert<uintptr>(startIndex) <= Convert<uintptr>(endIndex));
const start = this.offset + startIndex * %SizeOf<T>();
const end = this.offset + endIndex * %SizeOf<T>();
return SliceIterator<T>{
return SliceIterator<T, Reference>{
object: this.object,
start,
end,
@@ -99,29 +105,39 @@ struct Slice<T: type> {
namespace unsafe {
macro NewSlice<T: type>(
object: HeapObject, offset: intptr, length: intptr): Slice<T> {
return Slice<T>{
macro NewMutableSlice<T: type>(
object: HeapObject, offset: intptr, length: intptr): MutableSlice<T> {
return %RawDownCast<MutableSlice<T>>(Slice<T, &T>{
object: object,
offset: offset,
length: length,
unsafeMarker: Unsafe {}
};
});
}
macro NewConstSlice<T: type>(
object: HeapObject, offset: intptr, length: intptr): ConstSlice<T> {
return %RawDownCast<ConstSlice<T>>(Slice<T, const &T>{
object: object,
offset: offset,
length: length,
unsafeMarker: Unsafe {}
});
}
macro NewOffHeapSlice<T: type>(
startPointer: RawPtr<T>, length: intptr): Slice<T> {
return Slice<T>{
macro NewOffHeapConstSlice<T: type>(
startPointer: RawPtr<T>, length: intptr): ConstSlice<T> {
return %RawDownCast<ConstSlice<T>>(Slice<T, const &T>{
object: kZeroBitPattern,
offset: Convert<intptr>(Convert<RawPtr>(startPointer)) + kHeapObjectTag,
length: length,
unsafeMarker: Unsafe {}
};
});
}
} // namespace unsafe
struct SliceIterator<T: type> {
struct SliceIterator<T: type, Reference: type> {
macro Empty(): bool {
return this.start == this.end;
}
@@ -130,7 +146,7 @@ struct SliceIterator<T: type> {
return *this.NextReference() otherwise NoMore;
}
macro NextReference():&T labels NoMore {
macro NextReference(): Reference labels NoMore {
if (this.Empty()) {
goto NoMore;
} else {
@@ -194,7 +210,7 @@ macro AllocateFromNew(
}
macro InitializeFieldsFromIterator<T: type, Iterator: type>(
target: Slice<T>, originIterator: Iterator) {
target: MutableSlice<T>, originIterator: Iterator) {
let targetIterator = target.Iterator();
let originIterator = originIterator;
while (true) {
@@ -204,9 +220,9 @@ macro InitializeFieldsFromIterator<T: type, Iterator: type>(
}
// Dummy implementations: do not initialize for UninitializedIterator.
InitializeFieldsFromIterator<char8, UninitializedIterator>(
_target: Slice<char8>, _originIterator: UninitializedIterator) {}
_target: MutableSlice<char8>, _originIterator: UninitializedIterator) {}
InitializeFieldsFromIterator<char16, UninitializedIterator>(
_target: Slice<char16>, _originIterator: UninitializedIterator) {}
_target: MutableSlice<char16>, _originIterator: UninitializedIterator) {}
extern macro IsDoubleHole(HeapObject, intptr): bool;
extern macro StoreDoubleHole(HeapObject, intptr);
......
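A usage sketch (hypothetical macro, modeled on the TestOffHeapSlice test at
the end of this change): elements of a const slice are read by dereferencing
the const reference that AtIndex returns; storing through it would require an
explicit UnsafeConstCast.

  macro ContainsChar(slice: ConstSlice<char8>, needle: char8): bool {
    for (let i: intptr = 0; i < slice.length; ++i) {
      if (*slice.AtIndex(i) == needle) {
        return true;
      }
    }
    return false;
  }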
@@ -51,13 +51,13 @@ extern class SeqString extends String {
@generateBodyDescriptor
@doNotGenerateCast
extern class SeqOneByteString extends SeqString {
chars[length]: char8;
const chars[length]: char8;
}
@generateCppClass
@generateBodyDescriptor
@doNotGenerateCast
extern class SeqTwoByteString extends SeqString {
chars[length]: char16;
const chars[length]: char16;
}
@generateCppClass
......
@@ -69,7 +69,8 @@ static const char* const CONST_FLOAT64_TYPE_STRING = "constexpr float64";
static const char* const TORQUE_INTERNAL_NAMESPACE_STRING = "torque_internal";
static const char* const MUTABLE_REFERENCE_TYPE_STRING = "MutableReference";
static const char* const CONST_REFERENCE_TYPE_STRING = "ConstReference";
static const char* const SLICE_TYPE_STRING = "Slice";
static const char* const MUTABLE_SLICE_TYPE_STRING = "MutableSlice";
static const char* const CONST_SLICE_TYPE_STRING = "ConstSlice";
static const char* const WEAK_TYPE_STRING = "Weak";
static const char* const SMI_TAGGED_TYPE_STRING = "SmiTagged";
static const char* const UNINITIALIZED_ITERATOR_TYPE_STRING =
......
@@ -1380,7 +1380,8 @@ LocationReference ImplementationVisitor::GenerateFieldReferenceForInit(
VisitResult length =
GenerateCopy(layout.array_lengths.at(field.name_and_type.name));
result_range.Extend(length.stack_range());
const Type* slice_type = TypeOracle::GetSliceType(field.name_and_type.type);
const Type* slice_type =
TypeOracle::GetMutableSliceType(field.name_and_type.type);
return LocationReference::HeapSlice(VisitResult(slice_type, result_range));
} else {
// Const fields are writable during initialization.
@@ -2235,8 +2236,8 @@ LocationReference ImplementationVisitor::GetLocationReference(
VisitResult index = Visit(expr->index);
if (reference.IsHeapSlice()) {
Arguments arguments{{index}, {}};
const AggregateType* slice_type =
AggregateType::cast(reference.heap_slice().type());
const StructType* slice_type =
*reference.heap_slice().type()->StructSupertype();
Method* method = LookupMethod("AtIndex", slice_type, arguments, {});
// The reference has to be treated like a normal value when calling methods
// on the underlying slice implementation.
@@ -2953,7 +2954,7 @@ VisitResult ImplementationVisitor::Visit(CallMethodExpression* expr) {
target = LocationReference::Temporary(result, "this parameter");
}
const AggregateType* target_type =
AggregateType::DynamicCast(*target.ReferencedType());
(*target.ReferencedType())->AggregateSupertype().value_or(nullptr);
if (!target_type) {
ReportError("target of method call not a struct or class type");
}
......
@@ -64,7 +64,9 @@ class LocationReference {
static LocationReference HeapSlice(VisitResult heap_slice) {
LocationReference result;
DCHECK(Type::MatchUnaryGeneric(heap_slice.type(),
TypeOracle::GetSliceGeneric()));
TypeOracle::GetConstSliceGeneric()) ||
Type::MatchUnaryGeneric(heap_slice.type(),
TypeOracle::GetMutableSliceGeneric()));
result.heap_slice_ = std::move(heap_slice);
return result;
}
@@ -142,8 +144,12 @@ class LocationReference {
return *TypeOracle::MatchReferenceGeneric(heap_reference().type());
}
if (IsHeapSlice()) {
return *Type::MatchUnaryGeneric(heap_slice().type(),
TypeOracle::GetSliceGeneric());
if (auto type = Type::MatchUnaryGeneric(
heap_slice().type(), TypeOracle::GetMutableSliceGeneric())) {
return *type;
}
return Type::MatchUnaryGeneric(heap_slice().type(),
TypeOracle::GetConstSliceGeneric());
}
if (IsBitFieldAccess()) {
return bit_field_->name_and_type.type;
......
@@ -97,9 +97,13 @@ class TypeOracle : public ContextualClass<TypeOracle> {
static base::Optional<const Type*> MatchReferenceGeneric(
const Type* reference_type, bool* is_const = nullptr);
static GenericType* GetSliceGeneric() {
static GenericType* GetMutableSliceGeneric() {
return Declarations::LookupUniqueGenericType(
QualifiedName({TORQUE_INTERNAL_NAMESPACE_STRING}, SLICE_TYPE_STRING));
QualifiedName(MUTABLE_SLICE_TYPE_STRING));
}
static GenericType* GetConstSliceGeneric() {
return Declarations::LookupUniqueGenericType(
QualifiedName(CONST_SLICE_TYPE_STRING));
}
static GenericType* GetWeakGeneric() {
@@ -122,8 +126,11 @@ class TypeOracle : public ContextualClass<TypeOracle> {
return GetReferenceType(referenced_type, false);
}
static const Type* GetSliceType(const Type* referenced_type) {
return GetGenericTypeInstance(GetSliceGeneric(), {referenced_type});
static const Type* GetMutableSliceType(const Type* referenced_type) {
return GetGenericTypeInstance(GetMutableSliceGeneric(), {referenced_type});
}
static const Type* GetConstSliceType(const Type* referenced_type) {
return GetGenericTypeInstance(GetConstSliceGeneric(), {referenced_type});
}
static const std::vector<const BuiltinPointerType*>&
......
@@ -128,6 +128,15 @@ base::Optional<const StructType*> Type::StructSupertype() const {
return base::nullopt;
}
base::Optional<const AggregateType*> Type::AggregateSupertype() const {
for (const Type* t = this; t != nullptr; t = t->parent()) {
if (auto* aggregate_type = AggregateType::DynamicCast(t)) {
return aggregate_type;
}
}
return base::nullopt;
}
// static
const Type* Type::CommonSupertype(const Type* a, const Type* b) {
int diff = a->Depth() - b->Depth();
@@ -775,26 +784,24 @@ void ClassType::GenerateSliceAccessor(size_t field_index) {
//
// If the field has a known offset (in this example, 16):
// FieldSliceClassNameFieldName(o: ClassName) {
// return torque_internal::Slice<FieldType> {
// return torque_internal::unsafe::New{Const,Mutable}Slice<FieldType>(
// object: o,
// offset: 16,
// length: torque_internal::%IndexedFieldLength<ClassName>(
// o, "field_name")),
// unsafeMarker: torque_internal::Unsafe {}
// };
// o, "field_name")
// );
// }
//
// If the field has an unknown offset, and the previous field is named p, and
// an item in the previous field has size 4:
// FieldSliceClassNameFieldName(o: ClassName) {
// const previous = &o.p;
// return torque_internal::Slice<FieldType> {
// return torque_internal::unsafe::New{Const,Mutable}Slice<FieldType>(
// object: o,
// offset: previous.offset + 4 * previous.length,
// length: torque_internal::%IndexedFieldLength<ClassName>(
// o, "field_name")),
// unsafeMarker: torque_internal::Unsafe {}
// };
// o, "field_name")
// );
// }
const Field& field = fields_[field_index];
std::string macro_name = GetSliceMacroName(field);
@@ -803,7 +810,10 @@ void ClassType::GenerateSliceAccessor(size_t field_index) {
signature.parameter_names.push_back(parameter_identifier);
signature.parameter_types.types.push_back(this);
signature.parameter_types.var_args = false;
signature.return_type = TypeOracle::GetSliceType(field.name_and_type.type);
signature.return_type =
field.const_qualified
? TypeOracle::GetConstSliceType(field.name_and_type.type)
: TypeOracle::GetMutableSliceType(field.name_and_type.type);
std::vector<Statement*> statements;
Expression* offset_expression = nullptr;
@@ -863,25 +873,18 @@ void ClassType::GenerateSliceAccessor(size_t field_index) {
{parameter, MakeNode<StringLiteralExpression>(
StringLiteralQuote(field.name_and_type.name))});
// torque_internal::Unsafe {}
Expression* unsafe_expression = MakeStructExpression(
MakeBasicTypeExpression({"torque_internal"}, "Unsafe"), {});
// torque_internal::Slice<FieldType> {
// torque_internal::unsafe::New{Const,Mutable}Slice<FieldType>(
// object: o,
// offset: <<offset_expression>>,
// length: torque_internal::%IndexedFieldLength<ClassName>(
// o, "field_name")),
// unsafeMarker: torque_internal::Unsafe {}
// }
Expression* slice_expression = MakeStructExpression(
MakeBasicTypeExpression(
{"torque_internal"}, "Slice",
{MakeNode<PrecomputedTypeExpression>(field.name_and_type.type)}),
{{MakeNode<Identifier>("object"), parameter},
{MakeNode<Identifier>("offset"), offset_expression},
{MakeNode<Identifier>("length"), length_expression},
{MakeNode<Identifier>("unsafeMarker"), unsafe_expression}});
// o, "field_name")
// )
IdentifierExpression* new_struct = MakeIdentifierExpression(
{"torque_internal", "unsafe"},
field.const_qualified ? "NewConstSlice" : "NewMutableSlice",
{MakeNode<PrecomputedTypeExpression>(field.name_and_type.type)});
Expression* slice_expression = MakeCallExpression(
new_struct, {parameter, offset_expression, length_expression});
statements.push_back(MakeNode<ReturnStatement>(slice_expression));
Statement* block =
......
@@ -139,6 +139,7 @@ class V8_EXPORT_PRIVATE Type : public TypeBase {
std::string GetConstexprGeneratedTypeName() const;
base::Optional<const ClassType*> ClassSupertype() const;
base::Optional<const StructType*> StructSupertype() const;
base::Optional<const AggregateType*> AggregateSupertype() const;
virtual std::vector<TypeChecker> GetTypeCheckers() const { return {}; }
virtual std::string GetRuntimeType() const;
static const Type* CommonSupertype(const Type* a, const Type* b);
......
@@ -860,7 +860,7 @@ macro TestSlices() {
const firstRef:&Object = &a.objects[0];
check(TaggedEqual(*firstRef, oneTwoThree));
const slice: torque_internal::Slice<Object> = &a.objects;
const slice: MutableSlice<Object> = &a.objects;
const firstRefAgain:&Object = slice.TryAtIndex(0) otherwise unreachable;
check(TaggedEqual(*firstRefAgain, oneTwoThree));
@@ -873,7 +873,7 @@ macro TestSlices() {
// a.objects = slice; // error, not allowed
// TODO(gsps): Currently errors, but should be allowed:
// const _sameSlice: torque_internal::Slice<Object> = &(*slice);
// const _sameSlice: MutableSlice<Object> = &(*slice);
// (*slice)[0] : Smi
}
@@ -1347,7 +1347,7 @@ macro TestOffHeapSlice(ptr: RawPtr<char8>, length: intptr) {
check(*torque_internal::unsafe::NewOffHeapReference(ptr) == string.chars[0]);
let offHeapSlice = torque_internal::unsafe::NewOffHeapSlice(ptr, length);
let offHeapSlice = torque_internal::unsafe::NewOffHeapConstSlice(ptr, length);
let onHeapSlice = &string.chars;
for (let i: intptr = 0; i < onHeapSlice.length; ++i) {
check(*onHeapSlice.AtIndex(i) == *offHeapSlice.AtIndex(i));
......