Commit cbf01502 authored by Georg Schmid, committed by Commit Bot

[torque] Add HeapSlice location references to Torque

This CL adds a LocationReference specifically for slices to Torque. This allows us to safely reference arrays in objects and pass around such references. For an array of T-typed elements, referencing yields a Slice<T>. In addition, the traditional element access syntax ('o.array[i]') now internally produces a slice, indexes it at 'i' and dereferences the resulting HeapReference.

Bug: v8:7793
Change-Id: I4af58e4d2feac547c55a1f6f9350a6c510383df2
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1771782
Commit-Queue: Georg Schmid <gsps@google.com>
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Cr-Commit-Position: refs/heads/master@{#63479}
parent f4a3028b
...@@ -1140,6 +1140,7 @@ extern class WasmExceptionTag extends Struct { ...@@ -1140,6 +1140,7 @@ extern class WasmExceptionTag extends Struct {
} }
const kTaggedSize: constexpr int31 generates 'kTaggedSize'; const kTaggedSize: constexpr int31 generates 'kTaggedSize';
const kDoubleSize: constexpr int31 generates 'kDoubleSize';
const kSmiTagSize: constexpr int31 generates 'kSmiTagSize'; const kSmiTagSize: constexpr int31 generates 'kSmiTagSize';
const V8_INFINITY: constexpr float64 generates 'V8_INFINITY'; const V8_INFINITY: constexpr float64 generates 'V8_INFINITY';
...@@ -2539,6 +2540,7 @@ extern macro IntPtrConstant(constexpr ContextSlot): ContextSlot; ...@@ -2539,6 +2540,7 @@ extern macro IntPtrConstant(constexpr ContextSlot): ContextSlot;
extern macro IntPtrConstant(constexpr intptr): intptr; extern macro IntPtrConstant(constexpr intptr): intptr;
extern macro PointerConstant(constexpr RawPtr): RawPtr; extern macro PointerConstant(constexpr RawPtr): RawPtr;
extern macro SingleCharacterStringConstant(constexpr string): String; extern macro SingleCharacterStringConstant(constexpr string): String;
extern macro Float64SilenceNaN(float64): float64;
extern macro BitcastWordToTaggedSigned(intptr): Smi; extern macro BitcastWordToTaggedSigned(intptr): Smi;
extern macro BitcastWordToTaggedSigned(uintptr): Smi; extern macro BitcastWordToTaggedSigned(uintptr): Smi;
......
...@@ -6,13 +6,16 @@ namespace torque_internal { ...@@ -6,13 +6,16 @@ namespace torque_internal {
// TODO(gsps): Synthesize SizeOf<T> in the compiler // TODO(gsps): Synthesize SizeOf<T> in the compiler
macro SizeOf<T: type>(): constexpr int31; macro SizeOf<T: type>(): constexpr int31;
SizeOf<Smi>(): constexpr int31 { SizeOf<Object>(): constexpr int31 {
return kTaggedSize; return kTaggedSize;
} }
SizeOf<float64>(): constexpr int31 {
return kDoubleSize;
}
// Unsafe is a marker that we require to be passed when calling internal APIs // Unsafe is a marker that we require to be passed when calling internal APIs
// // that might lead to unsoundness when used incorrectly. Unsafe markers // that might lead to unsoundness when used incorrectly. Unsafe markers should
// should therefore not be instantiated anywhere outside of this namespace. // therefore not be instantiated anywhere outside of this namespace.
struct Unsafe {} struct Unsafe {}
struct Reference<T: type> { struct Reference<T: type> {
...@@ -30,7 +33,7 @@ namespace torque_internal { ...@@ -30,7 +33,7 @@ namespace torque_internal {
} }
struct Slice<T: type> { struct Slice<T: type> {
Access(index: intptr):&T labels OutOfBounds { TryAtIndex(index: intptr):&T labels OutOfBounds {
if (Convert<uintptr>(index) < Convert<uintptr>(this.length)) { if (Convert<uintptr>(index) < Convert<uintptr>(this.length)) {
return UnsafeNewReference<T>( return UnsafeNewReference<T>(
this.object, this.offset + index * SizeOf<T>()); this.object, this.offset + index * SizeOf<T>());
...@@ -39,6 +42,20 @@ namespace torque_internal { ...@@ -39,6 +42,20 @@ namespace torque_internal {
} }
} }
AtIndex(index: intptr):&T {
return this.TryAtIndex(index) otherwise unreachable;
}
AtIndex(index: constexpr int31):&T {
const i: intptr = Convert<intptr>(index);
return this.TryAtIndex(i) otherwise unreachable;
}
AtIndex(index: Smi):&T {
const i: intptr = Convert<intptr>(index);
return this.TryAtIndex(i) otherwise unreachable;
}
Iterator(): SliceIterator<T> { Iterator(): SliceIterator<T> {
const end = this.offset + this.length * SizeOf<T>(); const end = this.offset + this.length * SizeOf<T>();
return SliceIterator<T>{ return SliceIterator<T>{
......
...@@ -247,6 +247,9 @@ class FixedDoubleArray : public FixedArrayBase { ...@@ -247,6 +247,9 @@ class FixedDoubleArray : public FixedArrayBase {
DECL_CAST(FixedDoubleArray) DECL_CAST(FixedDoubleArray)
// Start offset of elements.
static constexpr int kFloatsOffset = kHeaderSize;
// Maximally allowed length of a FixedDoubleArray. // Maximally allowed length of a FixedDoubleArray.
static const int kMaxLength = (kMaxSize - kHeaderSize) / kDoubleSize; static const int kMaxLength = (kMaxSize - kHeaderSize) / kDoubleSize;
static_assert(Internals::IsValidSmi(kMaxLength), static_assert(Internals::IsValidSmi(kMaxLength),
......
...@@ -52,6 +52,7 @@ static const char* const CONST_INT32_TYPE_STRING = "constexpr int32"; ...@@ -52,6 +52,7 @@ static const char* const CONST_INT32_TYPE_STRING = "constexpr int32";
static const char* const CONST_FLOAT64_TYPE_STRING = "constexpr float64"; static const char* const CONST_FLOAT64_TYPE_STRING = "constexpr float64";
static const char* const TORQUE_INTERNAL_NAMESPACE_STRING = "torque_internal"; static const char* const TORQUE_INTERNAL_NAMESPACE_STRING = "torque_internal";
static const char* const REFERENCE_TYPE_STRING = "Reference"; static const char* const REFERENCE_TYPE_STRING = "Reference";
static const char* const SLICE_TYPE_STRING = "Slice";
static const char* const STRUCT_NAMESPACE_STRING = "_struct"; static const char* const STRUCT_NAMESPACE_STRING = "_struct";
inline bool IsConstexprName(const std::string& name) { inline bool IsConstexprName(const std::string& name) {
......
...@@ -708,8 +708,13 @@ void CSAGenerator::EmitInstruction(const UnsafeCastInstruction& instruction, ...@@ -708,8 +708,13 @@ void CSAGenerator::EmitInstruction(const UnsafeCastInstruction& instruction,
void CSAGenerator::EmitInstruction( void CSAGenerator::EmitInstruction(
const CreateFieldReferenceInstruction& instruction, const CreateFieldReferenceInstruction& instruction,
Stack<std::string>* stack) { Stack<std::string>* stack) {
const Field& field = base::Optional<const ClassType*> class_type =
instruction.class_type->LookupField(instruction.field_name); instruction.type->ClassSupertype();
if (!class_type.has_value()) {
ReportError("Cannot create field reference of type ", instruction.type,
" which does not inherit from a class type");
}
const Field& field = class_type.value()->LookupField(instruction.field_name);
std::string offset_name = FreshNodeName(); std::string offset_name = FreshNodeName();
stack->Push(offset_name); stack->Push(offset_name);
......
...@@ -1727,14 +1727,13 @@ Callable* ImplementationVisitor::LookupCallable( ...@@ -1727,14 +1727,13 @@ Callable* ImplementationVisitor::LookupCallable(
} }
Method* ImplementationVisitor::LookupMethod( Method* ImplementationVisitor::LookupMethod(
const std::string& name, LocationReference this_reference, const std::string& name, const AggregateType* receiver_type,
const Arguments& arguments, const TypeVector& specialization_types) { const Arguments& arguments, const TypeVector& specialization_types) {
TypeVector types(arguments.parameters.ComputeTypeVector()); TypeVector types(arguments.parameters.ComputeTypeVector());
types.insert(types.begin(), this_reference.ReferencedType()); types.insert(types.begin(), receiver_type);
return Method::cast(LookupCallable( return Method::cast(LookupCallable({{}, name}, receiver_type->Methods(name),
{{}, name}, types, arguments.labels,
AggregateType::cast(this_reference.ReferencedType())->Methods(name), specialization_types));
types, arguments.labels, specialization_types));
} }
const Type* ImplementationVisitor::GetCommonType(const Type* left, const Type* ImplementationVisitor::GetCommonType(const Type* left,
...@@ -1855,7 +1854,33 @@ LocationReference ImplementationVisitor::GetLocationReference( ...@@ -1855,7 +1854,33 @@ LocationReference ImplementationVisitor::GetLocationReference(
LanguageServerData::AddDefinition(expr->field->pos, field.pos); LanguageServerData::AddDefinition(expr->field->pos, field.pos);
} }
if (field.index) { if (field.index) {
return LocationReference::IndexedFieldAccess(object_result, fieldname); assembler().Emit(
CreateFieldReferenceInstruction{object_result.type(), fieldname});
// Fetch the length from the object
{
StackScope length_scope(this);
// Get a reference to the length
const Field* index_field = field.index.value();
GenerateCopy(object_result);
assembler().Emit(CreateFieldReferenceInstruction{
object_result.type(), index_field->name_and_type.name});
VisitResult length_reference(
TypeOracle::GetReferenceType(index_field->name_and_type.type),
assembler().TopRange(2));
// Load the length from the reference and convert it to intptr
VisitResult length = GenerateFetchFromLocation(
LocationReference::HeapReference(length_reference));
VisitResult converted_length =
GenerateCall("Convert", {{length}, {}},
{TypeOracle::GetIntPtrType(), length.type()}, false);
DCHECK_EQ(converted_length.stack_range().Size(), 1);
length_scope.Yield(converted_length);
}
const Type* slice_type =
TypeOracle::GetSliceType(field.name_and_type.type);
return LocationReference::HeapSlice(
VisitResult(slice_type, assembler().TopRange(3)));
} else { } else {
assembler().Emit( assembler().Emit(
CreateFieldReferenceInstruction{*class_type, fieldname}); CreateFieldReferenceInstruction{*class_type, fieldname});
...@@ -1873,8 +1898,13 @@ LocationReference ImplementationVisitor::GetLocationReference( ...@@ -1873,8 +1898,13 @@ LocationReference ImplementationVisitor::GetLocationReference(
ElementAccessExpression* expr) { ElementAccessExpression* expr) {
LocationReference reference = GetLocationReference(expr->array); LocationReference reference = GetLocationReference(expr->array);
VisitResult index = Visit(expr->index); VisitResult index = Visit(expr->index);
if (reference.IsIndexedFieldAccess()) { if (reference.IsHeapSlice()) {
return LocationReference::IndexedFieldIndexedAccess(reference, index); Arguments arguments{{index}, {}};
const AggregateType* slice_type =
AggregateType::cast(reference.heap_slice().type());
Method* method = LookupMethod("AtIndex", slice_type, arguments, {});
return LocationReference::HeapReference(
GenerateCall(method, reference, arguments, {}, false));
} else { } else {
return LocationReference::ArrayAccess(GenerateFetchFromLocation(reference), return LocationReference::ArrayAccess(GenerateFetchFromLocation(reference),
index); index);
...@@ -1974,7 +2004,7 @@ VisitResult ImplementationVisitor::GenerateFetchFromLocation( ...@@ -1974,7 +2004,7 @@ VisitResult ImplementationVisitor::GenerateFetchFromLocation(
DCHECK_EQ(1, LoweredSlotCount(reference.ReferencedType())); DCHECK_EQ(1, LoweredSlotCount(reference.ReferencedType()));
return VisitResult(reference.ReferencedType(), assembler().TopRange(1)); return VisitResult(reference.ReferencedType(), assembler().TopRange(1));
} else { } else {
if (reference.IsIndexedFieldAccess()) { if (reference.IsHeapSlice()) {
ReportError( ReportError(
"fetching a value directly from an indexed field isn't allowed"); "fetching a value directly from an indexed field isn't allowed");
} }
...@@ -2002,12 +2032,19 @@ void ImplementationVisitor::GenerateAssignToLocation( ...@@ -2002,12 +2032,19 @@ void ImplementationVisitor::GenerateAssignToLocation(
if (reference.binding()) { if (reference.binding()) {
(*reference.binding())->SetWritten(); (*reference.binding())->SetWritten();
} }
} else if (reference.IsIndexedFieldAccess()) { } else if (reference.IsHeapSlice()) {
ReportError("assigning a value directly to an indexed field isn't allowed"); ReportError("assigning a value directly to an indexed field isn't allowed");
} else if (reference.IsHeapReference()) { } else if (reference.IsHeapReference()) {
const Type* referenced_type = reference.ReferencedType(); const Type* referenced_type = reference.ReferencedType();
GenerateCopy(reference.heap_reference()); GenerateCopy(reference.heap_reference());
GenerateImplicitConvert(referenced_type, assignment_value); VisitResult converted_assignment_value =
GenerateImplicitConvert(referenced_type, assignment_value);
if (referenced_type == TypeOracle::GetFloat64Type()) {
VisitResult silenced_float_value =
GenerateCall("Float64SilenceNaN", {{assignment_value}, {}});
assembler().Poke(converted_assignment_value.stack_range(),
silenced_float_value.stack_range(), referenced_type);
}
assembler().Emit(StoreReferenceInstruction{referenced_type}); assembler().Emit(StoreReferenceInstruction{referenced_type});
} else { } else {
DCHECK(reference.IsTemporary()); DCHECK(reference.IsTemporary());
...@@ -2126,8 +2163,8 @@ VisitResult ImplementationVisitor::GenerateCall( ...@@ -2126,8 +2163,8 @@ VisitResult ImplementationVisitor::GenerateCall(
if (this_reference) { if (this_reference) {
DCHECK(callable->IsMethod()); DCHECK(callable->IsMethod());
Method* method = Method::cast(callable); Method* method = Method::cast(callable);
// By now, the this reference should either be a variable or // By now, the this reference should either be a variable, a temporary or
// a temporary, in both cases the fetch of the VisitResult should succeed. // a Slice. In either case the fetch of the VisitResult should succeed.
VisitResult this_value = this_reference->GetVisitResult(); VisitResult this_value = this_reference->GetVisitResult();
if (method->ShouldBeInlined()) { if (method->ShouldBeInlined()) {
if (!this_value.type()->IsSubtypeOf(method->aggregate_type())) { if (!this_value.type()->IsSubtypeOf(method->aggregate_type())) {
...@@ -2345,6 +2382,7 @@ VisitResult ImplementationVisitor::Visit(CallExpression* expr, ...@@ -2345,6 +2382,7 @@ VisitResult ImplementationVisitor::Visit(CallExpression* expr,
if (auto* loc_expr = LocationExpression::DynamicCast(expr->arguments[0])) { if (auto* loc_expr = LocationExpression::DynamicCast(expr->arguments[0])) {
LocationReference ref = GetLocationReference(loc_expr); LocationReference ref = GetLocationReference(loc_expr);
if (ref.IsHeapReference()) return scope.Yield(ref.heap_reference()); if (ref.IsHeapReference()) return scope.Yield(ref.heap_reference());
if (ref.IsHeapSlice()) return scope.Yield(ref.heap_slice());
} }
ReportError("Unable to create a heap reference."); ReportError("Unable to create a heap reference.");
} }
...@@ -2398,7 +2436,7 @@ VisitResult ImplementationVisitor::Visit(CallMethodExpression* expr) { ...@@ -2398,7 +2436,7 @@ VisitResult ImplementationVisitor::Visit(CallMethodExpression* expr) {
DCHECK_EQ(expr->method->namespace_qualification.size(), 0); DCHECK_EQ(expr->method->namespace_qualification.size(), 0);
QualifiedName qualified_name = QualifiedName(method_name); QualifiedName qualified_name = QualifiedName(method_name);
Callable* callable = nullptr; Callable* callable = nullptr;
callable = LookupMethod(method_name, target, arguments, {}); callable = LookupMethod(method_name, target_type, arguments, {});
if (GlobalContext::collect_language_server_data()) { if (GlobalContext::collect_language_server_data()) {
LanguageServerData::AddDefinition(expr->method->name->pos, LanguageServerData::AddDefinition(expr->method->name->pos,
callable->IdentifierPosition()); callable->IdentifierPosition());
...@@ -3665,6 +3703,16 @@ void ReportAllUnusedMacros() { ...@@ -3665,6 +3703,16 @@ void ReportAllUnusedMacros() {
if (macro->IsTorqueMacro() && TorqueMacro::cast(macro)->IsExportedToCSA()) { if (macro->IsTorqueMacro() && TorqueMacro::cast(macro)->IsExportedToCSA()) {
continue; continue;
} }
// TODO(gsps): Mark methods of generic structs used if they are used in any
// instantiation
if (Method* method = Method::DynamicCast(macro)) {
if (StructType* struct_type =
StructType::DynamicCast(method->aggregate_type())) {
if (struct_type->GetSpecializedFrom().has_value()) {
continue;
}
}
}
std::vector<std::string> ignored_prefixes = {"Convert<", "Cast<", std::vector<std::string> ignored_prefixes = {"Convert<", "Cast<",
"FromConstexpr<"}; "FromConstexpr<"};
......
...@@ -58,6 +58,15 @@ class LocationReference { ...@@ -58,6 +58,15 @@ class LocationReference {
result.heap_reference_ = std::move(heap_reference); result.heap_reference_ = std::move(heap_reference);
return result; return result;
} }
// A reference to an array on the heap. That is, a tagged value, an offset to
// encode an inner pointer, and the number of elements.
static LocationReference HeapSlice(VisitResult heap_slice) {
LocationReference result;
DCHECK(StructType::MatchUnaryGeneric(heap_slice.type(),
TypeOracle::GetSliceGeneric()));
result.heap_slice_ = std::move(heap_slice);
return result;
}
static LocationReference ArrayAccess(VisitResult base, VisitResult offset) { static LocationReference ArrayAccess(VisitResult base, VisitResult offset) {
LocationReference result; LocationReference result;
result.eval_function_ = std::string{"[]"}; result.eval_function_ = std::string{"[]"};
...@@ -71,26 +80,6 @@ class LocationReference { ...@@ -71,26 +80,6 @@ class LocationReference {
result.eval_function_ = "." + fieldname; result.eval_function_ = "." + fieldname;
result.assign_function_ = "." + fieldname + "="; result.assign_function_ = "." + fieldname + "=";
result.call_arguments_ = {object}; result.call_arguments_ = {object};
result.index_field_ = base::nullopt;
return result;
}
static LocationReference IndexedFieldIndexedAccess(
const LocationReference& indexed_field, VisitResult index) {
LocationReference result;
DCHECK(indexed_field.IsIndexedFieldAccess());
std::string fieldname = *indexed_field.index_field_;
result.eval_function_ = "." + fieldname + "[]";
result.assign_function_ = "." + fieldname + "[]=";
result.call_arguments_ = indexed_field.call_arguments_;
result.call_arguments_.push_back(index);
result.index_field_ = fieldname;
return result;
}
static LocationReference IndexedFieldAccess(VisitResult object,
std::string fieldname) {
LocationReference result;
result.call_arguments_ = {object};
result.index_field_ = fieldname;
return result; return result;
} }
...@@ -111,17 +100,26 @@ class LocationReference { ...@@ -111,17 +100,26 @@ class LocationReference {
DCHECK(IsHeapReference()); DCHECK(IsHeapReference());
return *heap_reference_; return *heap_reference_;
} }
bool IsHeapSlice() const { return heap_slice_.has_value(); }
const VisitResult& heap_slice() const {
DCHECK(IsHeapSlice());
return *heap_slice_;
}
const Type* ReferencedType() const { const Type* ReferencedType() const {
if (IsHeapReference()) { if (IsHeapReference()) {
return *StructType::MatchUnaryGeneric(heap_reference().type(), return *StructType::MatchUnaryGeneric(heap_reference().type(),
TypeOracle::GetReferenceGeneric()); TypeOracle::GetReferenceGeneric());
} else if (IsHeapSlice()) {
return *StructType::MatchUnaryGeneric(heap_slice().type(),
TypeOracle::GetSliceGeneric());
} }
return GetVisitResult().type(); return GetVisitResult().type();
} }
const VisitResult& GetVisitResult() const { const VisitResult& GetVisitResult() const {
if (IsVariableAccess()) return variable(); if (IsVariableAccess()) return variable();
if (IsHeapSlice()) return heap_slice();
DCHECK(IsTemporary()); DCHECK(IsTemporary());
return temporary(); return temporary();
} }
...@@ -132,13 +130,6 @@ class LocationReference { ...@@ -132,13 +130,6 @@ class LocationReference {
return *temporary_description_; return *temporary_description_;
} }
bool IsArrayField() const { return index_field_.has_value(); }
bool IsIndexedFieldAccess() const {
return IsArrayField() && !IsCallAccess();
}
bool IsIndexedFieldIndexedAccess() const {
return IsArrayField() && IsCallAccess();
}
bool IsCallAccess() const { bool IsCallAccess() const {
bool is_call_access = eval_function_.has_value(); bool is_call_access = eval_function_.has_value();
DCHECK_EQ(is_call_access, assign_function_.has_value()); DCHECK_EQ(is_call_access, assign_function_.has_value());
...@@ -166,10 +157,10 @@ class LocationReference { ...@@ -166,10 +157,10 @@ class LocationReference {
base::Optional<VisitResult> temporary_; base::Optional<VisitResult> temporary_;
base::Optional<std::string> temporary_description_; base::Optional<std::string> temporary_description_;
base::Optional<VisitResult> heap_reference_; base::Optional<VisitResult> heap_reference_;
base::Optional<VisitResult> heap_slice_;
base::Optional<std::string> eval_function_; base::Optional<std::string> eval_function_;
base::Optional<std::string> assign_function_; base::Optional<std::string> assign_function_;
VisitResultVector call_arguments_; VisitResultVector call_arguments_;
base::Optional<std::string> index_field_;
base::Optional<Binding<LocalValue>*> binding_; base::Optional<Binding<LocalValue>*> binding_;
LocationReference() = default; LocationReference() = default;
...@@ -573,7 +564,8 @@ class ImplementationVisitor { ...@@ -573,7 +564,8 @@ class ImplementationVisitor {
const Arguments& arguments, const Arguments& arguments,
const TypeVector& specialization_types); const TypeVector& specialization_types);
Method* LookupMethod(const std::string& name, LocationReference target, Method* LookupMethod(const std::string& name,
const AggregateType* receiver_type,
const Arguments& arguments, const Arguments& arguments,
const TypeVector& specialization_types); const TypeVector& specialization_types);
......
...@@ -292,26 +292,23 @@ void UnsafeCastInstruction::TypeInstruction(Stack<const Type*>* stack, ...@@ -292,26 +292,23 @@ void UnsafeCastInstruction::TypeInstruction(Stack<const Type*>* stack,
void CreateFieldReferenceInstruction::TypeInstruction( void CreateFieldReferenceInstruction::TypeInstruction(
Stack<const Type*>* stack, ControlFlowGraph* cfg) const { Stack<const Type*>* stack, ControlFlowGraph* cfg) const {
ExpectSubtype(stack->Pop(), class_type); ExpectSubtype(stack->Top(), type);
stack->Push(TypeOracle::GetHeapObjectType());
stack->Push(TypeOracle::GetIntPtrType()); stack->Push(TypeOracle::GetIntPtrType());
} }
// TODO(gsps): Remove in favor of a method on Reference<T>
void LoadReferenceInstruction::TypeInstruction(Stack<const Type*>* stack, void LoadReferenceInstruction::TypeInstruction(Stack<const Type*>* stack,
ControlFlowGraph* cfg) const { ControlFlowGraph* cfg) const {
ExpectType(TypeOracle::GetIntPtrType(), stack->Pop()); ExpectType(TypeOracle::GetIntPtrType(), stack->Pop());
ExpectType(TypeOracle::GetHeapObjectType(), stack->Pop()); ExpectSubtype(stack->Pop(), TypeOracle::GetHeapObjectType());
DCHECK_EQ(std::vector<const Type*>{type}, LowerType(type)); DCHECK_EQ(std::vector<const Type*>{type}, LowerType(type));
stack->Push(type); stack->Push(type);
} }
// TODO(gsps): Remove in favor of a method on Reference<T>
void StoreReferenceInstruction::TypeInstruction(Stack<const Type*>* stack, void StoreReferenceInstruction::TypeInstruction(Stack<const Type*>* stack,
ControlFlowGraph* cfg) const { ControlFlowGraph* cfg) const {
ExpectSubtype(stack->Pop(), type); ExpectSubtype(stack->Pop(), type);
ExpectType(TypeOracle::GetIntPtrType(), stack->Pop()); ExpectType(TypeOracle::GetIntPtrType(), stack->Pop());
ExpectType(TypeOracle::GetHeapObjectType(), stack->Pop()); ExpectSubtype(stack->Pop(), TypeOracle::GetHeapObjectType());
} }
bool CallRuntimeInstruction::IsBlockTerminator() const { bool CallRuntimeInstruction::IsBlockTerminator() const {
......
...@@ -206,10 +206,9 @@ struct NamespaceConstantInstruction : InstructionBase { ...@@ -206,10 +206,9 @@ struct NamespaceConstantInstruction : InstructionBase {
struct CreateFieldReferenceInstruction : InstructionBase { struct CreateFieldReferenceInstruction : InstructionBase {
TORQUE_INSTRUCTION_BOILERPLATE() TORQUE_INSTRUCTION_BOILERPLATE()
CreateFieldReferenceInstruction(const ClassType* class_type, CreateFieldReferenceInstruction(const Type* type, std::string field_name)
std::string field_name) : type(type), field_name(std::move(field_name)) {}
: class_type(class_type), field_name(std::move(field_name)) {} const Type* type;
const ClassType* class_type;
std::string field_name; std::string field_name;
}; };
......
...@@ -72,11 +72,20 @@ class TypeOracle : public ContextualClass<TypeOracle> { ...@@ -72,11 +72,20 @@ class TypeOracle : public ContextualClass<TypeOracle> {
{TORQUE_INTERNAL_NAMESPACE_STRING}, REFERENCE_TYPE_STRING)); {TORQUE_INTERNAL_NAMESPACE_STRING}, REFERENCE_TYPE_STRING));
} }
static GenericStructType* GetSliceGeneric() {
return Declarations::LookupUniqueGenericStructType(
QualifiedName({TORQUE_INTERNAL_NAMESPACE_STRING}, SLICE_TYPE_STRING));
}
static const StructType* GetReferenceType(const Type* referenced_type) { static const StructType* GetReferenceType(const Type* referenced_type) {
return GetGenericStructTypeInstance(GetReferenceGeneric(), return GetGenericStructTypeInstance(GetReferenceGeneric(),
{referenced_type}); {referenced_type});
} }
static const StructType* GetSliceType(const Type* referenced_type) {
return GetGenericStructTypeInstance(GetSliceGeneric(), {referenced_type});
}
static const std::vector<const BuiltinPointerType*>& static const std::vector<const BuiltinPointerType*>&
AllBuiltinPointerTypes() { AllBuiltinPointerTypes() {
return Get().all_builtin_pointer_types_; return Get().all_builtin_pointer_types_;
......
...@@ -263,7 +263,7 @@ const Field& AggregateType::LookupFieldInternal(const std::string& name) const { ...@@ -263,7 +263,7 @@ const Field& AggregateType::LookupFieldInternal(const std::string& name) const {
return parent_class->LookupField(name); return parent_class->LookupField(name);
} }
} }
ReportError("no field ", name, " found"); ReportError("no field ", name, " found in ", this->ToString());
} }
const Field& AggregateType::LookupField(const std::string& name) const { const Field& AggregateType::LookupField(const std::string& name) const {
......
...@@ -292,7 +292,7 @@ T* CheckNotNull(T* x) { ...@@ -292,7 +292,7 @@ T* CheckNotNull(T* x) {
} }
template <class T> template <class T>
inline std::ostream& operator<<(std::ostream& os, Stack<T>& t) { inline std::ostream& operator<<(std::ostream& os, const Stack<T>& t) {
os << "Stack{"; os << "Stack{";
PrintCommaSeparatedList(os, t); PrintCommaSeparatedList(os, t);
os << "}"; os << "}";
......
...@@ -498,6 +498,22 @@ TEST(TestSlices) { ...@@ -498,6 +498,22 @@ TEST(TestSlices) {
ft.Call(); ft.Call();
} }
TEST(TestSliceEnumeration) {
CcTest::InitializeVM();
Isolate* isolate(CcTest::i_isolate());
i::HandleScope scope(isolate);
Handle<Context> context =
Utils::OpenHandle(*v8::Isolate::GetCurrent()->GetCurrentContext());
CodeAssemblerTester asm_tester(isolate);
TestTorqueAssembler m(asm_tester.state());
{
m.TestSliceEnumeration(m.UncheckedCast<Context>(m.HeapConstant(context)));
m.Return(m.UndefinedConstant());
}
FunctionTester ft(asm_tester.GenerateCode(), 0);
ft.Call();
}
TEST(TestStaticAssert) { TEST(TestStaticAssert) {
CcTest::InitializeVM(); CcTest::InitializeVM();
Isolate* isolate(CcTest::i_isolate()); Isolate* isolate(CcTest::i_isolate());
......
...@@ -843,27 +843,60 @@ namespace test { ...@@ -843,27 +843,60 @@ namespace test {
@export @export
macro TestSlices() { macro TestSlices() {
const it = TestIterator{count: 3};
const a = new FixedArray{map: kFixedArrayMap, length: 3, objects: ...it};
check(a.length == 3);
const oneTwoThree = Convert<Smi>(123);
a.objects[0] = oneTwoThree;
const firstRef:&Object = & a.objects[0];
check(TaggedEqual(* firstRef, oneTwoThree));
const slice: torque_internal::Slice<Object> = & a.objects;
const firstRefAgain:&Object = slice.TryAtIndex(0) otherwise unreachable;
check(TaggedEqual(* firstRefAgain, oneTwoThree));
const threeTwoOne = Convert<Smi>(321);
* firstRefAgain = threeTwoOne;
check(TaggedEqual(a.objects[0], threeTwoOne));
// *slice; // error, not allowed
// a.objects; // error, not allowed
// a.objects = slice; // error, not allowed
// TODO(gsps): Currently errors, but should be allowed:
// const _sameSlice: torque_internal::Slice<Object> = &(*slice);
// (*slice)[0] : Smi
}
@export
macro TestSliceEnumeration(implicit context: Context)(): Undefined {
const fixedArray: FixedArray = AllocateZeroedFixedArray(3); const fixedArray: FixedArray = AllocateZeroedFixedArray(3);
// TODO(gsps): Directly reference fixedArray.objects once supported for (let i: intptr = 0; i < 3; i++) {
let slice = torque_internal::UnsafeNewSlice<Smi>( check(UnsafeCast<Smi>(fixedArray.objects[i]) == 0);
fixedArray, fixedArray.objects[i] = Convert<Smi>(i) + 3;
(& fixedArray.length).offset + torque_internal::SizeOf<Smi>(), 3); }
let slice = & fixedArray.objects;
for (let i: intptr = 0; i < slice.length; i++) { for (let i: intptr = 0; i < slice.length; i++) {
let ref = slice.Access(i) otherwise unreachable; let ref = slice.TryAtIndex(i) otherwise unreachable;
check(* ref == 0); const value = UnsafeCast<Smi>(* ref);
* ref = Convert<Smi>(i) + 7; check(value == Convert<Smi>(i) + 3);
* ref = value + 4;
} }
let it = slice.Iterator(); let it = slice.Iterator();
let count: Smi = 0; let count: Smi = 0;
while (true) { while (true) {
let ref = it.Next() otherwise break; let ref = it.Next() otherwise break;
check(* ref == count + 7); const value = UnsafeCast<Smi>(* ref);
check(value == count + 7);
count++; count++;
} }
check(count == 3); check(count == 3);
check(it.Empty()); check(it.Empty());
return Undefined;
} }
@export @export
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment