Commit 2054dab3 authored by Georg Schmid, committed by Commit Bot

Add LoadFromObject and StoreToObject nodes, injected via Torque

R=tebbi@chromium.org

Change-Id: I30aab2663180382a078901c10e39cd1ad6c906f5
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1627541
Commit-Queue: Georg Schmid <gsps@google.com>
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Cr-Commit-Position: refs/heads/master@{#61897}
parent f455f86d
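
A hedged sketch of how the new nodes are meant to be used from CSA code, for orientation before the diff. The snippet is illustrative and not part of this change: the ExampleAssembler class, the CopyElementsPointer helper and the choice of JSObject::kElementsOffset are hypothetical stand-ins; only the LoadFromObject/StoreToObject signatures and the StoreToObjectWriteBarrier enum are taken from the code-assembler.{h,cc} hunks below.

// Illustrative only -- not part of the commit. Assumes a regular V8 build
// environment; the include path may differ per V8 revision.
#include "src/codegen/code-stub-assembler.h"

namespace v8 {
namespace internal {

class ExampleAssembler : public CodeStubAssembler {
 public:
  explicit ExampleAssembler(compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

  // Copies the "elements" field from one object to another. Both accesses are
  // emitted as the new LoadFromObject/StoreToObject IR nodes; the field offset
  // stays tagged here and only a write-barrier hint is chosen.
  void CopyElementsPointer(TNode<JSObject> from, TNode<JSObject> to) {
    TNode<IntPtrT> offset = IntPtrConstant(JSObject::kElementsOffset);
    Node* elements = LoadFromObject(MachineType::AnyTagged(), from, offset);
    StoreToObject(MachineRepresentation::kTagged, to, offset, elements,
                  StoreToObjectWriteBarrier::kFull);
  }
};

}  // namespace internal
}  // namespace v8

As the memory-optimizer hunks below show, these nodes stay abstract until MemoryOptimizer::VisitLoadFromObject and VisitStoreToObject rewrite them into machine Load/Store, subtracting kHeapObjectTag from the offset and resolving the write-barrier hint, so CSA templates such as StoreReference only pick a hint instead of a concrete store operation.
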
@@ -887,37 +887,36 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
                         std::is_convertible<TNode<T>, TNode<Object>>::value,
                         int>::type = 0>
   TNode<T> LoadReference(Reference reference) {
-    return CAST(LoadObjectField(reference.object, reference.offset,
-                                MachineTypeOf<T>::value));
+    return CAST(LoadFromObject(MachineTypeOf<T>::value, reference.object,
+                               reference.offset));
   }
   template <class T, typename std::enable_if<
                          std::is_convertible<TNode<T>, TNode<UntaggedT>>::value,
                          int>::type = 0>
   TNode<T> LoadReference(Reference reference) {
-    return UncheckedCast<T>(LoadObjectField(reference.object, reference.offset,
-                                            MachineTypeOf<T>::value));
+    return UncheckedCast<T>(LoadFromObject(MachineTypeOf<T>::value,
+                                           reference.object, reference.offset));
   }
   template <class T, typename std::enable_if<
                          std::is_convertible<TNode<T>, TNode<Object>>::value,
                          int>::type = 0>
   void StoreReference(Reference reference, TNode<T> value) {
-    int const_offset;
+    MachineRepresentation rep = MachineRepresentationOf<T>::value;
+    StoreToObjectWriteBarrier write_barrier = StoreToObjectWriteBarrier::kFull;
     if (std::is_same<T, Smi>::value) {
-      StoreObjectFieldNoWriteBarrier(reference.object, reference.offset, value);
-    } else if (std::is_same<T, Map>::value &&
-               ToInt32Constant(reference.offset, const_offset) &&
-               const_offset == HeapObject::kMapOffset) {
-      StoreMap(reference.object, value);
-    } else {
-      StoreObjectField(reference.object, reference.offset, value);
+      write_barrier = StoreToObjectWriteBarrier::kNone;
+    } else if (std::is_same<T, Map>::value) {
+      write_barrier = StoreToObjectWriteBarrier::kMap;
     }
+    StoreToObject(rep, reference.object, reference.offset, value,
+                  write_barrier);
   }
   template <class T, typename std::enable_if<
                          std::is_convertible<TNode<T>, TNode<UntaggedT>>::value,
                          int>::type = 0>
   void StoreReference(Reference reference, TNode<T> value) {
-    StoreObjectFieldNoWriteBarrier<T>(reference.object, reference.offset,
-                                      value);
+    StoreToObject(MachineRepresentationOf<T>::value, reference.object,
+                  reference.offset, value, StoreToObjectWriteBarrier::kNone);
   }
   // Tag a smi and store it.
...
@@ -971,6 +971,11 @@ Node* CodeAssembler::AtomicLoad(MachineType type, Node* base, Node* offset) {
   return raw_assembler()->AtomicLoad(type, base, offset);
 }
 
+Node* CodeAssembler::LoadFromObject(MachineType type, TNode<HeapObject> object,
+                                    TNode<IntPtrT> offset) {
+  return raw_assembler()->LoadFromObject(type, object, offset);
+}
+
 TNode<Object> CodeAssembler::LoadRoot(RootIndex root_index) {
   if (RootsTable::IsImmortalImmovable(root_index)) {
     Handle<Object> root = isolate()->root_handle(root_index);
@@ -996,6 +1001,30 @@ Node* CodeAssembler::Store(Node* base, Node* value) {
                        kFullWriteBarrier);
 }
 
+void CodeAssembler::StoreToObject(MachineRepresentation rep,
+                                  TNode<HeapObject> object,
+                                  TNode<IntPtrT> offset, Node* value,
+                                  StoreToObjectWriteBarrier write_barrier) {
+  WriteBarrierKind write_barrier_kind;
+  switch (write_barrier) {
+    case StoreToObjectWriteBarrier::kFull:
+      write_barrier_kind = WriteBarrierKind::kFullWriteBarrier;
+      break;
+    case StoreToObjectWriteBarrier::kMap:
+      write_barrier_kind = WriteBarrierKind::kMapWriteBarrier;
+      break;
+    case StoreToObjectWriteBarrier::kNone:
+      if (CanBeTaggedPointer(rep)) {
+        write_barrier_kind = WriteBarrierKind::kAssertNoWriteBarrier;
+      } else {
+        write_barrier_kind = WriteBarrierKind::kNoWriteBarrier;
+      }
+      break;
+  }
+  raw_assembler()->StoreToObject(rep, object, offset, value,
+                                 write_barrier_kind);
+}
+
 void CodeAssembler::OptimizedStoreField(MachineRepresentation rep,
                                         TNode<HeapObject> object, int offset,
                                         Node* value) {
...
@@ -298,6 +298,8 @@ inline bool NeedsBoundsCheck(CheckBounds check_bounds) {
   }
 }
 
+enum class StoreToObjectWriteBarrier { kNone, kMap, kFull };
+
 class AccessCheckNeeded;
 class BigIntWrapper;
 class ClassBoilerplate;
@@ -966,6 +968,9 @@ class V8_EXPORT_PRIVATE CodeAssembler {
              Node* base, Node* offset,
              LoadSensitivity needs_poisoning = LoadSensitivity::kSafe);
 
+  Node* LoadFromObject(MachineType type, TNode<HeapObject> object,
+                       TNode<IntPtrT> offset);
+
   // Load a value from the root array.
   TNode<Object> LoadRoot(RootIndex root_index);
 
@@ -991,6 +996,9 @@ class V8_EXPORT_PRIVATE CodeAssembler {
   TNode<HeapObject> OptimizedAllocate(TNode<IntPtrT> size,
                                       AllocationType allocation,
                                       AllowLargeObjects allow_large_objects);
+  void StoreToObject(MachineRepresentation rep, TNode<HeapObject> object,
+                     TNode<IntPtrT> offset, Node* value,
+                     StoreToObjectWriteBarrier write_barrier);
   void OptimizedStoreField(MachineRepresentation rep, TNode<HeapObject> object,
                            int offset, Node* value);
   void OptimizedStoreFieldAssertNoWriteBarrier(MachineRepresentation rep,
...
@@ -227,10 +227,14 @@ void MemoryOptimizer::VisitNode(Node* node, AllocationState const* state) {
       return VisitCall(node, state);
     case IrOpcode::kCallWithCallerSavedRegisters:
       return VisitCallWithCallerSavedRegisters(node, state);
+    case IrOpcode::kLoadFromObject:
+      return VisitLoadFromObject(node, state);
     case IrOpcode::kLoadElement:
      return VisitLoadElement(node, state);
     case IrOpcode::kLoadField:
       return VisitLoadField(node, state);
+    case IrOpcode::kStoreToObject:
+      return VisitStoreToObject(node, state);
     case IrOpcode::kStoreElement:
       return VisitStoreElement(node, state);
     case IrOpcode::kStoreField:
@@ -475,6 +479,32 @@ void MemoryOptimizer::VisitAllocateRaw(Node* node,
   node->Kill();
 }
 
+void MemoryOptimizer::VisitLoadFromObject(Node* node,
+                                          AllocationState const* state) {
+  DCHECK_EQ(IrOpcode::kLoadFromObject, node->opcode());
+  ObjectAccess const& access = ObjectAccessOf(node->op());
+  Node* offset = node->InputAt(1);
+  node->ReplaceInput(1, __ IntSub(offset, __ IntPtrConstant(kHeapObjectTag)));
+  NodeProperties::ChangeOp(node, machine()->Load(access.machine_type));
+  EnqueueUses(node, state);
+}
+
+void MemoryOptimizer::VisitStoreToObject(Node* node,
+                                         AllocationState const* state) {
+  DCHECK_EQ(IrOpcode::kStoreToObject, node->opcode());
+  ObjectAccess const& access = ObjectAccessOf(node->op());
+  Node* object = node->InputAt(0);
+  Node* offset = node->InputAt(1);
+  Node* value = node->InputAt(2);
+  node->ReplaceInput(1, __ IntSub(offset, __ IntPtrConstant(kHeapObjectTag)));
+  WriteBarrierKind write_barrier_kind = ComputeWriteBarrierKind(
+      node, object, value, state, access.write_barrier_kind);
+  NodeProperties::ChangeOp(
+      node, machine()->Store(StoreRepresentation(
+                access.machine_type.representation(), write_barrier_kind)));
+  EnqueueUses(node, state);
+}
+
 #undef __
 
 void MemoryOptimizer::VisitCall(Node* node, AllocationState const* state) {
...
@@ -116,8 +116,10 @@ class MemoryOptimizer final {
   void VisitAllocateRaw(Node*, AllocationState const*);
   void VisitCall(Node*, AllocationState const*);
   void VisitCallWithCallerSavedRegisters(Node*, AllocationState const*);
+  void VisitLoadFromObject(Node*, AllocationState const*);
   void VisitLoadElement(Node*, AllocationState const*);
   void VisitLoadField(Node*, AllocationState const*);
+  void VisitStoreToObject(Node*, AllocationState const*);
   void VisitStoreElement(Node*, AllocationState const*);
   void VisitStoreField(Node*, AllocationState const*);
   void VisitStore(Node*, AllocationState const*);
...
@@ -412,10 +412,12 @@
   V(LoadField)                  \
   V(LoadElement)                \
   V(LoadTypedElement)           \
+  V(LoadFromObject)             \
   V(LoadDataViewElement)        \
   V(StoreField)                 \
   V(StoreElement)               \
   V(StoreTypedElement)          \
+  V(StoreToObject)              \
   V(StoreDataViewElement)       \
   V(StoreSignedSmallElement)    \
   V(TransitionAndStoreElement)  \
...
@@ -127,32 +127,37 @@ class V8_EXPORT_PRIVATE RawMachineAssembler {
   }
 
   // Memory Operations.
-  Node* Load(MachineType type, Node* base,
-             LoadSensitivity needs_poisoning = LoadSensitivity::kSafe) {
-    return Load(type, base, IntPtrConstant(0), needs_poisoning);
-  }
-  Node* Load(MachineType type, Node* base, Node* index,
-             LoadSensitivity needs_poisoning = LoadSensitivity::kSafe) {
-    // change_op is used below to change to the correct Tagged representation
-    const Operator* change_op = nullptr;
+  std::pair<MachineType, const Operator*> InsertDecompressionIfNeeded(
+      MachineType type) {
+    const Operator* decompress_op = nullptr;
     if (COMPRESS_POINTERS_BOOL) {
       switch (type.representation()) {
         case MachineRepresentation::kTaggedPointer:
           type = MachineType::CompressedPointer();
-          change_op = machine()->ChangeCompressedPointerToTaggedPointer();
+          decompress_op = machine()->ChangeCompressedPointerToTaggedPointer();
          break;
         case MachineRepresentation::kTaggedSigned:
           type = MachineType::CompressedSigned();
-          change_op = machine()->ChangeCompressedSignedToTaggedSigned();
+          decompress_op = machine()->ChangeCompressedSignedToTaggedSigned();
           break;
         case MachineRepresentation::kTagged:
           type = MachineType::AnyCompressed();
-          change_op = machine()->ChangeCompressedToTagged();
+          decompress_op = machine()->ChangeCompressedToTagged();
           break;
         default:
          break;
       }
     }
+    return std::make_pair(type, decompress_op);
+  }
+  Node* Load(MachineType type, Node* base,
+             LoadSensitivity needs_poisoning = LoadSensitivity::kSafe) {
+    return Load(type, base, IntPtrConstant(0), needs_poisoning);
+  }
+  Node* Load(MachineType type, Node* base, Node* index,
+             LoadSensitivity needs_poisoning = LoadSensitivity::kSafe) {
+    const Operator* decompress_op;
+    std::tie(type, decompress_op) = InsertDecompressionIfNeeded(type);
     const Operator* op = machine()->Load(type);
     CHECK_NE(PoisoningMitigationLevel::kPoisonAll, poisoning_level_);
     if (needs_poisoning == LoadSensitivity::kCritical &&
@@ -161,11 +166,25 @@ class V8_EXPORT_PRIVATE RawMachineAssembler {
     }
     Node* load = AddNode(op, base, index);
-    if (change_op != nullptr) {
-      load = AddNode(change_op, load);
+    if (decompress_op != nullptr) {
+      load = AddNode(decompress_op, load);
     }
     return load;
   }
+  Node* LoadFromObject(
+      MachineType type, Node* base, Node* offset,
+      LoadSensitivity needs_poisoning = LoadSensitivity::kSafe) {
+    const Operator* decompress_op;
+    std::tie(type, decompress_op) = InsertDecompressionIfNeeded(type);
+    CHECK_EQ(needs_poisoning, LoadSensitivity::kSafe);
+    ObjectAccess access = {type, WriteBarrierKind::kNoWriteBarrier};
+    Node* load = AddNode(simplified()->LoadFromObject(access), base, offset);
+    if (decompress_op != nullptr) {
+      load = AddNode(decompress_op, load);
+    }
+    return load;
+  }
 
   std::pair<MachineRepresentation, Node*> InsertCompressionIfNeeded(
       MachineRepresentation rep, Node* value) {
     if (COMPRESS_POINTERS_BOOL) {
@@ -200,6 +219,13 @@ class V8_EXPORT_PRIVATE RawMachineAssembler {
     return AddNode(machine()->Store(StoreRepresentation(rep, write_barrier)),
                    base, index, value);
   }
+  void StoreToObject(MachineRepresentation rep, Node* object, Node* offset,
+                     Node* value, WriteBarrierKind write_barrier) {
+    std::tie(rep, value) = InsertCompressionIfNeeded(rep, value);
+    ObjectAccess access = {MachineType::TypeForRepresentation(rep),
+                           write_barrier};
+    AddNode(simplified()->StoreToObject(access), object, offset, value);
+  }
   void OptimizedStoreField(MachineRepresentation rep, Node* object, int offset,
                            Node* value, WriteBarrierKind write_barrier) {
     std::tie(rep, value) = InsertCompressionIfNeeded(rep, value);
...
@@ -113,7 +113,6 @@ size_t hash_value(ElementAccess const& access) {
                             access.machine_type);
 }
 
-
 std::ostream& operator<<(std::ostream& os, ElementAccess const& access) {
   os << access.base_is_tagged << ", " << access.header_size << ", "
      << access.type << ", " << access.machine_type << ", "
@@ -124,6 +123,20 @@ std::ostream& operator<<(std::ostream& os, ElementAccess const& access) {
   return os;
 }
 
+bool operator==(ObjectAccess const& lhs, ObjectAccess const& rhs) {
+  return lhs.machine_type == rhs.machine_type &&
+         lhs.write_barrier_kind == rhs.write_barrier_kind;
+}
+
+size_t hash_value(ObjectAccess const& access) {
+  return base::hash_combine(access.machine_type, access.write_barrier_kind);
+}
+
+std::ostream& operator<<(std::ostream& os, ObjectAccess const& access) {
+  os << access.machine_type << ", " << access.write_barrier_kind;
+  return os;
+}
+
 const FieldAccess& FieldAccessOf(const Operator* op) {
   DCHECK_NOT_NULL(op);
   DCHECK(op->opcode() == IrOpcode::kLoadField ||
@@ -131,7 +144,6 @@ const FieldAccess& FieldAccessOf(const Operator* op) {
   return OpParameter<FieldAccess>(op);
 }
 
-
 const ElementAccess& ElementAccessOf(const Operator* op) {
   DCHECK_NOT_NULL(op);
   DCHECK(op->opcode() == IrOpcode::kLoadElement ||
@@ -139,6 +151,13 @@ const ElementAccess& ElementAccessOf(const Operator* op) {
   return OpParameter<ElementAccess>(op);
 }
 
+const ObjectAccess& ObjectAccessOf(const Operator* op) {
+  DCHECK_NOT_NULL(op);
+  DCHECK(op->opcode() == IrOpcode::kLoadFromObject ||
+         op->opcode() == IrOpcode::kStoreToObject);
+  return OpParameter<ObjectAccess>(op);
+}
+
 ExternalArrayType ExternalArrayTypeOf(const Operator* op) {
   DCHECK(op->opcode() == IrOpcode::kLoadTypedElement ||
          op->opcode() == IrOpcode::kLoadDataViewElement ||
@@ -1684,7 +1703,9 @@ SPECULATIVE_NUMBER_BINOP_LIST(SPECULATIVE_NUMBER_BINOP)
   V(LoadElement, ElementAccess, Operator::kNoWrite, 2, 1, 1)             \
   V(StoreElement, ElementAccess, Operator::kNoRead, 3, 1, 0)             \
   V(LoadTypedElement, ExternalArrayType, Operator::kNoWrite, 4, 1, 1)    \
+  V(LoadFromObject, ObjectAccess, Operator::kNoWrite, 2, 1, 1)           \
   V(StoreTypedElement, ExternalArrayType, Operator::kNoRead, 5, 1, 0)    \
+  V(StoreToObject, ObjectAccess, Operator::kNoRead, 3, 1, 0)             \
   V(LoadDataViewElement, ExternalArrayType, Operator::kNoWrite, 4, 1, 1) \
   V(StoreDataViewElement, ExternalArrayType, Operator::kNoRead, 5, 1, 0)
...
@@ -142,6 +142,30 @@ V8_EXPORT_PRIVATE ElementAccess const& ElementAccessOf(const Operator* op)
 
 ExternalArrayType ExternalArrayTypeOf(const Operator* op) V8_WARN_UNUSED_RESULT;
 
+// An access descriptor for loads/stores of CSA-accessible structures.
+struct ObjectAccess {
+  MachineType machine_type;             // machine type of the field.
+  WriteBarrierKind write_barrier_kind;  // write barrier hint.
+
+  ObjectAccess()
+      : machine_type(MachineType::None()),
+        write_barrier_kind(kFullWriteBarrier) {}
+
+  ObjectAccess(MachineType machine_type, WriteBarrierKind write_barrier_kind)
+      : machine_type(machine_type), write_barrier_kind(write_barrier_kind) {}
+
+  int tag() const { return kHeapObjectTag; }
+};
+
+V8_EXPORT_PRIVATE bool operator==(ObjectAccess const&, ObjectAccess const&);
+
+size_t hash_value(ObjectAccess const&);
+
+V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream&, ObjectAccess const&);
+
+V8_EXPORT_PRIVATE ObjectAccess const& ObjectAccessOf(const Operator* op)
+    V8_WARN_UNUSED_RESULT;
+
 // The ConvertReceiverMode is used as parameter by ConvertReceiver operators.
 ConvertReceiverMode ConvertReceiverModeOf(Operator const* op)
     V8_WARN_UNUSED_RESULT;
@@ -829,6 +853,12 @@ class V8_EXPORT_PRIVATE SimplifiedOperatorBuilder final
   const Operator* TransitionAndStoreNonNumberElement(Handle<Map> fast_map,
                                                      Type value_type);
 
+  // load-from-object [base + offset]
+  const Operator* LoadFromObject(ObjectAccess const&);
+
+  // store-to-object [base + offset], value
+  const Operator* StoreToObject(ObjectAccess const&);
+
   // load-typed-element buffer, [base + external + index]
   const Operator* LoadTypedElement(ExternalArrayType const&);
...
@@ -2168,6 +2168,8 @@ Type Typer::Visitor::TypeLoadElement(Node* node) {
   return ElementAccessOf(node->op()).type;
 }
 
+Type Typer::Visitor::TypeLoadFromObject(Node* node) { UNREACHABLE(); }
+
 Type Typer::Visitor::TypeLoadTypedElement(Node* node) {
   switch (ExternalArrayTypeOf(node->op())) {
 #define TYPED_ARRAY_CASE(ElemType, type, TYPE, ctype) \
@@ -2194,6 +2196,8 @@ Type Typer::Visitor::TypeStoreField(Node* node) { UNREACHABLE(); }
 
 Type Typer::Visitor::TypeStoreElement(Node* node) { UNREACHABLE(); }
 
+Type Typer::Visitor::TypeStoreToObject(Node* node) { UNREACHABLE(); }
+
 Type Typer::Visitor::TypeTransitionAndStoreElement(Node* node) {
   UNREACHABLE();
 }
...
@@ -1563,6 +1563,9 @@ void Verifier::Visitor::Check(Node* node, const AllNodes& all) {
       // CheckValueInputIs(node, 0, Type::Object());
       // CheckTypeIs(node, ElementAccessOf(node->op()).type));
       break;
+    case IrOpcode::kLoadFromObject:
+      // TODO(gsps): Can we check some types here?
+      break;
     case IrOpcode::kLoadTypedElement:
       break;
     case IrOpcode::kLoadDataViewElement:
@@ -1581,6 +1584,9 @@ void Verifier::Visitor::Check(Node* node, const AllNodes& all) {
       // CheckValueInputIs(node, 1, ElementAccessOf(node->op()).type));
       CheckNotTyped(node);
       break;
+    case IrOpcode::kStoreToObject:
+      // TODO(gsps): Can we check some types here?
+      break;
     case IrOpcode::kTransitionAndStoreElement:
       CheckNotTyped(node);
       break;
...