Commit 8656d937 authored by Tobias Tebbi, committed by V8 LUCI CQ

[compiler] remove UnsafePointerAdd operator

UnsafePointerAdd is unnecessary as long as a proper bitcast is used
before the addition. The bitcast is already in the effect chain and
prevents the addition from floating before a GC operation.

Change-Id: Ieadb8a51d2d24eaa1132a62c77c674954f7e2644
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3616727
Reviewed-by: Maya Lekova <mslekova@chromium.org>
Commit-Queue: Tobias Tebbi <tebbi@chromium.org>
Cr-Commit-Position: refs/heads/main@{#80457}
parent 09f39ae0
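For context, a minimal sketch of the pattern this CL relies on, simplified from the BuildTypedArrayDataPointer hunk further down (GraphAssembler style; the __ prefix is the usual graph-assembler macro). The tagged-to-word bitcast is threaded through the effect chain, so the plain machine-level add that consumes it cannot be scheduled ahead of a preceding GC operation, which is the ordering UnsafePointerAdd used to provide:

    // Sketch only, simplified from the effect-control-linearizer hunk below.
    // The bitcast is an effectful node, so the add stays ordered after any
    // earlier allocation or GC point.
    base = __ BitcastTaggedToWord(base);
    if (COMPRESS_POINTERS_BOOL) {
      // Zero-extend the compressed Tagged_t to a full machine word first.
      base = ChangeUint32ToUintPtr(base);
    }
    // A plain machine add: GraphAssembler::IntPtrAdd now picks Int64Add or
    // Int32Add based on the target word size (see the graph-assembler hunk).
    return __ IntPtrAdd(base, external);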
@@ -1940,9 +1940,6 @@ void InstructionSelector::VisitNode(Node* node) {
       return MarkAsWord64(node), VisitSignExtendWord16ToInt64(node);
     case IrOpcode::kSignExtendWord32ToInt64:
       return MarkAsWord64(node), VisitSignExtendWord32ToInt64(node);
-    case IrOpcode::kUnsafePointerAdd:
-      MarkAsRepresentation(MachineType::PointerRepresentation(), node);
-      return VisitUnsafePointerAdd(node);
     case IrOpcode::kF64x2Splat:
       return MarkAsSimd128(node), VisitF64x2Splat(node);
     case IrOpcode::kF64x2ExtractLane:
@@ -3215,14 +3212,6 @@ void InstructionSelector::VisitComment(Node* node) {
   Emit(kArchComment, 0, nullptr, 1, &operand);
 }
-void InstructionSelector::VisitUnsafePointerAdd(Node* node) {
-#if V8_TARGET_ARCH_64_BIT
-  VisitInt64Add(node);
-#else   // V8_TARGET_ARCH_64_BIT
-  VisitInt32Add(node);
-#endif  // V8_TARGET_ARCH_64_BIT
-}
 void InstructionSelector::VisitRetain(Node* node) {
   OperandGenerator g(this);
   Emit(kArchNop, g.NoOutput(), g.UseAny(node->InputAt(0)));
@@ -5625,8 +5625,8 @@ Node* EffectControlLinearizer::BuildTypedArrayDataPointer(Node* base,
   if (IntPtrMatcher(base).Is(0)) {
     return external;
   } else {
-    if (COMPRESS_POINTERS_BOOL) {
-      base = __ BitcastTaggedToWord(base);
+    base = __ BitcastTaggedToWord(base);
+    if (COMPRESS_POINTERS_BOOL) {
       // Zero-extend Tagged_t to UintPtr according to current compression
       // scheme so that the addition with |external_pointer| (which already
       // contains compensated offset value) will decompress the tagged value.
@@ -5634,7 +5634,7 @@ Node* EffectControlLinearizer::BuildTypedArrayDataPointer(Node* base,
       // details.
       base = ChangeUint32ToUintPtr(base);
     }
-    return __ UnsafePointerAdd(base, external);
+    return __ IntPtrAdd(base, external);
   }
 }
@@ -473,9 +473,9 @@ Node* GraphAssembler::Retain(Node* buffer) {
   return AddNode(graph()->NewNode(common()->Retain(), buffer, effect()));
 }
-Node* GraphAssembler::UnsafePointerAdd(Node* base, Node* external) {
-  return AddNode(graph()->NewNode(machine()->UnsafePointerAdd(), base, external,
-                                  effect(), control()));
+Node* GraphAssembler::IntPtrAdd(Node* a, Node* b) {
+  return AddNode(graph()->NewNode(
+      machine()->Is64() ? machine()->Int64Add() : machine()->Int32Add(), a, b));
 }
 TNode<Number> JSGraphAssembler::PlainPrimitiveToNumber(TNode<Object> value) {
@@ -321,7 +321,7 @@ class V8_EXPORT_PRIVATE GraphAssembler {
   Node* ProtectedLoad(MachineType type, Node* object, Node* offset);
   Node* Retain(Node* buffer);
-  Node* UnsafePointerAdd(Node* base, Node* external);
+  Node* IntPtrAdd(Node* a, Node* b);
   Node* DeoptimizeIf(DeoptimizeReason reason, FeedbackSource const& feedback,
                      Node* condition, Node* frame_state);
@@ -1269,13 +1269,6 @@ struct MachineOperatorGlobalCache {
   };
   DebugBreakOperator kDebugBreak;
-  struct UnsafePointerAddOperator final : public Operator {
-    UnsafePointerAddOperator()
-        : Operator(IrOpcode::kUnsafePointerAdd, Operator::kKontrol,
-                   "UnsafePointerAdd", 2, 1, 1, 1, 1, 0) {}
-  };
-  UnsafePointerAddOperator kUnsafePointerAdd;
   struct StackPointerGreaterThanOperator : public Operator1<StackCheckKind> {
     explicit StackPointerGreaterThanOperator(StackCheckKind kind)
         : Operator1<StackCheckKind>(
@@ -1621,10 +1614,6 @@ const Operator* MachineOperatorBuilder::ProtectedStore(
   UNREACHABLE();
 }
-const Operator* MachineOperatorBuilder::UnsafePointerAdd() {
-  return &cache_.kUnsafePointerAdd;
-}
 const Operator* MachineOperatorBuilder::StackPointerGreaterThan(
     StackCheckKind kind) {
   switch (kind) {
@@ -410,7 +410,6 @@ class V8_EXPORT_PRIVATE MachineOperatorBuilder final
   const Operator* Comment(const char* msg);
   const Operator* AbortCSADcheck();
   const Operator* DebugBreak();
-  const Operator* UnsafePointerAdd();
   const Operator* Word32And();
   const Operator* Word32Or();
@@ -57,7 +57,6 @@ bool CanAllocate(const Node* node) {
     case IrOpcode::kUnalignedLoad:
     case IrOpcode::kUnalignedStore:
     case IrOpcode::kUnreachable:
-    case IrOpcode::kUnsafePointerAdd:
     case IrOpcode::kWord32AtomicAdd:
     case IrOpcode::kWord32AtomicAnd:
     case IrOpcode::kWord32AtomicCompareExchange:
@@ -765,7 +765,6 @@
   V(SignExtendWord8ToInt64)    \
   V(SignExtendWord16ToInt64)   \
   V(SignExtendWord32ToInt64)   \
-  V(UnsafePointerAdd)          \
   V(StackPointerGreaterThan)
 #define MACHINE_SIMD_OP_LIST(V) \
@@ -398,7 +398,7 @@ bool RedundantStoreFinder::CannotObserveStoreField(Node* node) {
   return opcode == IrOpcode::kLoadElement || opcode == IrOpcode::kLoad ||
          opcode == IrOpcode::kLoadImmutable || opcode == IrOpcode::kStore ||
          opcode == IrOpcode::kEffectPhi || opcode == IrOpcode::kStoreElement ||
-         opcode == IrOpcode::kUnsafePointerAdd || opcode == IrOpcode::kRetain;
+         opcode == IrOpcode::kRetain;
 }
 void RedundantStoreFinder::Visit(Node* node) {
@@ -924,7 +924,6 @@ void Verifier::Visitor::Check(Node* node, const AllNodes& all) {
     case IrOpcode::kAbortCSADcheck:
     case IrOpcode::kDebugBreak:
     case IrOpcode::kRetain:
-    case IrOpcode::kUnsafePointerAdd:
     case IrOpcode::kRuntimeAbort:
       CheckNotTyped(node);
       break;