Commit 8656d937 authored by Tobias Tebbi, committed by V8 LUCI CQ

[compiler] remove UnsafePointerAdd operator

UnsafePointerAdd is unnecessary as long as a proper bitcast is used
before the addition. The bitcast is already in the effect chain and
prevents the addition from floating before a GC operation.

Change-Id: Ieadb8a51d2d24eaa1132a62c77c674954f7e2644
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3616727
Reviewed-by: Maya Lekova <mslekova@chromium.org>
Commit-Queue: Tobias Tebbi <tebbi@chromium.org>
Cr-Commit-Position: refs/heads/main@{#80457}
parent 09f39ae0
......@@ -1940,9 +1940,6 @@ void InstructionSelector::VisitNode(Node* node) {
return MarkAsWord64(node), VisitSignExtendWord16ToInt64(node);
case IrOpcode::kSignExtendWord32ToInt64:
return MarkAsWord64(node), VisitSignExtendWord32ToInt64(node);
case IrOpcode::kUnsafePointerAdd:
MarkAsRepresentation(MachineType::PointerRepresentation(), node);
return VisitUnsafePointerAdd(node);
case IrOpcode::kF64x2Splat:
return MarkAsSimd128(node), VisitF64x2Splat(node);
case IrOpcode::kF64x2ExtractLane:
......@@ -3215,14 +3212,6 @@ void InstructionSelector::VisitComment(Node* node) {
Emit(kArchComment, 0, nullptr, 1, &operand);
}
void InstructionSelector::VisitUnsafePointerAdd(Node* node) {
#if V8_TARGET_ARCH_64_BIT
VisitInt64Add(node);
#else // V8_TARGET_ARCH_64_BIT
VisitInt32Add(node);
#endif // V8_TARGET_ARCH_64_BIT
}
void InstructionSelector::VisitRetain(Node* node) {
OperandGenerator g(this);
Emit(kArchNop, g.NoOutput(), g.UseAny(node->InputAt(0)));
......
......@@ -5625,8 +5625,8 @@ Node* EffectControlLinearizer::BuildTypedArrayDataPointer(Node* base,
if (IntPtrMatcher(base).Is(0)) {
return external;
} else {
base = __ BitcastTaggedToWord(base);
if (COMPRESS_POINTERS_BOOL) {
base = __ BitcastTaggedToWord(base);
// Zero-extend Tagged_t to UintPtr according to current compression
// scheme so that the addition with |external_pointer| (which already
// contains compensated offset value) will decompress the tagged value.
......@@ -5634,7 +5634,7 @@ Node* EffectControlLinearizer::BuildTypedArrayDataPointer(Node* base,
// details.
base = ChangeUint32ToUintPtr(base);
}
return __ UnsafePointerAdd(base, external);
return __ IntPtrAdd(base, external);
}
}
......
......@@ -473,9 +473,9 @@ Node* GraphAssembler::Retain(Node* buffer) {
return AddNode(graph()->NewNode(common()->Retain(), buffer, effect()));
}
Node* GraphAssembler::UnsafePointerAdd(Node* base, Node* external) {
return AddNode(graph()->NewNode(machine()->UnsafePointerAdd(), base, external,
effect(), control()));
Node* GraphAssembler::IntPtrAdd(Node* a, Node* b) {
return AddNode(graph()->NewNode(
machine()->Is64() ? machine()->Int64Add() : machine()->Int32Add(), a, b));
}
TNode<Number> JSGraphAssembler::PlainPrimitiveToNumber(TNode<Object> value) {
......
......@@ -321,7 +321,7 @@ class V8_EXPORT_PRIVATE GraphAssembler {
Node* ProtectedLoad(MachineType type, Node* object, Node* offset);
Node* Retain(Node* buffer);
Node* UnsafePointerAdd(Node* base, Node* external);
Node* IntPtrAdd(Node* a, Node* b);
Node* DeoptimizeIf(DeoptimizeReason reason, FeedbackSource const& feedback,
Node* condition, Node* frame_state);
......
......@@ -1269,13 +1269,6 @@ struct MachineOperatorGlobalCache {
};
DebugBreakOperator kDebugBreak;
struct UnsafePointerAddOperator final : public Operator {
UnsafePointerAddOperator()
: Operator(IrOpcode::kUnsafePointerAdd, Operator::kKontrol,
"UnsafePointerAdd", 2, 1, 1, 1, 1, 0) {}
};
UnsafePointerAddOperator kUnsafePointerAdd;
struct StackPointerGreaterThanOperator : public Operator1<StackCheckKind> {
explicit StackPointerGreaterThanOperator(StackCheckKind kind)
: Operator1<StackCheckKind>(
......@@ -1621,10 +1614,6 @@ const Operator* MachineOperatorBuilder::ProtectedStore(
UNREACHABLE();
}
const Operator* MachineOperatorBuilder::UnsafePointerAdd() {
return &cache_.kUnsafePointerAdd;
}
const Operator* MachineOperatorBuilder::StackPointerGreaterThan(
StackCheckKind kind) {
switch (kind) {
......
......@@ -410,7 +410,6 @@ class V8_EXPORT_PRIVATE MachineOperatorBuilder final
const Operator* Comment(const char* msg);
const Operator* AbortCSADcheck();
const Operator* DebugBreak();
const Operator* UnsafePointerAdd();
const Operator* Word32And();
const Operator* Word32Or();
......
......@@ -57,7 +57,6 @@ bool CanAllocate(const Node* node) {
case IrOpcode::kUnalignedLoad:
case IrOpcode::kUnalignedStore:
case IrOpcode::kUnreachable:
case IrOpcode::kUnsafePointerAdd:
case IrOpcode::kWord32AtomicAdd:
case IrOpcode::kWord32AtomicAnd:
case IrOpcode::kWord32AtomicCompareExchange:
......
......@@ -765,7 +765,6 @@
V(SignExtendWord8ToInt64) \
V(SignExtendWord16ToInt64) \
V(SignExtendWord32ToInt64) \
V(UnsafePointerAdd) \
V(StackPointerGreaterThan)
#define MACHINE_SIMD_OP_LIST(V) \
......
......@@ -398,7 +398,7 @@ bool RedundantStoreFinder::CannotObserveStoreField(Node* node) {
return opcode == IrOpcode::kLoadElement || opcode == IrOpcode::kLoad ||
opcode == IrOpcode::kLoadImmutable || opcode == IrOpcode::kStore ||
opcode == IrOpcode::kEffectPhi || opcode == IrOpcode::kStoreElement ||
opcode == IrOpcode::kUnsafePointerAdd || opcode == IrOpcode::kRetain;
opcode == IrOpcode::kRetain;
}
void RedundantStoreFinder::Visit(Node* node) {
......
......@@ -924,7 +924,6 @@ void Verifier::Visitor::Check(Node* node, const AllNodes& all) {
case IrOpcode::kAbortCSADcheck:
case IrOpcode::kDebugBreak:
case IrOpcode::kRetain:
case IrOpcode::kUnsafePointerAdd:
case IrOpcode::kRuntimeAbort:
CheckNotTyped(node);
break;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment