Commit d2351143 authored by Santiago Aboy Solanes, committed by Commit Bot

Reland x2 "[turbofan][ptr-compr] Remove redundant ChangeTaggedToCompressed"

This is a reland of c924f54e

Reason: Added back a bitcast that was supposed to be redundant (but it
wasn't)

Changes can be seen from patchsets 1..3.

Original change's description:
> [turbofan][ptr-compr] Remove redundant ChangeTaggedToCompressed
>
> The final goal is to eliminate it altogether. This CL just
> eliminate the redundant ones.
>
> Bug: v8:7703
> Change-Id: If6e718c373fca7c65ce46c347533ec4550fbc444
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1950968
> Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
> Commit-Queue: Santiago Aboy Solanes <solanes@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#65398}

Bug: v8:7703
Change-Id: I099e67d0255d4ad5529a73b272df893069374136
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1965582
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Commit-Queue: Santiago Aboy Solanes <solanes@chromium.org>
Cr-Commit-Position: refs/heads/master@{#65454}
parent 1d1d14da
......@@ -4988,15 +4988,6 @@ Node* EffectControlLinearizer::BuildTypedArrayDataPointer(Node* base,
return external;
} else {
if (COMPRESS_POINTERS_BOOL) {
// TurboFan does not support loading of compressed fields without
// decompression so we add the following operations to workaround that.
// We can't load the base value as word32 because in that case the
// value will not be marked as tagged in the pointer map and will not
// survive GC.
// Compress the base value back in order to be able to decompress it by
// doing an unsafe add below. Both the decompression and the compression
// will be removed by the decompression elimination pass.
base = __ ChangeTaggedToCompressed(base);
base = __ BitcastTaggedToWord(base);
// Zero-extend Tagged_t to UintPtr according to current compression
// scheme so that the addition with |external_pointer| (which already
......
......@@ -447,26 +447,26 @@ Node* GraphAssembler::IntPtrEqual(Node* left, Node* right) {
Node* GraphAssembler::TaggedEqual(Node* left, Node* right) {
if (COMPRESS_POINTERS_BOOL) {
return Word32Equal(ChangeTaggedToCompressed(left),
ChangeTaggedToCompressed(right));
return Word32Equal(left, right);
} else {
return WordEqual(left, right);
}
return WordEqual(left, right);
}
Node* GraphAssembler::SmiSub(Node* left, Node* right) {
if (COMPRESS_POINTERS_BOOL) {
return Int32Sub(ChangeTaggedToCompressed(left),
ChangeTaggedToCompressed(right));
return Int32Sub(left, right);
} else {
return IntSub(left, right);
}
return IntSub(left, right);
}
Node* GraphAssembler::SmiLessThan(Node* left, Node* right) {
if (COMPRESS_POINTERS_BOOL) {
return Int32LessThan(ChangeTaggedToCompressed(left),
ChangeTaggedToCompressed(right));
return Int32LessThan(left, right);
} else {
return IntLessThan(left, right);
}
return IntLessThan(left, right);
}
Node* GraphAssembler::Float64RoundDown(Node* value) {
......
......@@ -40,7 +40,6 @@ class BasicBlock;
V(ChangeInt32ToFloat64) \
V(ChangeInt32ToInt64) \
V(ChangeInt64ToFloat64) \
V(ChangeTaggedToCompressed) \
V(ChangeUint32ToFloat64) \
V(ChangeUint32ToUint64) \
V(Float64Abs) \
......
......@@ -402,9 +402,6 @@ bool ValueNeedsWriteBarrier(Node* value, Isolate* isolate) {
switch (value->opcode()) {
case IrOpcode::kBitcastWordToTaggedSigned:
return false;
case IrOpcode::kChangeTaggedToCompressed:
value = NodeProperties::GetValueInput(value, 0);
continue;
case IrOpcode::kHeapConstant: {
RootIndex root_index;
if (isolate->roots_table().IsRootHandle(HeapConstantOf(value->op()),
......
......@@ -240,22 +240,6 @@ void MemoryOptimizer::VisitNode(Node* node, AllocationState const* state) {
bool MemoryOptimizer::AllocationTypeNeedsUpdateToOld(Node* const node,
const Edge edge) {
if (COMPRESS_POINTERS_BOOL &&
node->opcode() == IrOpcode::kChangeTaggedToCompressed) {
// In Pointer Compression we might have a Compress node between an
// AllocateRaw and the value used as input. This case is trickier since we
// have to check all of the Compress node edges to test for a StoreField.
for (Edge const new_edge : node->use_edges()) {
if (AllocationTypeNeedsUpdateToOld(new_edge.from(), new_edge)) {
return true;
}
}
// If we arrived here, we tested all the edges of the Compress node and
// didn't find it necessary to update the AllocationType.
return false;
}
// Test to see if we need to update the AllocationType.
if (node->opcode() == IrOpcode::kStoreField && edge.index() == 1) {
Node* parent = node->InputAt(0);
......@@ -283,14 +267,6 @@ void MemoryOptimizer::VisitAllocateRaw(Node* node,
Node* const user = edge.from();
if (user->opcode() == IrOpcode::kStoreField && edge.index() == 0) {
Node* child = user->InputAt(1);
// In Pointer Compression we might have a Compress node between an
// AllocateRaw and the value used as input. If so, we need to update
// child to point to the StoreField.
if (COMPRESS_POINTERS_BOOL &&
child->opcode() == IrOpcode::kChangeTaggedToCompressed) {
child = child->InputAt(0);
}
if (child->opcode() == IrOpcode::kAllocateRaw &&
AllocationTypeOf(child->op()) == AllocationType::kYoung) {
NodeProperties::ChangeOp(child, node->op());
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment