Commit 7f69a0da authored by Santiago Aboy Solanes, committed by Commit Bot

[turbofan] Compress HeapConstants in DecompressionOptimizer

We should be encountering this due to TaggedEquality.
DecompressionElimination used to take care of this, but it will not be
present in the new system.

Bug: v8:7703
Change-Id: I9fe00ee116ed1514cb4c465a8d19df6e785ef913
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1868623
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Commit-Queue: Santiago Aboy Solanes <solanes@chromium.org>
Cr-Commit-Position: refs/heads/master@{#64471}
parent 6f7eeec8
...@@ -20,15 +20,28 @@ bool IsMachineLoad(Node* const node) { ...@@ -20,15 +20,28 @@ bool IsMachineLoad(Node* const node) {
opcode == IrOpcode::kUnalignedLoad; opcode == IrOpcode::kUnalignedLoad;
} }
// Returns true iff |node| materializes a heap-object constant.
bool IsHeapConstant(Node* const node) {
  IrOpcode::Value const opcode = node->opcode();
  return opcode == IrOpcode::kHeapConstant;
}
// A node's output can use the compressed representation if it is either a
// HeapConstant, or a machine load whose representation admits a tagged
// pointer (i.e. TaggedPointer or AnyTagged).
bool CanBeCompressed(Node* const node) {
  if (IsHeapConstant(node)) return true;
  if (!IsMachineLoad(node)) return false;
  MachineRepresentation const rep =
      LoadRepresentationOf(node->op()).representation();
  return CanBeTaggedPointer(rep);
}
} // anonymous namespace } // anonymous namespace
DecompressionOptimizer::DecompressionOptimizer(Zone* zone, Graph* graph, DecompressionOptimizer::DecompressionOptimizer(Zone* zone, Graph* graph,
CommonOperatorBuilder* common,
MachineOperatorBuilder* machine) MachineOperatorBuilder* machine)
: graph_(graph), : graph_(graph),
common_(common),
machine_(machine), machine_(machine),
states_(graph, static_cast<uint32_t>(State::kNumberOfStates)), states_(graph, static_cast<uint32_t>(State::kNumberOfStates)),
to_visit_(zone), to_visit_(zone),
compressed_loads_(zone) {} compressed_candidate_nodes_(zone) {}
void DecompressionOptimizer::MarkNodes() { void DecompressionOptimizer::MarkNodes() {
MaybeMarkAndQueueForRevisit(graph()->end(), State::kOnly32BitsObserved); MaybeMarkAndQueueForRevisit(graph()->end(), State::kOnly32BitsObserved);
...@@ -89,60 +102,72 @@ void DecompressionOptimizer::MaybeMarkAndQueueForRevisit(Node* const node, ...@@ -89,60 +102,72 @@ void DecompressionOptimizer::MaybeMarkAndQueueForRevisit(Node* const node,
states_.Set(node, state); states_.Set(node, state);
to_visit_.push_back(node); to_visit_.push_back(node);
// In the case of a TaggedPointer or TaggedAny Load that can be done in 32 if (state == State::kOnly32BitsObserved && CanBeCompressed(node)) {
// bits, we save it in compressed_loads_ to be changed later if necessary. compressed_candidate_nodes_.push_back(node);
if (state == State::kOnly32BitsObserved && IsMachineLoad(node) &&
CanBeTaggedPointer(LoadRepresentationOf(node->op()).representation())) {
compressed_loads_.push_back(node);
} }
} }
} }
void DecompressionOptimizer::ChangeLoads() { void DecompressionOptimizer::ChangeHeapConstant(Node* const node) {
for (Node* const node : compressed_loads_) { DCHECK(IsHeapConstant(node));
// compressed_loads_ contains all the nodes that once had the NodeProperties::ChangeOp(
node, common()->CompressedHeapConstant(HeapConstantOf(node->op())));
}
// Rewrites a tagged machine load into its compressed counterpart, so the
// full decompression of the loaded value is avoided.
void DecompressionOptimizer::ChangeLoad(Node* const node) {
  DCHECK(IsMachineLoad(node));
  // Pick the compressed MachineType that matches the load's tagged one.
  // Only AnyTagged and TaggedPointer loads can reach here (see
  // CanBeCompressed); TaggedSigned never needs the full decompression.
  LoadRepresentation const load_rep = LoadRepresentationOf(node->op());
  LoadRepresentation compressed_rep = MachineType::CompressedPointer();
  if (load_rep == MachineType::AnyTagged()) {
    compressed_rep = MachineType::AnyCompressed();
  } else {
    DCHECK_EQ(load_rep, MachineType::TaggedPointer());
  }

  // Swap in the operator carrying the compressed MachineRepresentation; the
  // flavor of load (plain/poisoned/protected/unaligned) is preserved.
  switch (node->opcode()) {
    case IrOpcode::kLoad:
      NodeProperties::ChangeOp(node, machine()->Load(compressed_rep));
      break;
    case IrOpcode::kPoisonedLoad:
      NodeProperties::ChangeOp(node, machine()->PoisonedLoad(compressed_rep));
      break;
    case IrOpcode::kProtectedLoad:
      NodeProperties::ChangeOp(node, machine()->ProtectedLoad(compressed_rep));
      break;
    case IrOpcode::kUnalignedLoad:
      NodeProperties::ChangeOp(node, machine()->UnalignedLoad(compressed_rep));
      break;
    default:
      UNREACHABLE();
  }
}
void DecompressionOptimizer::ChangeNodes() {
for (Node* const node : compressed_candidate_nodes_) {
// compressed_candidate_nodes_ contains all the nodes that once had the
// State::kOnly32BitsObserved. If we later updated the state to be // State::kOnly32BitsObserved. If we later updated the state to be
// State::IsEverythingObserved, then we have to ignore them. This is less // State::IsEverythingObserved, then we have to ignore them. This is less
// costly than removing them from the compressed_loads_ NodeVector when we // costly than removing them from the compressed_candidate_nodes_ NodeVector
// update them to State::IsEverythingObserved. // when we update them to State::IsEverythingObserved.
if (IsEverythingObserved(node)) continue; if (IsEverythingObserved(node)) continue;
// Change to a Compressed MachRep to avoid the full decompression. if (IsHeapConstant(node)) {
LoadRepresentation load_rep = LoadRepresentationOf(node->op()); ChangeHeapConstant(node);
LoadRepresentation compressed_load_rep;
if (load_rep == MachineType::AnyTagged()) {
compressed_load_rep = MachineType::AnyCompressed();
} else { } else {
DCHECK_EQ(load_rep, MachineType::TaggedPointer()); ChangeLoad(node);
compressed_load_rep = MachineType::CompressedPointer();
}
// Change to the Operator with the Compressed MachineRepresentation.
switch (node->opcode()) {
case IrOpcode::kLoad:
NodeProperties::ChangeOp(node, machine()->Load(compressed_load_rep));
break;
case IrOpcode::kPoisonedLoad:
NodeProperties::ChangeOp(node,
machine()->PoisonedLoad(compressed_load_rep));
break;
case IrOpcode::kProtectedLoad:
NodeProperties::ChangeOp(node,
machine()->ProtectedLoad(compressed_load_rep));
break;
case IrOpcode::kUnalignedLoad:
NodeProperties::ChangeOp(node,
machine()->UnalignedLoad(compressed_load_rep));
break;
default:
UNREACHABLE();
} }
} }
} }
void DecompressionOptimizer::Reduce() { void DecompressionOptimizer::Reduce() {
MarkNodes(); MarkNodes();
ChangeLoads(); ChangeNodes();
} }
} // namespace compiler } // namespace compiler
......
...@@ -5,6 +5,7 @@ ...@@ -5,6 +5,7 @@
#ifndef V8_COMPILER_DECOMPRESSION_OPTIMIZER_H_ #ifndef V8_COMPILER_DECOMPRESSION_OPTIMIZER_H_
#define V8_COMPILER_DECOMPRESSION_OPTIMIZER_H_ #define V8_COMPILER_DECOMPRESSION_OPTIMIZER_H_
#include "src/compiler/common-operator.h"
#include "src/compiler/machine-operator.h" #include "src/compiler/machine-operator.h"
#include "src/compiler/node-marker.h" #include "src/compiler/node-marker.h"
...@@ -15,9 +16,11 @@ namespace compiler { ...@@ -15,9 +16,11 @@ namespace compiler {
// Forward declare. // Forward declare.
class Graph; class Graph;
// DecompressionOptimizer purpose is to avoid the full decompression on Loads // DecompressionOptimizer purpose is to hide the distinction between 32 bit and
// whenever possible. Its scope is narrowed down to TaggedPointer and AnyTagged, // 64 bit tagged values, while being able to use the compressed version of nodes
// since TaggedSigned avoids full decompression always. // whenever possible. Its scope is narrowed down to loads of TaggedPointer and
// AnyTagged (since TaggedSigned avoids full decompression always), and
// HeapConstants.
// DecompressionOptimizer will run only when pointer compression is enabled. For // DecompressionOptimizer will run only when pointer compression is enabled. For
// the moment, it also requires FLAG_turbo_decompression_elimination to be
...@@ -35,11 +38,12 @@ class Graph; ...@@ -35,11 +38,12 @@ class Graph;
class V8_EXPORT_PRIVATE DecompressionOptimizer final { class V8_EXPORT_PRIVATE DecompressionOptimizer final {
public: public:
DecompressionOptimizer(Zone* zone, Graph* graph, DecompressionOptimizer(Zone* zone, Graph* graph,
CommonOperatorBuilder* common,
MachineOperatorBuilder* machine); MachineOperatorBuilder* machine);
~DecompressionOptimizer() = default; ~DecompressionOptimizer() = default;
// Assign States to the nodes, and then change the loads' Operator to avoid // Assign States to the nodes, and then change the node's Operator to use the
// decompression if possible. // compressed version if possible.
void Reduce(); void Reduce();
private: private:
...@@ -56,9 +60,15 @@ class V8_EXPORT_PRIVATE DecompressionOptimizer final { ...@@ -56,9 +60,15 @@ class V8_EXPORT_PRIVATE DecompressionOptimizer final {
kNumberOfStates kNumberOfStates
}; };
// Go through the already marked nodes and changed the operation for the loads // Change node's op from HeapConstant to CompressedHeapConstant.
// that can avoid the full decompression. void ChangeHeapConstant(Node* const node);
void ChangeLoads();
// Change node's load into a compressed one.
void ChangeLoad(Node* const node);
// Go through the already marked nodes and changed the operation for the nodes
// that can use compressed outputs.
void ChangeNodes();
// Goes through the nodes to mark them all as appropriate. It will visit each // Goes through the nodes to mark them all as appropriate. It will visit each
// node at most twice: only when the node was unvisited, then marked as // node at most twice: only when the node was unvisited, then marked as
...@@ -74,9 +84,9 @@ class V8_EXPORT_PRIVATE DecompressionOptimizer final { ...@@ -74,9 +84,9 @@ class V8_EXPORT_PRIVATE DecompressionOptimizer final {
// i.e either if: // i.e either if:
// * We are marking an unvisited node, or // * We are marking an unvisited node, or
// * We are marking a node as needing 64 bits when we previously had the // * We are marking a node as needing 64 bits when we previously had the
// information that it could output 32 bits. Also, we store the TaggedPointer // information that it could output 32 bits. Also, we store the HeapConstant
// and AnyTagged loads that have their state set as kOnly32BitsObserved. // and TaggedPointer and AnyTagged loads that have their state set as
// If the node's state changes, we queue it for revisit. // kOnly32BitsObserved. If the node's state changes, we queue it for revisit.
void MaybeMarkAndQueueForRevisit(Node* const node, State state); void MaybeMarkAndQueueForRevisit(Node* const node, State state);
bool IsEverythingObserved(Node* const node) { bool IsEverythingObserved(Node* const node) {
...@@ -84,19 +94,21 @@ class V8_EXPORT_PRIVATE DecompressionOptimizer final { ...@@ -84,19 +94,21 @@ class V8_EXPORT_PRIVATE DecompressionOptimizer final {
} }
Graph* graph() const { return graph_; } Graph* graph() const { return graph_; }
CommonOperatorBuilder* common() const { return common_; }
MachineOperatorBuilder* machine() const { return machine_; } MachineOperatorBuilder* machine() const { return machine_; }
Graph* const graph_; Graph* const graph_;
CommonOperatorBuilder* const common_;
MachineOperatorBuilder* const machine_; MachineOperatorBuilder* const machine_;
NodeMarker<State> states_; NodeMarker<State> states_;
// to_visit_ is a Deque but it's used as if it were a Queue. The reason why we // to_visit_ is a Deque but it's used as if it were a Queue. The reason why we
// are using NodeDeque is because it attempts to reuse 'freed' zone memory // are using NodeDeque is because it attempts to reuse 'freed' zone memory
// instead of always allocating a new region. // instead of always allocating a new region.
NodeDeque to_visit_; NodeDeque to_visit_;
// Contains the AnyTagged and TaggedPointer loads that can avoid the full // Contains the nodes that can be changed into a compressed version of
// decompression. In a way, it functions as a NodeSet since each node will be // themselves. In a way, it functions as a NodeSet since each node will be
// contained at most once. It's a Vector since we care about insertion speed. // contained at most once. It's a Vector since we care about insertion speed.
NodeVector compressed_loads_; NodeVector compressed_candidate_nodes_;
DISALLOW_COPY_AND_ASSIGN(DecompressionOptimizer); DISALLOW_COPY_AND_ASSIGN(DecompressionOptimizer);
}; };
......
...@@ -1799,8 +1799,8 @@ struct DecompressionOptimizationPhase { ...@@ -1799,8 +1799,8 @@ struct DecompressionOptimizationPhase {
void Run(PipelineData* data, Zone* temp_zone) { void Run(PipelineData* data, Zone* temp_zone) {
if (COMPRESS_POINTERS_BOOL && !FLAG_turbo_decompression_elimination) { if (COMPRESS_POINTERS_BOOL && !FLAG_turbo_decompression_elimination) {
DecompressionOptimizer decompression_optimizer(temp_zone, data->graph(), DecompressionOptimizer decompression_optimizer(
data->machine()); temp_zone, data->graph(), data->common(), data->machine());
decompression_optimizer.Reduce(); decompression_optimizer.Reduce();
} }
} }
......
...@@ -20,7 +20,8 @@ class DecompressionOptimizerTest : public GraphTest { ...@@ -20,7 +20,8 @@ class DecompressionOptimizerTest : public GraphTest {
protected: protected:
void Reduce() { void Reduce() {
DecompressionOptimizer decompression_optimizer(zone(), graph(), machine()); DecompressionOptimizer decompression_optimizer(zone(), graph(), common(),
machine());
decompression_optimizer.Reduce(); decompression_optimizer.Reduce();
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment