Commit 6d5e9b79 authored by Ross McIlroy's avatar Ross McIlroy Committed by Commit Bot

[TurboProp] Split out MemoryLowering from MemoryOptimizer

Separates the memory lowering operations into a separate MemoryLowering
class which is used by the MemoryOptimizer. This will enable TurboProp
to reduce memory operations without having to do a full memory
optimization pass.

BUG=v8:9684

Change-Id: I1b333f1360fd342612672842bf879f44ab1ee60c
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1815243
Reviewed-by: Georg Neis <neis@chromium.org>
Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
Commit-Queue: Ross McIlroy <rmcilroy@chromium.org>
Cr-Commit-Position: refs/heads/master@{#63966}
parent 63e9a7d9
...@@ -1880,6 +1880,8 @@ v8_compiler_sources = [ ...@@ -1880,6 +1880,8 @@ v8_compiler_sources = [
"src/compiler/machine-operator.h", "src/compiler/machine-operator.h",
"src/compiler/map-inference.cc", "src/compiler/map-inference.cc",
"src/compiler/map-inference.h", "src/compiler/map-inference.h",
"src/compiler/memory-lowering.cc",
"src/compiler/memory-lowering.h",
"src/compiler/memory-optimizer.cc", "src/compiler/memory-optimizer.cc",
"src/compiler/memory-optimizer.h", "src/compiler/memory-optimizer.h",
"src/compiler/node-aux-data.h", "src/compiler/node-aux-data.h",
......
This diff is collapsed.
// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_COMPILER_MEMORY_LOWERING_H_
#define V8_COMPILER_MEMORY_LOWERING_H_
#include "src/compiler/graph-assembler.h"
#include "src/compiler/graph-reducer.h"
namespace v8 {
namespace internal {
namespace compiler {
// Forward declarations.
class CommonOperatorBuilder;
struct ElementAccess;
class Graph;
class JSGraph;
class MachineOperatorBuilder;
class Node;
class Operator;
// Provides operations to lower all simplified memory access and allocation
// related nodes (i.e. Allocate, LoadField, StoreField and friends) to machine
// operators.
// Provides operations to lower all simplified memory access and allocation
// related nodes (i.e. Allocate, LoadField, StoreField and friends) to machine
// operators.
class MemoryLowering final : public Reducer {
 public:
  // Controls whether adjacent allocations may be folded together into a
  // single, larger raw allocation.
  enum class AllocationFolding { kDoAllocationFolding, kDontAllocationFolding };
  // A set of allocations that have been folded together (defined in the .cc
  // file; only used here via pointer).
  class AllocationGroup;
  // An allocation state is propagated on the effect paths through the graph.
  class AllocationState final : public ZoneObject {
   public:
    // State with no allocation group being tracked.
    static AllocationState const* Empty(Zone* zone) {
      return new (zone) AllocationState();
    }
    // State whose allocation group can no longer accept folded allocations.
    static AllocationState const* Closed(AllocationGroup* group, Node* effect,
                                         Zone* zone) {
      return new (zone) AllocationState(group, effect);
    }
    // State whose allocation group is still open for folding; |top| is the
    // current allocation-top node and |size| the combined size so far.
    static AllocationState const* Open(AllocationGroup* group, intptr_t size,
                                       Node* top, Node* effect, Zone* zone) {
      return new (zone) AllocationState(group, size, top, effect);
    }
    bool IsYoungGenerationAllocation() const;
    AllocationGroup* group() const { return group_; }
    Node* top() const { return top_; }
    Node* effect() const { return effect_; }
    intptr_t size() const { return size_; }

   private:
    AllocationState();
    explicit AllocationState(AllocationGroup* group, Node* effect);
    AllocationState(AllocationGroup* group, intptr_t size, Node* top,
                    Node* effect);
    AllocationGroup* const group_;
    // The upper bound of the combined allocated object size on the current path
    // (max int if allocation folding is impossible on this path).
    intptr_t const size_;
    Node* const top_;
    Node* const effect_;
    DISALLOW_COPY_AND_ASSIGN(AllocationState);
  };
  // Invoked when a write-barrier elimination assertion fails; receives the
  // offending node/object plus a debug name and a scratch zone. The default
  // (see the constructor below) simply hits UNREACHABLE().
  using WriteBarrierAssertFailedCallback = std::function<void(
      Node* node, Node* object, const char* name, Zone* temp_zone)>;
  MemoryLowering(
      JSGraph* jsgraph, Zone* zone, PoisoningMitigationLevel poisoning_level,
      AllocationFolding allocation_folding =
          AllocationFolding::kDontAllocationFolding,
      WriteBarrierAssertFailedCallback callback = [](Node*, Node*, const char*,
                                                     Zone*) { UNREACHABLE(); },
      const char* function_debug_name = nullptr);
  ~MemoryLowering() = default;
  // NOTE(review): returns "MemoryReducer" although the class is named
  // MemoryLowering — confirm whether this string is intentional (it shows up
  // in reducer tracing output).
  const char* reducer_name() const override { return "MemoryReducer"; }
  // Perform memory lowering reduction on the given Node.
  Reduction Reduce(Node* node) override;
  // Specific reducers for each optype to enable keeping track of
  // AllocationState by the MemoryOptimizer.
  Reduction ReduceAllocateRaw(Node* node, AllocationType allocation_type,
                              AllowLargeObjects allow_large_objects,
                              AllocationState const** state);
  Reduction ReduceLoadFromObject(Node* node);
  Reduction ReduceLoadElement(Node* node);
  Reduction ReduceLoadField(Node* node);
  Reduction ReduceStoreToObject(Node* node,
                                AllocationState const* state = nullptr);
  Reduction ReduceStoreElement(Node* node,
                               AllocationState const* state = nullptr);
  Reduction ReduceStoreField(Node* node,
                             AllocationState const* state = nullptr);
  Reduction ReduceStore(Node* node, AllocationState const* state = nullptr);

 private:
  // Overload used by Reduce() when the caller does not track AllocationState.
  Reduction ReduceAllocateRaw(Node* node);
  WriteBarrierKind ComputeWriteBarrierKind(Node* node, Node* object,
                                           Node* value,
                                           AllocationState const* state,
                                           WriteBarrierKind);
  Node* ComputeIndex(ElementAccess const& access, Node* node);
  bool NeedsPoisoning(LoadSensitivity load_sensitivity) const;
  Graph* graph() const;
  Isolate* isolate() const;
  Zone* zone() const { return zone_; }
  JSGraph* jsgraph() const { return jsgraph_; }
  CommonOperatorBuilder* common() const;
  MachineOperatorBuilder* machine() const;
  GraphAssembler* gasm() { return &graph_assembler_; }
  // Lazily-initialized allocation operator, shared across reductions.
  SetOncePointer<const Operator> allocate_operator_;
  JSGraph* const jsgraph_;
  Zone* zone_;
  GraphAssembler graph_assembler_;
  AllocationFolding allocation_folding_;
  PoisoningMitigationLevel poisoning_level_;
  WriteBarrierAssertFailedCallback write_barrier_assert_failed_;
  const char* function_debug_name_;
  DISALLOW_IMPLICIT_CONSTRUCTORS(MemoryLowering);
};
} // namespace compiler
} // namespace internal
} // namespace v8
#endif // V8_COMPILER_MEMORY_LOWERING_H_
This diff is collapsed.
...@@ -5,7 +5,7 @@ ...@@ -5,7 +5,7 @@
#ifndef V8_COMPILER_MEMORY_OPTIMIZER_H_ #ifndef V8_COMPILER_MEMORY_OPTIMIZER_H_
#define V8_COMPILER_MEMORY_OPTIMIZER_H_ #define V8_COMPILER_MEMORY_OPTIMIZER_H_
#include "src/compiler/graph-assembler.h" #include "src/compiler/memory-lowering.h"
#include "src/zone/zone-containers.h" #include "src/zone/zone-containers.h"
namespace v8 { namespace v8 {
...@@ -15,95 +15,29 @@ class TickCounter; ...@@ -15,95 +15,29 @@ class TickCounter;
namespace compiler { namespace compiler {
// Forward declarations.
class CommonOperatorBuilder;
struct ElementAccess;
class Graph;
class JSGraph; class JSGraph;
class MachineOperatorBuilder; class Graph;
class Node;
class Operator;
// NodeIds are identifying numbers for nodes that can be used to index auxiliary // NodeIds are identifying numbers for nodes that can be used to index auxiliary
// out-of-line data associated with each node. // out-of-line data associated with each node.
using NodeId = uint32_t; using NodeId = uint32_t;
// Lowers all simplified memory access and allocation related nodes (i.e.
// Allocate, LoadField, StoreField and friends) to machine operators.
// Performs allocation folding and store write barrier elimination // Performs allocation folding and store write barrier elimination
// implicitly. // implicitly, while lowering all simplified memory access and allocation
// related nodes (i.e. Allocate, LoadField, StoreField and friends) to machine
// operators.
class MemoryOptimizer final { class MemoryOptimizer final {
public: public:
enum class AllocationFolding { kDoAllocationFolding, kDontAllocationFolding };
MemoryOptimizer(JSGraph* jsgraph, Zone* zone, MemoryOptimizer(JSGraph* jsgraph, Zone* zone,
PoisoningMitigationLevel poisoning_level, PoisoningMitigationLevel poisoning_level,
AllocationFolding allocation_folding, MemoryLowering::AllocationFolding allocation_folding,
const char* function_debug_name, TickCounter* tick_counter); const char* function_debug_name, TickCounter* tick_counter);
~MemoryOptimizer() = default; ~MemoryOptimizer() = default;
void Optimize(); void Optimize();
private: private:
// An allocation group represents a set of allocations that have been folded using AllocationState = MemoryLowering::AllocationState;
// together.
class AllocationGroup final : public ZoneObject {
public:
AllocationGroup(Node* node, AllocationType allocation, Zone* zone);
AllocationGroup(Node* node, AllocationType allocation, Node* size,
Zone* zone);
~AllocationGroup() = default;
void Add(Node* object);
bool Contains(Node* object) const;
bool IsYoungGenerationAllocation() const {
return allocation() == AllocationType::kYoung;
}
AllocationType allocation() const { return allocation_; }
Node* size() const { return size_; }
private:
ZoneSet<NodeId> node_ids_;
AllocationType const allocation_;
Node* const size_;
DISALLOW_IMPLICIT_CONSTRUCTORS(AllocationGroup);
};
// An allocation state is propagated on the effect paths through the graph.
class AllocationState final : public ZoneObject {
public:
static AllocationState const* Empty(Zone* zone) {
return new (zone) AllocationState();
}
static AllocationState const* Closed(AllocationGroup* group, Zone* zone) {
return new (zone) AllocationState(group);
}
static AllocationState const* Open(AllocationGroup* group, intptr_t size,
Node* top, Zone* zone) {
return new (zone) AllocationState(group, size, top);
}
bool IsYoungGenerationAllocation() const;
AllocationGroup* group() const { return group_; }
Node* top() const { return top_; }
intptr_t size() const { return size_; }
private:
AllocationState();
explicit AllocationState(AllocationGroup* group);
AllocationState(AllocationGroup* group, intptr_t size, Node* top);
AllocationGroup* const group_;
// The upper bound of the combined allocated object size on the current path
// (max int if allocation folding is impossible on this path).
intptr_t const size_;
Node* const top_;
DISALLOW_COPY_AND_ASSIGN(AllocationState);
};
// An array of allocation states used to collect states on merges. // An array of allocation states used to collect states on merges.
using AllocationStates = ZoneVector<AllocationState const*>; using AllocationStates = ZoneVector<AllocationState const*>;
...@@ -127,44 +61,29 @@ class MemoryOptimizer final { ...@@ -127,44 +61,29 @@ class MemoryOptimizer final {
void VisitStore(Node*, AllocationState const*); void VisitStore(Node*, AllocationState const*);
void VisitOtherEffect(Node*, AllocationState const*); void VisitOtherEffect(Node*, AllocationState const*);
Node* ComputeIndex(ElementAccess const&, Node*);
WriteBarrierKind ComputeWriteBarrierKind(Node* node, Node* object,
Node* value,
AllocationState const* state,
WriteBarrierKind);
AllocationState const* MergeStates(AllocationStates const& states); AllocationState const* MergeStates(AllocationStates const& states);
void EnqueueMerge(Node*, int, AllocationState const*); void EnqueueMerge(Node*, int, AllocationState const*);
void EnqueueUses(Node*, AllocationState const*); void EnqueueUses(Node*, AllocationState const*);
void EnqueueUse(Node*, int, AllocationState const*); void EnqueueUse(Node*, int, AllocationState const*);
bool NeedsPoisoning(LoadSensitivity load_sensitivity) const;
// Returns true if the AllocationType of the current AllocateRaw node that we // Returns true if the AllocationType of the current AllocateRaw node that we
// are visiting needs to be updated to kOld, due to propagation of tenuring // are visiting needs to be updated to kOld, due to propagation of tenuring
// from outer to inner allocations. // from outer to inner allocations.
bool AllocationTypeNeedsUpdateToOld(Node* const user, const Edge edge); bool AllocationTypeNeedsUpdateToOld(Node* const user, const Edge edge);
AllocationState const* empty_state() const { return empty_state_; } AllocationState const* empty_state() const { return empty_state_; }
MemoryLowering* memory_lowering() { return &memory_lowering_; }
Graph* graph() const; Graph* graph() const;
Isolate* isolate() const;
JSGraph* jsgraph() const { return jsgraph_; } JSGraph* jsgraph() const { return jsgraph_; }
CommonOperatorBuilder* common() const;
MachineOperatorBuilder* machine() const;
Zone* zone() const { return zone_; } Zone* zone() const { return zone_; }
GraphAssembler* gasm() { return &graph_assembler_; }
SetOncePointer<const Operator> allocate_operator_; MemoryLowering memory_lowering_;
JSGraph* const jsgraph_; JSGraph* jsgraph_;
AllocationState const* const empty_state_; AllocationState const* const empty_state_;
ZoneMap<NodeId, AllocationStates> pending_; ZoneMap<NodeId, AllocationStates> pending_;
ZoneQueue<Token> tokens_; ZoneQueue<Token> tokens_;
Zone* const zone_; Zone* const zone_;
GraphAssembler graph_assembler_;
PoisoningMitigationLevel poisoning_level_;
AllocationFolding allocation_folding_;
const char* function_debug_name_;
TickCounter* const tick_counter_; TickCounter* const tick_counter_;
DISALLOW_IMPLICIT_CONSTRUCTORS(MemoryOptimizer); DISALLOW_IMPLICIT_CONSTRUCTORS(MemoryOptimizer);
......
...@@ -1727,8 +1727,8 @@ struct MemoryOptimizationPhase { ...@@ -1727,8 +1727,8 @@ struct MemoryOptimizationPhase {
MemoryOptimizer optimizer( MemoryOptimizer optimizer(
data->jsgraph(), temp_zone, data->info()->GetPoisoningMitigationLevel(), data->jsgraph(), temp_zone, data->info()->GetPoisoningMitigationLevel(),
data->info()->is_allocation_folding_enabled() data->info()->is_allocation_folding_enabled()
? MemoryOptimizer::AllocationFolding::kDoAllocationFolding ? MemoryLowering::AllocationFolding::kDoAllocationFolding
: MemoryOptimizer::AllocationFolding::kDontAllocationFolding, : MemoryLowering::AllocationFolding::kDontAllocationFolding,
data->debug_name(), &data->info()->tick_counter()); data->debug_name(), &data->info()->tick_counter());
optimizer.Optimize(); optimizer.Optimize();
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment