// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_MEMORY_OPTIMIZER_H_
#define V8_COMPILER_MEMORY_OPTIMIZER_H_

#include "src/compiler/graph-assembler.h"
#include "src/zone/zone-containers.h"

namespace v8 {
namespace internal {
namespace compiler {

// Forward declarations.
class CommonOperatorBuilder;
struct ElementAccess;
class Graph;
class JSGraph;
class MachineOperatorBuilder;
class Node;
class Operator;

// NodeIds are identifying numbers for nodes that can be used to index auxiliary
// out-of-line data associated with each node.
typedef uint32_t NodeId;

// Lowers all simplified memory access and allocation related nodes (i.e.
// Allocate, LoadField, StoreField and friends) to machine operators.
// Performs allocation folding and store write barrier elimination
// implicitly.
class MemoryOptimizer final {
 public:
34 35
  enum class AllocationFolding { kDoAllocationFolding, kDontAllocationFolding };

36
  MemoryOptimizer(JSGraph* jsgraph, Zone* zone,
37
                  PoisoningMitigationLevel poisoning_level,
38
                  AllocationFolding allocation_folding);
39
  ~MemoryOptimizer() = default;
40 41 42 43 44 45 46 47 48 49 50

  void Optimize();

 private:
  // An allocation group represents a set of allocations that have been folded
  // together.
  class AllocationGroup final : public ZoneObject {
   public:
    AllocationGroup(Node* node, PretenureFlag pretenure, Zone* zone);
    AllocationGroup(Node* node, PretenureFlag pretenure, Node* size,
                    Zone* zone);
51
    ~AllocationGroup() = default;
52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76

    void Add(Node* object);
    bool Contains(Node* object) const;
    bool IsNewSpaceAllocation() const { return pretenure() == NOT_TENURED; }

    PretenureFlag pretenure() const { return pretenure_; }
    Node* size() const { return size_; }

   private:
    ZoneSet<NodeId> node_ids_;
    PretenureFlag const pretenure_;
    Node* const size_;

    DISALLOW_IMPLICIT_CONSTRUCTORS(AllocationGroup);
  };

  // An allocation state is propagated on the effect paths through the graph.
  class AllocationState final : public ZoneObject {
   public:
    static AllocationState const* Empty(Zone* zone) {
      return new (zone) AllocationState();
    }
    static AllocationState const* Closed(AllocationGroup* group, Zone* zone) {
      return new (zone) AllocationState(group);
    }
77
    static AllocationState const* Open(AllocationGroup* group, intptr_t size,
78 79 80 81 82 83 84 85
                                       Node* top, Zone* zone) {
      return new (zone) AllocationState(group, size, top);
    }

    bool IsNewSpaceAllocation() const;

    AllocationGroup* group() const { return group_; }
    Node* top() const { return top_; }
86
    intptr_t size() const { return size_; }
87 88 89 90

   private:
    AllocationState();
    explicit AllocationState(AllocationGroup* group);
91
    AllocationState(AllocationGroup* group, intptr_t size, Node* top);
92 93 94 95

    AllocationGroup* const group_;
    // The upper bound of the combined allocated object size on the current path
    // (max int if allocation folding is impossible on this path).
96
    intptr_t const size_;
97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112
    Node* const top_;

    DISALLOW_COPY_AND_ASSIGN(AllocationState);
  };

  // An array of allocation states used to collect states on merges.
  typedef ZoneVector<AllocationState const*> AllocationStates;

  // We thread through tokens to represent the current state on a given effect
  // path through the graph.
  struct Token {
    Node* node;
    AllocationState const* state;
  };

  void VisitNode(Node*, AllocationState const*);
113
  void VisitAllocateRaw(Node*, AllocationState const*);
114
  void VisitCall(Node*, AllocationState const*);
115
  void VisitCallWithCallerSavedRegisters(Node*, AllocationState const*);
116 117 118 119
  void VisitLoadElement(Node*, AllocationState const*);
  void VisitLoadField(Node*, AllocationState const*);
  void VisitStoreElement(Node*, AllocationState const*);
  void VisitStoreField(Node*, AllocationState const*);
120
  void VisitStore(Node*, AllocationState const*);
121 122 123 124 125 126 127 128 129 130 131 132 133
  void VisitOtherEffect(Node*, AllocationState const*);

  Node* ComputeIndex(ElementAccess const&, Node*);
  WriteBarrierKind ComputeWriteBarrierKind(Node* object,
                                           AllocationState const* state,
                                           WriteBarrierKind);

  AllocationState const* MergeStates(AllocationStates const& states);

  void EnqueueMerge(Node*, int, AllocationState const*);
  void EnqueueUses(Node*, AllocationState const*);
  void EnqueueUse(Node*, int, AllocationState const*);

134 135
  bool NeedsPoisoning(LoadSensitivity load_sensitivity) const;

136 137 138 139 140 141 142
  AllocationState const* empty_state() const { return empty_state_; }
  Graph* graph() const;
  Isolate* isolate() const;
  JSGraph* jsgraph() const { return jsgraph_; }
  CommonOperatorBuilder* common() const;
  MachineOperatorBuilder* machine() const;
  Zone* zone() const { return zone_; }
143
  GraphAssembler* gasm() { return &graph_assembler_; }
144 145 146 147 148 149 150

  SetOncePointer<const Operator> allocate_operator_;
  JSGraph* const jsgraph_;
  AllocationState const* const empty_state_;
  ZoneMap<NodeId, AllocationStates> pending_;
  ZoneQueue<Token> tokens_;
  Zone* const zone_;
151
  GraphAssembler graph_assembler_;
152
  PoisoningMitigationLevel poisoning_level_;
153
  AllocationFolding allocation_folding_;
154 155 156 157 158 159 160 161 162

  DISALLOW_IMPLICIT_CONSTRUCTORS(MemoryOptimizer);
};

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_MEMORY_OPTIMIZER_H_