Commit 8616be0c authored by Tobias Tebbi, committed by Commit Bot

Revert "[turbofan] staging new implementation of escape analysis"

This reverts commit d230b44f.

Reason for revert: compile errors on the waterfall

Original change's description:
> [turbofan] staging new implementation of escape analysis
> 
> Bug: 
> Change-Id: Idebe4fa6d651a404a0dc1947ed4a34a8dc9707a9
> Reviewed-on: https://chromium-review.googlesource.com/565720
> Commit-Queue: Tobias Tebbi <tebbi@chromium.org>
> Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#46966}

TBR=mstarzinger@chromium.org,jarin@chromium.org,tebbi@chromium.org

Change-Id: I73c3cb270d498aeb181e31bad04f1c73d5ca6741
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Reviewed-on: https://chromium-review.googlesource.com/591370
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Commit-Queue: Tobias Tebbi <tebbi@chromium.org>
Cr-Commit-Position: refs/heads/master@{#46968}
parent e15f5544
......@@ -1381,10 +1381,6 @@ v8_source_set("v8_base") {
"src/compiler/memory-optimizer.h",
"src/compiler/move-optimizer.cc",
"src/compiler/move-optimizer.h",
"src/compiler/new-escape-analysis-reducer.cc",
"src/compiler/new-escape-analysis-reducer.h",
"src/compiler/new-escape-analysis.cc",
"src/compiler/new-escape-analysis.h",
"src/compiler/node-aux-data.h",
"src/compiler/node-cache.cc",
"src/compiler/node-cache.h",
......@@ -1406,7 +1402,6 @@ v8_source_set("v8_base") {
"src/compiler/operator.h",
"src/compiler/osr.cc",
"src/compiler/osr.h",
"src/compiler/persistent-map.h",
"src/compiler/pipeline-statistics.cc",
"src/compiler/pipeline-statistics.h",
"src/compiler/pipeline.cc",
......
......@@ -172,6 +172,7 @@ struct hash<T*> : public std::unary_function<T*, size_t> {
}
};
// base::bit_equal_to is a function object class for bitwise equality
// comparison, similar to std::equal_to, except that the comparison is performed
// on the bit representation of the operands.
......
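For context, a comparator in the spirit of the base::bit_equal_to comment above can be sketched as follows (a minimal illustration for trivially copyable types, not V8's actual definition):

  #include <cstring>

  // Compares the raw object representation rather than using operator==, so
  // two NaN doubles with identical bit patterns compare equal, while -0.0 and
  // +0.0 (which differ only in the sign bit) do not.
  template <typename T>
  struct bit_equal_to {
    bool operator()(const T& lhs, const T& rhs) const {
      return std::memcmp(&lhs, &rhs, sizeof(T)) == 0;
    }
  };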
......@@ -142,22 +142,6 @@ std::ostream& operator<<(std::ostream& os, ParameterInfo const& i) {
return os;
}
std::ostream& operator<<(std::ostream& os, ObjectStateInfo const& i) {
return os << "id:" << i.object_id() << "|size:" << i.size();
}
size_t hash_value(ObjectStateInfo const& p) {
return base::hash_combine(p.object_id(), p.size());
}
std::ostream& operator<<(std::ostream& os, TypedObjectStateInfo const& i) {
return os << "id:" << i.object_id() << "|" << i.machine_types();
}
size_t hash_value(TypedObjectStateInfo const& p) {
return base::hash_combine(p.object_id(), p.machine_types());
}
bool operator==(RelocatablePtrConstantInfo const& lhs,
RelocatablePtrConstantInfo const& rhs) {
return lhs.rmode() == rhs.rmode() && lhs.value() == rhs.value() &&
......@@ -338,7 +322,7 @@ ZoneVector<MachineType> const* MachineTypesOf(Operator const* op) {
if (op->opcode() == IrOpcode::kTypedStateValues) {
return OpParameter<TypedStateValueInfo>(op).machine_types();
}
return OpParameter<TypedObjectStateInfo>(op).machine_types();
return OpParameter<const ZoneVector<MachineType>*>(op);
}
#define CACHED_OP_LIST(V) \
......@@ -1092,14 +1076,6 @@ const Operator* CommonOperatorBuilder::RelocatableInt64Constant(
RelocatablePtrConstantInfo(value, rmode)); // parameter
}
const Operator* CommonOperatorBuilder::ObjectId(uint32_t object_id) {
return new (zone()) Operator1<uint32_t>( // --
IrOpcode::kObjectId, Operator::kPure, // opcode
"ObjectId", // name
0, 0, 0, 1, 0, 0, // counts
object_id); // parameter
}
const Operator* CommonOperatorBuilder::Select(MachineRepresentation rep,
BranchHint hint) {
return new (zone()) Operator1<SelectParameters>( // --
......@@ -1244,35 +1220,21 @@ bool IsRestOf(Operator const* op) {
return OpParameter<bool>(op);
}
const Operator* CommonOperatorBuilder::ObjectState(int object_id,
int pointer_slots) {
return new (zone()) Operator1<ObjectStateInfo>( // --
const Operator* CommonOperatorBuilder::ObjectState(int pointer_slots) {
return new (zone()) Operator1<int>( // --
IrOpcode::kObjectState, Operator::kPure, // opcode
"ObjectState", // name
pointer_slots, 0, 0, 1, 0, 0, // counts
ObjectStateInfo{object_id, pointer_slots}); // parameter
pointer_slots); // parameter
}
const Operator* CommonOperatorBuilder::TypedObjectState(
int object_id, const ZoneVector<MachineType>* types) {
return new (zone()) Operator1<TypedObjectStateInfo>( // --
const ZoneVector<MachineType>* types) {
return new (zone()) Operator1<const ZoneVector<MachineType>*>( // --
IrOpcode::kTypedObjectState, Operator::kPure, // opcode
"TypedObjectState", // name
static_cast<int>(types->size()), 0, 0, 1, 0, 0, // counts
TypedObjectStateInfo(object_id, types)); // parameter
}
uint32_t ObjectIdOf(Operator const* op) {
switch (op->opcode()) {
case IrOpcode::kObjectState:
return OpParameter<ObjectStateInfo>(op).object_id();
case IrOpcode::kTypedObjectState:
return OpParameter<TypedObjectStateInfo>(op).object_id();
case IrOpcode::kObjectId:
return OpParameter<uint32_t>(op);
default:
UNREACHABLE();
}
types); // parameter
}
const Operator* CommonOperatorBuilder::FrameState(
......
......@@ -123,23 +123,6 @@ std::ostream& operator<<(std::ostream&, ParameterInfo const&);
V8_EXPORT_PRIVATE int ParameterIndexOf(const Operator* const);
const ParameterInfo& ParameterInfoOf(const Operator* const);
struct ObjectStateInfo final : std::pair<uint32_t, int> {
using std::pair<uint32_t, int>::pair;
uint32_t object_id() const { return first; }
int size() const { return second; }
};
std::ostream& operator<<(std::ostream&, ObjectStateInfo const&);
size_t hash_value(ObjectStateInfo const& p);
struct TypedObjectStateInfo final
: std::pair<uint32_t, const ZoneVector<MachineType>*> {
using std::pair<uint32_t, const ZoneVector<MachineType>*>::pair;
uint32_t object_id() const { return first; }
const ZoneVector<MachineType>* machine_types() const { return second; }
};
std::ostream& operator<<(std::ostream&, TypedObjectStateInfo const&);
size_t hash_value(TypedObjectStateInfo const& p);
class RelocatablePtrConstantInfo final {
public:
enum Type { kInt32, kInt64 };
......@@ -313,8 +296,6 @@ ZoneVector<MachineType> const* MachineTypesOf(Operator const*)
// IsRestOf(op) is true in the second case.
bool IsRestOf(Operator const*);
uint32_t ObjectIdOf(Operator const*);
// Interface for building common operators that can be used at any level of IR,
// including JavaScript, mid-level, and low-level.
class V8_EXPORT_PRIVATE CommonOperatorBuilder final
......@@ -359,7 +340,6 @@ class V8_EXPORT_PRIVATE CommonOperatorBuilder final
const Operator* NumberConstant(volatile double);
const Operator* PointerConstant(intptr_t);
const Operator* HeapConstant(const Handle<HeapObject>&);
const Operator* ObjectId(uint32_t);
const Operator* RelocatableInt32Constant(int32_t value,
RelocInfo::Mode rmode);
......@@ -382,9 +362,8 @@ class V8_EXPORT_PRIVATE CommonOperatorBuilder final
SparseInputMask bitmask);
const Operator* ArgumentsElementsState(bool is_rest);
const Operator* ArgumentsLengthState(bool is_rest);
const Operator* ObjectState(int object_id, int pointer_slots);
const Operator* TypedObjectState(int object_id,
const ZoneVector<MachineType>* types);
const Operator* ObjectState(int pointer_slots);
const Operator* TypedObjectState(const ZoneVector<MachineType>* types);
const Operator* FrameState(BailoutId bailout_id,
OutputFrameStateCombine state_combine,
const FrameStateFunctionInfo* function_info);
......
......@@ -125,8 +125,6 @@ const Alias EscapeStatusAnalysis::kNotReachable =
const Alias EscapeStatusAnalysis::kUntrackable =
std::numeric_limits<Alias>::max() - 1;
namespace impl {
class VirtualObject : public ZoneObject {
public:
enum Status {
......@@ -568,9 +566,6 @@ bool VirtualState::MergeFrom(MergeCache* cache, Zone* zone, Graph* graph,
return changed;
}
} // namespace impl
using namespace impl;
EscapeStatusAnalysis::EscapeStatusAnalysis(EscapeAnalysis* object_analysis,
Graph* graph, Zone* zone)
: stack_(zone),
......@@ -1689,8 +1684,8 @@ Node* EscapeAnalysis::GetOrCreateObjectState(Node* effect, Node* node) {
}
int input_count = static_cast<int>(cache_->fields().size());
Node* new_object_state =
graph()->NewNode(common()->ObjectState(vobj->id(), input_count),
input_count, &cache_->fields().front());
graph()->NewNode(common()->ObjectState(input_count), input_count,
&cache_->fields().front());
NodeProperties::SetType(new_object_state, Type::OtherInternal());
vobj->SetObjectState(new_object_state);
TRACE(
......
......@@ -15,11 +15,9 @@ namespace compiler {
// Forward declarations.
class CommonOperatorBuilder;
class EscapeStatusAnalysis;
namespace impl {
class MergeCache;
class VirtualState;
class VirtualObject;
}; // namespace impl
// EscapeObjectAnalysis simulates stores to determine values of loads if
// an object is virtual and eliminated.
......@@ -57,19 +55,17 @@ class V8_EXPORT_PRIVATE EscapeAnalysis {
bool ProcessEffectPhi(Node* node);
void ForwardVirtualState(Node* node);
impl::VirtualState* CopyForModificationAt(impl::VirtualState* state,
Node* node);
impl::VirtualObject* CopyForModificationAt(impl::VirtualObject* obj,
impl::VirtualState* state,
VirtualState* CopyForModificationAt(VirtualState* state, Node* node);
VirtualObject* CopyForModificationAt(VirtualObject* obj, VirtualState* state,
Node* node);
Node* replacement(Node* node);
bool UpdateReplacement(impl::VirtualState* state, Node* node, Node* rep);
bool UpdateReplacement(VirtualState* state, Node* node, Node* rep);
impl::VirtualObject* GetVirtualObject(impl::VirtualState* state, Node* node);
VirtualObject* GetVirtualObject(VirtualState* state, Node* node);
void DebugPrint();
void DebugPrintState(impl::VirtualState* state);
void DebugPrintState(VirtualState* state);
Graph* graph() const;
Zone* zone() const { return zone_; }
......@@ -79,10 +75,10 @@ class V8_EXPORT_PRIVATE EscapeAnalysis {
Node* const slot_not_analyzed_;
CommonOperatorBuilder* const common_;
EscapeStatusAnalysis* status_analysis_;
ZoneVector<impl::VirtualState*> virtual_states_;
ZoneVector<VirtualState*> virtual_states_;
ZoneVector<Node*> replacements_;
ZoneSet<impl::VirtualObject*> cycle_detection_;
impl::MergeCache* cache_;
ZoneSet<VirtualObject*> cycle_detection_;
MergeCache* cache_;
DISALLOW_COPY_AND_ASSIGN(EscapeAnalysis);
};
......
......@@ -482,39 +482,21 @@ class StateObjectDeduplicator {
static const size_t kNotDuplicated = SIZE_MAX;
size_t GetObjectId(Node* node) {
DCHECK(node->opcode() == IrOpcode::kTypedObjectState ||
node->opcode() == IrOpcode::kObjectId ||
node->opcode() == IrOpcode::kArgumentsElementsState);
for (size_t i = 0; i < objects_.size(); ++i) {
if (objects_[i] == node) return i;
// ObjectId nodes are the Turbofan way to express objects with the same
// identity in the deopt info. So they should always be mapped to
// previously appearing TypedObjectState nodes.
if (HasObjectId(objects_[i]) && HasObjectId(node) &&
ObjectIdOf(objects_[i]->op()) == ObjectIdOf(node->op())) {
if (objects_[i] == node) {
return i;
}
}
DCHECK(node->opcode() == IrOpcode::kTypedObjectState ||
node->opcode() == IrOpcode::kArgumentsElementsState);
return kNotDuplicated;
}
size_t InsertObject(Node* node) {
DCHECK(node->opcode() == IrOpcode::kTypedObjectState ||
node->opcode() == IrOpcode::kObjectId ||
node->opcode() == IrOpcode::kArgumentsElementsState);
size_t id = objects_.size();
objects_.push_back(node);
return id;
}
private:
static bool HasObjectId(Node* node) {
return node->opcode() == IrOpcode::kTypedObjectState ||
node->opcode() == IrOpcode::kObjectId;
}
ZoneVector<Node*> objects_;
};
......@@ -545,11 +527,9 @@ size_t InstructionSelector::AddOperandToStateValueDescriptor(
case IrOpcode::kObjectState: {
UNREACHABLE();
}
case IrOpcode::kTypedObjectState:
case IrOpcode::kObjectId: {
case IrOpcode::kTypedObjectState: {
size_t id = deduplicator->GetObjectId(input);
if (id == StateObjectDeduplicator::kNotDuplicated) {
DCHECK(input->opcode() == IrOpcode::kTypedObjectState);
size_t entries = 0;
id = deduplicator->InsertObject(input);
StateValueList* nested = values->PushRecursiveField(zone, id);
......
This diff is collapsed.
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_COMPILER_NEW_ESCAPE_ANALYSIS_REDUCER_H_
#define V8_COMPILER_NEW_ESCAPE_ANALYSIS_REDUCER_H_
#include "src/base/compiler-specific.h"
#include "src/compiler/graph-reducer.h"
#include "src/compiler/new-escape-analysis.h"
#include "src/globals.h"
namespace v8 {
namespace internal {
namespace compiler {
class Deduplicator;
class JSGraph;
// Perform hash-consing when creating or mutating nodes. Used to avoid duplicate
// nodes when creating ObjectState, StateValues and FrameState nodes
class NodeHashCache {
public:
NodeHashCache(Graph* graph, Zone* zone)
: graph_(graph), cache_(zone), temp_nodes_(zone) {}
// Handle to a conceptually new mutable node. Tries to re-use existing nodes
// and to recycle memory if possible.
class Constructor {
public:
// Construct a new node as a clone of [from].
Constructor(NodeHashCache* cache, Node* from)
: node_cache_(cache), from_(from), tmp_(nullptr) {}
// Construct a new node from scratch.
Constructor(NodeHashCache* cache, const Operator* op, int input_count,
Node** inputs, Type* type);
// Modify the new node.
void ReplaceValueInput(Node* input, int i) {
if (!tmp_ && input == NodeProperties::GetValueInput(from_, i)) return;
Node* node = MutableNode();
NodeProperties::ReplaceValueInput(node, input, i);
}
void ReplaceInput(Node* input, int i) {
if (!tmp_ && input == from_->InputAt(i)) return;
Node* node = MutableNode();
node->ReplaceInput(i, input);
}
// Obtain the mutated node or a cached copy. Invalidates the [Constructor].
Node* Get();
private:
Node* MutableNode();
NodeHashCache* node_cache_;
// Original node, copied on write.
Node* from_;
// Temporary node used for mutations, can be recycled if cache is hit.
Node* tmp_;
};
private:
Node* Query(Node* node);
void Insert(Node* node) { cache_.insert(node); }
Graph* graph_;
struct NodeEquals {
bool operator()(Node* a, Node* b) const {
return NodeProperties::Equals(a, b);
}
};
struct NodeHashCode {
size_t operator()(Node* n) const { return NodeProperties::HashCode(n); }
};
ZoneUnorderedSet<Node*, NodeHashCode, NodeEquals> cache_;
// Unused nodes whose memory can be recycled.
ZoneVector<Node*> temp_nodes_;
};
// Modify the graph according to the information computed in the previous phase.
class V8_EXPORT_PRIVATE NewEscapeAnalysisReducer final
: public NON_EXPORTED_BASE(AdvancedReducer) {
public:
NewEscapeAnalysisReducer(Editor* editor, JSGraph* jsgraph,
EscapeAnalysisResult analysis_result, Zone* zone);
Reduction Reduce(Node* node) override;
const char* reducer_name() const override {
return "NewEscapeAnalysisReducer";
}
void Finalize() override;
// Verifies that all virtual allocation nodes have been dealt with. Run it
// after this reducer has been applied.
void VerifyReplacement() const;
private:
void ReduceFrameStateInputs(Node* node);
Node* ReduceDeoptState(Node* node, Node* effect, Deduplicator* deduplicator);
Node* ObjectIdNode(const VirtualObject* vobject);
Node* MaybeGuard(Node* original, Node* replacement);
JSGraph* jsgraph() const { return jsgraph_; }
EscapeAnalysisResult analysis_result() const { return analysis_result_; }
Zone* zone() const { return zone_; }
JSGraph* const jsgraph_;
EscapeAnalysisResult analysis_result_;
ZoneVector<Node*> object_id_cache_;
NodeHashCache node_cache_;
ZoneSet<Node*> arguments_elements_;
Zone* const zone_;
DISALLOW_COPY_AND_ASSIGN(NewEscapeAnalysisReducer);
};
} // namespace compiler
} // namespace internal
} // namespace v8
#endif // V8_COMPILER_NEW_ESCAPE_ANALYSIS_REDUCER_H_
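To illustrate the intended use of the copy-on-write Constructor in the header above, here is a hedged sketch using only names declared in this file (the helper function itself is hypothetical, not part of the patch):

  // Produce a variant of |state| with value input 0 replaced by |input|,
  // re-using a structurally identical cached node when one already exists.
  Node* WithNewInput(NodeHashCache* cache, Node* state, Node* input) {
    NodeHashCache::Constructor constructor(cache, state);
    constructor.ReplaceValueInput(input, 0);  // clones |state| only on change
    return constructor.Get();  // returns the cached node or the new one
  }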
This diff is collapsed.
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_COMPILER_NEW_ESCAPE_ANALYSIS_H_
#define V8_COMPILER_NEW_ESCAPE_ANALYSIS_H_
#include "src/base/functional.h"
#include "src/compiler/graph-reducer.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/persistent-map.h"
#include "src/globals.h"
#ifdef DEBUG
#define TRACE(...) \
do { \
if (FLAG_trace_turbo_escape) PrintF(__VA_ARGS__); \
} while (false)
#else
#define TRACE(...)
#endif
namespace v8 {
namespace internal {
namespace compiler {
class CommonOperatorBuilder;
class VariableTracker;
class EscapeAnalysisTracker;
#ifdef DEBUG
class TraceScope {
public:
TraceScope(const char* name, Node* node) : name_(name), node_(node) {
for (int i = 0; i < depth; ++i) TRACE(" ");
TRACE("[ %s %s#%d\n", name, node->op()->mnemonic(), node->id());
++depth;
}
~TraceScope() {
--depth;
for (int i = 0; i < depth; ++i) TRACE(" ");
TRACE("] %s %s#%d\n", name_, node_->op()->mnemonic(), node_->id());
}
private:
const char* name_;
Node* node_;
static thread_local int depth;
};
#define TRACE_FN(name, node) TraceScope __trace_scope_(name, node)
#else
#define TRACE_FN(name, node)
#endif
// {EffectGraphReducer} reduces up to a fixed point. It distinguishes changes to
// the effect output of a node from changes to the value output to reduce the
// number of revisitations.
class EffectGraphReducer {
public:
class Reduction {
public:
bool value_changed() const { return value_changed_; }
void set_value_changed() { value_changed_ = true; }
bool effect_changed() const { return effect_changed_; }
void set_effect_changed() { effect_changed_ = true; }
private:
bool value_changed_ = false;
bool effect_changed_ = false;
};
EffectGraphReducer(Graph* graph,
std::function<void(Node*, Reduction*)> reduce, Zone* zone);
void ReduceGraph() { ReduceFrom(graph_->end()); }
// Mark node for revisitation.
void Revisit(Node* node);
// Add a new root node to start reduction from. This is useful if the reducer
// adds nodes that are not yet reachable, but should already be considered
// part of the graph.
void AddRoot(Node* node) {
DCHECK(state_.Get(node) == State::kUnvisited);
state_.Set(node, State::kRevisit);
revisit_.push(node);
}
bool Complete() { return stack_.empty() && revisit_.empty(); }
private:
struct NodeState {
Node* node;
int input_index;
};
void ReduceFrom(Node* node);
enum class State : uint8_t { kUnvisited = 0, kRevisit, kOnStack, kVisited };
const uint8_t kNumStates = static_cast<uint8_t>(State::kVisited) + 1;
Graph* graph_;
NodeMarker<State> state_;
ZoneStack<Node*> revisit_;
ZoneStack<NodeState> stack_;
std::function<void(Node*, Reduction*)> reduce_;
};
// A variable is an abstract storage location, which is lowered to SSA values
// and phi nodes by {VariableTracker}.
class Variable {
public:
Variable() : id_(kInvalid) {}
bool operator==(Variable other) const { return id_ == other.id_; }
bool operator!=(Variable other) const { return id_ != other.id_; }
bool operator<(Variable other) const { return id_ < other.id_; }
static Variable Invalid() { return Variable(kInvalid); }
friend V8_INLINE size_t hash_value(Variable v) {
return base::hash_value(v.id_);
}
friend std::ostream& operator<<(std::ostream& os, Variable var) {
return os << var.id_;
}
private:
typedef int Id;
explicit Variable(Id id) : id_(id) {}
Id id_;
static const Id kInvalid = -1;
friend class VariableTracker;
};
// An object that can track the nodes in the graph whose current reduction
// depends on the value of the object.
class Dependable : public ZoneObject {
public:
explicit Dependable(Zone* zone) : dependants_(zone) {}
void AddDependency(Node* node) { dependants_.push_back(node); }
void RevisitDependants(EffectGraphReducer* reducer) {
for (Node* node : dependants_) {
reducer->Revisit(node);
}
dependants_.clear();
}
private:
ZoneVector<Node*> dependants_;
};
// A virtual object represents an allocation site and tracks the Variables
// associated with its fields as well as its global escape status.
class VirtualObject : public Dependable {
public:
typedef uint32_t Id;
typedef ZoneVector<Variable>::const_iterator const_iterator;
VirtualObject(VariableTracker* var_states, Id id, int size);
Maybe<Variable> FieldAt(int offset) const {
DCHECK(offset % kPointerSize == 0);
CHECK(!HasEscaped());
if (offset >= size()) {
// This can only happen in unreachable code.
return Nothing<Variable>();
}
return Just(fields_.at(offset / kPointerSize));
}
Id id() const { return id_; }
int size() const { return static_cast<int>(kPointerSize * fields_.size()); }
// Escaped might mean that the object escaped to untracked memory or that it
// is used in an operation that requires materialization.
void SetEscaped() { escaped_ = true; }
bool HasEscaped() const { return escaped_; }
const_iterator begin() const { return fields_.begin(); }
const_iterator end() const { return fields_.end(); }
private:
bool escaped_ = false;
Id id_;
ZoneVector<Variable> fields_;
};
class EscapeAnalysisResult {
public:
explicit EscapeAnalysisResult(EscapeAnalysisTracker* tracker)
: tracker_(tracker) {}
const VirtualObject* GetVirtualObject(Node* node);
Node* GetVirtualObjectField(const VirtualObject* vobject, int field,
Node* effect);
Node* GetReplacementOf(Node* node);
private:
EscapeAnalysisTracker* tracker_;
};
class V8_EXPORT_PRIVATE NewEscapeAnalysis final
: public NON_EXPORTED_BASE(EffectGraphReducer) {
public:
NewEscapeAnalysis(JSGraph* jsgraph, Zone* zone);
EscapeAnalysisResult analysis_result() {
DCHECK(Complete());
return EscapeAnalysisResult(tracker_);
}
private:
void Reduce(Node* node, Reduction* reduction);
JSGraph* jsgraph() { return jsgraph_; }
EscapeAnalysisTracker* tracker_;
JSGraph* jsgraph_;
};
} // namespace compiler
} // namespace internal
} // namespace v8
#endif // V8_COMPILER_NEW_ESCAPE_ANALYSIS_H_
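The fixed-point scheme behind EffectGraphReducer can be pictured with a small self-contained sketch (plain std containers instead of Zone allocation, and without the value/effect distinction the real class uses to cut down revisitations):

  #include <functional>
  #include <queue>
  #include <vector>

  struct SimpleNode {
    int id;
    std::vector<SimpleNode*> uses;  // nodes consuming this node's outputs
  };

  // Re-reduce nodes until nothing changes; assumes |reduce| returns true
  // only when the node actually changed, so the loop terminates.
  void ReduceToFixedPoint(SimpleNode* start,
                          const std::function<bool(SimpleNode*)>& reduce) {
    std::queue<SimpleNode*> worklist;
    worklist.push(start);
    while (!worklist.empty()) {
      SimpleNode* node = worklist.front();
      worklist.pop();
      if (reduce(node)) {
        // The node changed, so its uses may now reduce differently.
        for (SimpleNode* use : node->uses) worklist.push(use);
      }
    }
  }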
......@@ -482,38 +482,6 @@ bool NodeProperties::IsInputRange(Edge edge, int first, int num) {
return first <= index && index < first + num;
}
// static
size_t NodeProperties::HashCode(Node* node) {
size_t h = base::hash_combine(node->op()->HashCode(), node->InputCount());
for (Node* input : node->inputs()) {
h = base::hash_combine(h, input->id());
}
return h;
}
// static
bool NodeProperties::Equals(Node* a, Node* b) {
DCHECK_NOT_NULL(a);
DCHECK_NOT_NULL(b);
DCHECK_NOT_NULL(a->op());
DCHECK_NOT_NULL(b->op());
if (!a->op()->Equals(b->op())) return false;
if (a->InputCount() != b->InputCount()) return false;
Node::Inputs aInputs = a->inputs();
Node::Inputs bInputs = b->inputs();
auto aIt = aInputs.begin();
auto bIt = bInputs.begin();
auto aEnd = aInputs.end();
for (; aIt != aEnd; ++aIt, ++bIt) {
DCHECK_NOT_NULL(*aIt);
DCHECK_NOT_NULL(*bIt);
if ((*aIt)->id() != (*bIt)->id()) return false;
}
return true;
}
} // namespace compiler
} // namespace internal
} // namespace v8
......@@ -132,12 +132,6 @@ class V8_EXPORT_PRIVATE NodeProperties final {
// Checks if two nodes are the same, looking past {CheckHeapObject}.
static bool IsSame(Node* a, Node* b);
// Check if two nodes have equal operators and reference-equal inputs. Used
// for value numbering/hash-consing.
static bool Equals(Node* a, Node* b);
// A corresponding hash function.
static size_t HashCode(Node* node);
// Walks up the {effect} chain to find a witness that provides map
// information about the {receiver}. Can look through potentially
// side effecting nodes.
......
......@@ -62,7 +62,6 @@
V(ArgumentsElementsState) \
V(ArgumentsLengthState) \
V(ObjectState) \
V(ObjectId) \
V(TypedObjectState) \
V(Call) \
V(Parameter) \
......
This diff is collapsed.
......@@ -51,8 +51,6 @@
#include "src/compiler/machine-operator-reducer.h"
#include "src/compiler/memory-optimizer.h"
#include "src/compiler/move-optimizer.h"
#include "src/compiler/new-escape-analysis-reducer.h"
#include "src/compiler/new-escape-analysis.h"
#include "src/compiler/osr.h"
#include "src/compiler/pipeline-statistics.h"
#include "src/compiler/redundancy-elimination.h"
......@@ -1159,18 +1157,6 @@ struct EscapeAnalysisPhase {
static const char* phase_name() { return "escape analysis"; }
void Run(PipelineData* data, Zone* temp_zone) {
if (FLAG_turbo_new_escape) {
NewEscapeAnalysis escape_analysis(data->jsgraph(), temp_zone);
escape_analysis.ReduceGraph();
JSGraphReducer reducer(data->jsgraph(), temp_zone);
NewEscapeAnalysisReducer escape_reducer(&reducer, data->jsgraph(),
escape_analysis.analysis_result(),
temp_zone);
AddReducer(data, &reducer, &escape_reducer);
reducer.ReduceGraph();
// TODO(tebbi): Turn this into a debug mode check once we have confidence.
escape_reducer.VerifyReplacement();
} else {
EscapeAnalysis escape_analysis(data->graph(), data->jsgraph()->common(),
temp_zone);
if (!escape_analysis.Run()) return;
......@@ -1185,7 +1171,6 @@ struct EscapeAnalysisPhase {
}
escape_reducer.VerifyReplacement();
}
}
};
struct SimplifiedLoweringPhase {
......
......@@ -142,10 +142,12 @@ UseInfo TruncatingUseInfoFromRepresentation(MachineRepresentation rep) {
UNREACHABLE();
}
UseInfo UseInfoForBasePointer(const FieldAccess& access) {
return access.tag() != 0 ? UseInfo::AnyTagged() : UseInfo::PointerInt();
}
UseInfo UseInfoForBasePointer(const ElementAccess& access) {
return access.tag() != 0 ? UseInfo::AnyTagged() : UseInfo::PointerInt();
}
......@@ -1012,8 +1014,7 @@ class RepresentationSelector {
// The target of the call.
ProcessInput(node, i, UseInfo::Any());
} else if ((i - 1) < params) {
ProcessInput(node, i,
TruncatingUseInfoFromRepresentation(
ProcessInput(node, i, TruncatingUseInfoFromRepresentation(
desc->GetInputType(i).representation()));
} else {
ProcessInput(node, i, UseInfo::AnyTagged());
......@@ -1157,8 +1158,8 @@ class RepresentationSelector {
(*types)[i] =
DeoptMachineTypeOf(GetInfo(input)->representation(), TypeOf(input));
}
NodeProperties::ChangeOp(node, jsgraph_->common()->TypedObjectState(
ObjectIdOf(node->op()), types));
NodeProperties::ChangeOp(node,
jsgraph_->common()->TypedObjectState(types));
}
SetOutput(node, MachineRepresentation::kTagged);
}
......@@ -2851,8 +2852,6 @@ class RepresentationSelector {
return VisitStateValues(node);
case IrOpcode::kObjectState:
return VisitObjectState(node);
case IrOpcode::kObjectId:
return SetOutput(node, MachineRepresentation::kTaggedPointer);
case IrOpcode::kTypeGuard: {
// We just get rid of the sigma here, choosing the best representation
// for the sigma's type.
......@@ -3296,6 +3295,7 @@ void SimplifiedLowering::DoLoadBuffer(Node* node,
}
}
void SimplifiedLowering::DoStoreBuffer(Node* node) {
DCHECK_EQ(IrOpcode::kStoreBuffer, node->opcode());
MachineRepresentation const rep =
......@@ -3423,6 +3423,7 @@ Node* SimplifiedLowering::Int32Div(Node* const node) {
return graph()->NewNode(phi_op, true0, false0, merge0);
}
Node* SimplifiedLowering::Int32Mod(Node* const node) {
Int32BinopMatcher m(node);
Node* const zero = jsgraph()->Int32Constant(0);
......@@ -3555,6 +3556,7 @@ Node* SimplifiedLowering::Uint32Div(Node* const node) {
return d.Phi(MachineRepresentation::kWord32, zero, div);
}
Node* SimplifiedLowering::Uint32Mod(Node* const node) {
Uint32BinopMatcher m(node);
Node* const minus_one = jsgraph()->Int32Constant(-1);
......
......@@ -832,8 +832,6 @@ Type* Typer::Visitor::TypeTypedStateValues(Node* node) {
return Type::Internal();
}
Type* Typer::Visitor::TypeObjectId(Node* node) { UNREACHABLE(); }
Type* Typer::Visitor::TypeArgumentsElementsState(Node* node) {
return Type::Internal();
}
......
......@@ -14,6 +14,41 @@ namespace v8 {
namespace internal {
namespace compiler {
namespace {
size_t HashCode(Node* node) {
size_t h = base::hash_combine(node->op()->HashCode(), node->InputCount());
for (Node* input : node->inputs()) {
h = base::hash_combine(h, input->id());
}
return h;
}
bool Equals(Node* a, Node* b) {
DCHECK_NOT_NULL(a);
DCHECK_NOT_NULL(b);
DCHECK_NOT_NULL(a->op());
DCHECK_NOT_NULL(b->op());
if (!a->op()->Equals(b->op())) return false;
if (a->InputCount() != b->InputCount()) return false;
Node::Inputs aInputs = a->inputs();
Node::Inputs bInputs = b->inputs();
auto aIt = aInputs.begin();
auto bIt = bInputs.begin();
auto aEnd = aInputs.end();
for (; aIt != aEnd; ++aIt, ++bIt) {
DCHECK_NOT_NULL(*aIt);
DCHECK_NOT_NULL(*bIt);
if ((*aIt)->id() != (*bIt)->id()) return false;
}
return true;
}
} // namespace
ValueNumberingReducer::ValueNumberingReducer(Zone* temp_zone, Zone* graph_zone)
: entries_(nullptr),
capacity_(0),
......@@ -27,7 +62,7 @@ ValueNumberingReducer::~ValueNumberingReducer() {}
Reduction ValueNumberingReducer::Reduce(Node* node) {
if (!node->op()->HasProperty(Operator::kIdempotent)) return NoChange();
const size_t hash = NodeProperties::HashCode(node);
const size_t hash = HashCode(node);
if (!entries_) {
DCHECK(size_ == 0);
DCHECK(capacity_ == 0);
......@@ -96,7 +131,7 @@ Reduction ValueNumberingReducer::Reduce(Node* node) {
// Otherwise, keep searching for another collision.
continue;
}
if (NodeProperties::Equals(entry, node)) {
if (Equals(entry, node)) {
Reduction reduction = ReplaceIfTypesMatch(node, entry);
if (reduction.Changed()) {
// Overwrite the colliding entry with the actual entry.
......@@ -118,7 +153,7 @@ Reduction ValueNumberingReducer::Reduce(Node* node) {
dead = i;
continue;
}
if (NodeProperties::Equals(entry, node)) {
if (Equals(entry, node)) {
return ReplaceIfTypesMatch(node, entry);
}
}
......@@ -162,8 +197,7 @@ void ValueNumberingReducer::Grow() {
for (size_t i = 0; i < old_capacity; ++i) {
Node* const old_entry = old_entries[i];
if (!old_entry || old_entry->IsDead()) continue;
for (size_t j = NodeProperties::HashCode(old_entry) & mask;;
j = (j + 1) & mask) {
for (size_t j = HashCode(old_entry) & mask;; j = (j + 1) & mask) {
Node* const entry = entries_[j];
if (entry == old_entry) {
// Skip duplicate of the old entry.
......
......@@ -507,8 +507,6 @@ void Verifier::Visitor::Check(Node* node) {
// still be kStateValues.
break;
}
case IrOpcode::kObjectId:
CheckTypeIs(node, Type::Object());
case IrOpcode::kStateValues:
case IrOpcode::kTypedStateValues:
case IrOpcode::kArgumentsElementsState:
......
......@@ -465,8 +465,6 @@ DEFINE_BOOL(turbo_loop_variable, true, "Turbofan loop variable optimization")
DEFINE_BOOL(turbo_cf_optimization, true, "optimize control flow in TurboFan")
DEFINE_BOOL(turbo_frame_elision, true, "elide frames in TurboFan")
DEFINE_BOOL(turbo_escape, true, "enable escape analysis")
DEFINE_BOOL(turbo_new_escape, false,
"enable new implementation of escape analysis")
DEFINE_BOOL(turbo_instruction_scheduling, false,
"enable instruction scheduling in TurboFan")
DEFINE_BOOL(turbo_stress_instruction_scheduling, false,
......
......@@ -1430,7 +1430,7 @@ void OptimizedFrame::Summarize(List<FrameSummary>* frames,
} else {
// The receiver is not in a stack slot nor in a literal. We give up.
it.Skip(Translation::NumberOfOperandsFor(opcode));
// TODO(6586): Materializing a captured object (or duplicated
// TODO(3029): Materializing a captured object (or duplicated
// object) is hard, we return undefined for now. This breaks the
// produced stack trace, as constructor frames aren't marked as
// such anymore.
......
......@@ -811,10 +811,6 @@
'compiler/memory-optimizer.h',
'compiler/move-optimizer.cc',
'compiler/move-optimizer.h',
'compiler/new-escape-analysis.cc',
'compiler/new-escape-analysis.h',
'compiler/new-escape-analysis-reducer.cc',
'compiler/new-escape-analysis-reducer.h',
'compiler/node-aux-data.h',
'compiler/node-cache.cc',
'compiler/node-cache.h',
......@@ -836,7 +832,6 @@
'compiler/operator.h',
'compiler/osr.cc',
'compiler/osr.h',
'compiler/persistent-map.h',
'compiler/pipeline.cc',
'compiler/pipeline.h',
'compiler/pipeline-statistics.cc',
......
......@@ -12,11 +12,8 @@
#include <queue>
#include <set>
#include <stack>
#include <unordered_map>
#include <unordered_set>
#include <vector>
#include "src/base/functional.h"
#include "src/zone/zone-allocator.h"
namespace v8 {
......@@ -136,35 +133,6 @@ class ZoneMap
Compare(), ZoneAllocator<std::pair<const K, V>>(zone)) {}
};
// A wrapper subclass for std::unordered_map to make it easy to construct one
// that uses a zone allocator.
template <typename K, typename V, typename Hash = base::hash<K>,
typename KeyEqual = std::equal_to<K>>
class ZoneUnorderedMap
: public std::unordered_map<K, V, Hash, KeyEqual,
ZoneAllocator<std::pair<const K, V>>> {
public:
// Constructs an empty map.
explicit ZoneUnorderedMap(Zone* zone)
: std::unordered_map<K, V, Hash, KeyEqual,
ZoneAllocator<std::pair<const K, V>>>(
100, Hash(), KeyEqual(),
ZoneAllocator<std::pair<const K, V>>(zone)) {}
};
// A wrapper subclass for std::unordered_set to make it easy to construct one
// that uses a zone allocator.
template <typename K, typename Hash = base::hash<K>,
typename KeyEqual = std::equal_to<K>>
class ZoneUnorderedSet
: public std::unordered_set<K, Hash, KeyEqual, ZoneAllocator<K>> {
public:
// Constructs an empty set.
explicit ZoneUnorderedSet(Zone* zone)
: std::unordered_set<K, Hash, KeyEqual, ZoneAllocator<K>>(
100, Hash(), KeyEqual(), ZoneAllocator<K>(zone)) {}
};
// A wrapper subclass for std::multimap to make it easy to construct one that
// uses a zone allocator.
template <typename K, typename V, typename Compare = std::less<K>>
......
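The zone-backed wrappers removed above follow the same pattern as the surviving ZoneMap: they forward to the standard container with a ZoneAllocator, so all entries die with the zone. A usage sketch (mirroring the Zone setup used in the persistent-map unit test later in this diff):

  AccountingAllocator allocator;
  Zone zone(&allocator, ZONE_NAME);
  ZoneUnorderedMap<int, int> counts(&zone);  // entries allocated in |zone|
  counts[42] = 1;  // no per-entry delete needed; the zone is freed in bulk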
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --allow-natives-syntax --turbo-escape
function g(o) {
return {a : o, b: 42, c: o};
}
function f() {
var o = {a: {}, b: 43};
o.a = g(g(o));
o.c = o.a.c;
%DeoptimizeNow();
return o.c.a.c.a.c.a.c.b;
}
assertEquals(42, f());
assertEquals(42, f());
%OptimizeFunctionOnNextCall(f);
assertEquals(42, f());
......@@ -249,17 +249,14 @@ var c = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25];
})();
(function() {
// TODO(6586): Once we have fixed the materialization of receivers for stack
// trace computation, this should be /Array\.forEach/ again.
var re = /forEach/;
var lazyDeopt = function foobar(deopt) {
var re = /Array\.forEach/;
var lazyDeopt = function(deopt) {
var b = [1,2,3];
var result = 0;
var sum = function(v,i,o) {
result += v;
if (i == 1) {
var e = new Error();
print(e.stack);
assertTrue(re.exec(e.stack) !== null);
}
};
......
......@@ -259,7 +259,7 @@ var c = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25];
})();
(function() {
var re = /map/;
var re = /Array\.map/;
var lazyDeopt = function(deopt) {
var b = [1,2,3];
var result = 0;
......
......@@ -85,7 +85,6 @@ v8_executable("unittests") {
"compiler/node-test-utils.h",
"compiler/node-unittest.cc",
"compiler/opcodes-unittest.cc",
"compiler/persistent-unittest.cc",
"compiler/regalloc/live-range-unittest.cc",
"compiler/regalloc/move-optimizer-unittest.cc",
"compiler/regalloc/register-allocator-unittest.cc",
......
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <tuple>
#include "src/base/utils/random-number-generator.h"
#include "src/compiler/persistent-map.h"
#include "test/unittests/test-utils.h"
#include "testing/gmock/include/gmock/gmock.h"
namespace v8 {
namespace internal {
namespace compiler {
// A random distribution that produces both small values and arbitrary numbers.
static int small_big_distr(base::RandomNumberGenerator* rand) {
return rand->NextInt() / std::max(1, rand->NextInt() / 100);
}
TEST(PersistentMap, RefTest) {
base::RandomNumberGenerator rand(92834738);
AccountingAllocator allocator;
Zone zone(&allocator, ZONE_NAME);
std::vector<PersistentMap<int, int>> pers_maps;
pers_maps.emplace_back(&zone);
std::vector<std::map<int, int>> ref_maps = {{}};
for (int i = 0; i < 100000; ++i) {
if (rand.NextInt(2) == 0) {
// Read value;
int key = small_big_distr(&rand);
if (ref_maps[0].count(key) > 0) {
ASSERT_EQ(pers_maps[0].Get(key), ref_maps[0][key]);
} else {
ASSERT_EQ(pers_maps[0].Get(key), 0);
}
}
if (rand.NextInt(2) == 0) {
// Add value;
int key = small_big_distr(&rand);
int value = small_big_distr(&rand);
pers_maps[0].Set(key, value);
ref_maps[0][key] = value;
}
if (rand.NextInt(1000) == 0) {
// Create empty map.
pers_maps.emplace_back(&zone);
ref_maps.emplace_back();
}
if (rand.NextInt(100) == 0) {
// Copy and move around maps.
int num_maps = static_cast<int>(pers_maps.size());
int source = rand.NextInt(num_maps - 1) + 1;
int target = rand.NextInt(num_maps - 1) + 1;
pers_maps[target] = std::move(pers_maps[0]);
ref_maps[target] = std::move(ref_maps[0]);
pers_maps[0] = pers_maps[source];
ref_maps[0] = ref_maps[source];
}
}
for (size_t i = 0; i < pers_maps.size(); ++i) {
std::set<int> keys;
for (auto pair : pers_maps[i]) {
ASSERT_EQ(keys.count(pair.first), 0u);
keys.insert(pair.first);
ASSERT_EQ(ref_maps[i][pair.first], pair.second);
}
for (auto pair : ref_maps[i]) {
ASSERT_EQ(pers_maps[i].Get(pair.first), pair.second);
}
}
}
TEST(PersistentMap, Zip) {
base::RandomNumberGenerator rand(92834738);
AccountingAllocator allocator;
Zone zone(&allocator, ZONE_NAME);
// Provoke hash collisions to stress the iterator.
struct bad_hash {
size_t operator()(int key) { return static_cast<size_t>(key) % 1000; }
};
PersistentMap<int, int, bad_hash> a(&zone);
PersistentMap<int, int, bad_hash> b(&zone);
int sum_a = 0;
int sum_b = 0;
for (int i = 0; i < 30000; ++i) {
int key = small_big_distr(&rand);
int value = small_big_distr(&rand);
if (rand.NextBool()) {
sum_a += value;
a.Set(key, a.Get(key) + value);
} else {
sum_b += value;
b.Set(key, b.Get(key) + value);
}
}
int sum = sum_a + sum_b;
for (auto pair : a) {
sum_a -= pair.second;
}
ASSERT_EQ(0, sum_a);
for (auto pair : b) {
sum_b -= pair.second;
}
ASSERT_EQ(0, sum_b);
for (auto triple : a.Zip(b)) {
sum -= std::get<1>(triple) + std::get<2>(triple);
}
ASSERT_EQ(0, sum);
}
} // namespace compiler
} // namespace internal
} // namespace v8
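For orientation, the PersistentMap behavior the tests above depend on, in a minimal sketch (names from this file; semantics as asserted by the tests):

  PersistentMap<int, int> a(&zone);
  a.Set(1, 10);
  PersistentMap<int, int> b = a;  // copies are cheap and share structure
  b.Set(1, 20);                   // leaves |a| untouched
  // Now a.Get(1) == 10 and b.Get(1) == 20; Get() of an absent key yields 0.
  // a.Zip(b) visits (key, value_in_a, value_in_b) at keys where they differ.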
......@@ -80,7 +80,6 @@
'compiler/node-test-utils.h',
'compiler/node-unittest.cc',
'compiler/opcodes-unittest.cc',
'compiler/persistent-unittest.cc',
'compiler/regalloc/register-allocator-unittest.cc',
'compiler/schedule-unittest.cc',
'compiler/scheduler-unittest.cc',
......