Commit c87a3dda authored by Tobias Tebbi, committed by Commit Bot

Revert "Reland: [turbofan] staging new implementation of escape analysis"

This reverts commit ccd8bb69.

Reason for revert: https://build.chromium.org/p/client.v8.fyi/builders/Mac%20Release%20%28Intel%29/builds/2643

Original change's description:
> Reland: [turbofan] staging new implementation of escape analysis
> 
> Reland of https://chromium-review.googlesource.com/c/565720, fixing compilation issues on the waterfall.
> 
> Bug: 
> Change-Id: Ide4f1ea4470e946820edc990c9bf027f04844efe
> Reviewed-on: https://chromium-review.googlesource.com/591667
> Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
> Commit-Queue: Tobias Tebbi <tebbi@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#46975}

TBR=jarin@chromium.org,tebbi@chromium.org

Change-Id: I30016fd8d71535c02bab8678b02147195c3e97a6
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Reviewed-on: https://chromium-review.googlesource.com/591672
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Commit-Queue: Tobias Tebbi <tebbi@chromium.org>
Cr-Commit-Position: refs/heads/master@{#46980}
parent 59ddd606
@@ -1381,10 +1381,6 @@ v8_source_set("v8_base") {
     "src/compiler/memory-optimizer.h",
     "src/compiler/move-optimizer.cc",
     "src/compiler/move-optimizer.h",
-    "src/compiler/new-escape-analysis-reducer.cc",
-    "src/compiler/new-escape-analysis-reducer.h",
-    "src/compiler/new-escape-analysis.cc",
-    "src/compiler/new-escape-analysis.h",
     "src/compiler/node-aux-data.h",
     "src/compiler/node-cache.cc",
     "src/compiler/node-cache.h",
@@ -1406,7 +1402,6 @@ v8_source_set("v8_base") {
     "src/compiler/operator.h",
     "src/compiler/osr.cc",
     "src/compiler/osr.h",
-    "src/compiler/persistent-map.h",
     "src/compiler/pipeline-statistics.cc",
     "src/compiler/pipeline-statistics.h",
     "src/compiler/pipeline.cc",
...
@@ -172,6 +172,7 @@ struct hash<T*> : public std::unary_function<T*, size_t> {
   }
 };
 
 // base::bit_equal_to is a function object class for bitwise equality
 // comparison, similar to std::equal_to, except that the comparison is performed
 // on the bit representation of the operands.
...
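The comment in the hunk above describes base::bit_equal_to: equality on the bit representation rather than on values. As a standalone illustration (plain C++, not the V8 implementation; this bit_equal_to is a simplified stand-in), bitwise comparison separates +0.0 from -0.0 and treats a NaN as equal to an identical NaN bit pattern:

#include <cmath>
#include <cstring>
#include <iostream>

// Simplified stand-in: equality on the object representation, as the comment
// above describes.
template <typename T>
struct bit_equal_to {
  bool operator()(const T& lhs, const T& rhs) const {
    return std::memcmp(&lhs, &rhs, sizeof(T)) == 0;
  }
};

int main() {
  bit_equal_to<double> bit_eq;
  double nan = std::nan("");
  std::cout << (nan == nan) << "\n";       // 0: NaN is never value-equal
  std::cout << bit_eq(nan, nan) << "\n";   // 1: identical bit pattern
  std::cout << (0.0 == -0.0) << "\n";      // 1: value equality ignores the sign
  std::cout << bit_eq(0.0, -0.0) << "\n";  // 0: the sign bit differs
}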
@@ -142,22 +142,6 @@ std::ostream& operator<<(std::ostream& os, ParameterInfo const& i) {
   return os;
 }
 
-std::ostream& operator<<(std::ostream& os, ObjectStateInfo const& i) {
-  return os << "id:" << i.object_id() << "|size:" << i.size();
-}
-
-size_t hash_value(ObjectStateInfo const& p) {
-  return base::hash_combine(p.object_id(), p.size());
-}
-
-std::ostream& operator<<(std::ostream& os, TypedObjectStateInfo const& i) {
-  return os << "id:" << i.object_id() << "|" << i.machine_types();
-}
-
-size_t hash_value(TypedObjectStateInfo const& p) {
-  return base::hash_combine(p.object_id(), p.machine_types());
-}
-
 bool operator==(RelocatablePtrConstantInfo const& lhs,
                 RelocatablePtrConstantInfo const& rhs) {
   return lhs.rmode() == rhs.rmode() && lhs.value() == rhs.value() &&
@@ -338,7 +322,7 @@ ZoneVector<MachineType> const* MachineTypesOf(Operator const* op) {
   if (op->opcode() == IrOpcode::kTypedStateValues) {
     return OpParameter<TypedStateValueInfo>(op).machine_types();
   }
-  return OpParameter<TypedObjectStateInfo>(op).machine_types();
+  return OpParameter<const ZoneVector<MachineType>*>(op);
 }
 
 #define CACHED_OP_LIST(V) \
@@ -1092,14 +1076,6 @@ const Operator* CommonOperatorBuilder::RelocatableInt64Constant(
       RelocatablePtrConstantInfo(value, rmode));  // parameter
 }
 
-const Operator* CommonOperatorBuilder::ObjectId(uint32_t object_id) {
-  return new (zone()) Operator1<uint32_t>(   // --
-      IrOpcode::kObjectId, Operator::kPure,  // opcode
-      "ObjectId",                            // name
-      0, 0, 0, 1, 0, 0,                      // counts
-      object_id);                            // parameter
-}
-
 const Operator* CommonOperatorBuilder::Select(MachineRepresentation rep,
                                               BranchHint hint) {
   return new (zone()) Operator1<SelectParameters>(  // --
@@ -1244,35 +1220,21 @@ bool IsRestOf(Operator const* op) {
   return OpParameter<bool>(op);
 }
 
-const Operator* CommonOperatorBuilder::ObjectState(int object_id,
-                                                   int pointer_slots) {
-  return new (zone()) Operator1<ObjectStateInfo>(  // --
+const Operator* CommonOperatorBuilder::ObjectState(int pointer_slots) {
+  return new (zone()) Operator1<int>(           // --
       IrOpcode::kObjectState, Operator::kPure,  // opcode
       "ObjectState",                            // name
       pointer_slots, 0, 0, 1, 0, 0,             // counts
-      ObjectStateInfo{object_id, pointer_slots});  // parameter
+      pointer_slots);                           // parameter
 }
 
 const Operator* CommonOperatorBuilder::TypedObjectState(
-    int object_id, const ZoneVector<MachineType>* types) {
-  return new (zone()) Operator1<TypedObjectStateInfo>(  // --
+    const ZoneVector<MachineType>* types) {
+  return new (zone()) Operator1<const ZoneVector<MachineType>*>(  // --
       IrOpcode::kTypedObjectState, Operator::kPure,    // opcode
       "TypedObjectState",                              // name
       static_cast<int>(types->size()), 0, 0, 1, 0, 0,  // counts
-      TypedObjectStateInfo(object_id, types));         // parameter
-}
-
-uint32_t ObjectIdOf(Operator const* op) {
-  switch (op->opcode()) {
-    case IrOpcode::kObjectState:
-      return OpParameter<ObjectStateInfo>(op).object_id();
-    case IrOpcode::kTypedObjectState:
-      return OpParameter<TypedObjectStateInfo>(op).object_id();
-    case IrOpcode::kObjectId:
-      return OpParameter<uint32_t>(op);
-    default:
-      UNREACHABLE();
-  }
+      types);                                          // parameter
 }
 
 const Operator* CommonOperatorBuilder::FrameState(
...
@@ -123,23 +123,6 @@ std::ostream& operator<<(std::ostream&, ParameterInfo const&);
 V8_EXPORT_PRIVATE int ParameterIndexOf(const Operator* const);
 const ParameterInfo& ParameterInfoOf(const Operator* const);
 
-struct ObjectStateInfo final : std::pair<uint32_t, int> {
-  using std::pair<uint32_t, int>::pair;
-  uint32_t object_id() const { return first; }
-  int size() const { return second; }
-};
-std::ostream& operator<<(std::ostream&, ObjectStateInfo const&);
-size_t hash_value(ObjectStateInfo const& p);
-
-struct TypedObjectStateInfo final
-    : std::pair<uint32_t, const ZoneVector<MachineType>*> {
-  using std::pair<uint32_t, const ZoneVector<MachineType>*>::pair;
-  uint32_t object_id() const { return first; }
-  const ZoneVector<MachineType>* machine_types() const { return second; }
-};
-std::ostream& operator<<(std::ostream&, TypedObjectStateInfo const&);
-size_t hash_value(TypedObjectStateInfo const& p);
-
 class RelocatablePtrConstantInfo final {
  public:
   enum Type { kInt32, kInt64 };
@@ -313,8 +296,6 @@ ZoneVector<MachineType> const* MachineTypesOf(Operator const*)
 // IsRestOf(op) is true in the second case.
 bool IsRestOf(Operator const*);
 
-uint32_t ObjectIdOf(Operator const*);
-
 // Interface for building common operators that can be used at any level of IR,
 // including JavaScript, mid-level, and low-level.
 class V8_EXPORT_PRIVATE CommonOperatorBuilder final
@@ -359,7 +340,6 @@ class V8_EXPORT_PRIVATE CommonOperatorBuilder final
   const Operator* NumberConstant(volatile double);
   const Operator* PointerConstant(intptr_t);
   const Operator* HeapConstant(const Handle<HeapObject>&);
-  const Operator* ObjectId(uint32_t);
   const Operator* RelocatableInt32Constant(int32_t value,
                                            RelocInfo::Mode rmode);
@@ -382,9 +362,8 @@ class V8_EXPORT_PRIVATE CommonOperatorBuilder final
                                    SparseInputMask bitmask);
   const Operator* ArgumentsElementsState(bool is_rest);
   const Operator* ArgumentsLengthState(bool is_rest);
-  const Operator* ObjectState(int object_id, int pointer_slots);
-  const Operator* TypedObjectState(int object_id,
-                                   const ZoneVector<MachineType>* types);
+  const Operator* ObjectState(int pointer_slots);
+  const Operator* TypedObjectState(const ZoneVector<MachineType>* types);
   const Operator* FrameState(BailoutId bailout_id,
                              OutputFrameStateCombine state_combine,
                              const FrameStateFunctionInfo* function_info);
...
@@ -125,8 +125,6 @@ const Alias EscapeStatusAnalysis::kNotReachable =
 const Alias EscapeStatusAnalysis::kUntrackable =
     std::numeric_limits<Alias>::max() - 1;
 
-namespace impl {
-
 class VirtualObject : public ZoneObject {
  public:
   enum Status {
@@ -568,9 +566,6 @@ bool VirtualState::MergeFrom(MergeCache* cache, Zone* zone, Graph* graph,
   return changed;
 }
 
-}  // namespace impl
-
-using namespace impl;
 
 EscapeStatusAnalysis::EscapeStatusAnalysis(EscapeAnalysis* object_analysis,
                                            Graph* graph, Zone* zone)
     : stack_(zone),
@@ -1689,8 +1684,8 @@ Node* EscapeAnalysis::GetOrCreateObjectState(Node* effect, Node* node) {
   }
   int input_count = static_cast<int>(cache_->fields().size());
   Node* new_object_state =
-      graph()->NewNode(common()->ObjectState(vobj->id(), input_count),
-                       input_count, &cache_->fields().front());
+      graph()->NewNode(common()->ObjectState(input_count), input_count,
+                       &cache_->fields().front());
   NodeProperties::SetType(new_object_state, Type::OtherInternal());
   vobj->SetObjectState(new_object_state);
   TRACE(
...
@@ -15,11 +15,9 @@ namespace compiler {
 // Forward declarations.
 class CommonOperatorBuilder;
 class EscapeStatusAnalysis;
-namespace impl {
 class MergeCache;
 class VirtualState;
 class VirtualObject;
-};  // namespace impl
 
 // EscapeObjectAnalysis simulates stores to determine values of loads if
 // an object is virtual and eliminated.
@@ -57,19 +55,17 @@ class V8_EXPORT_PRIVATE EscapeAnalysis {
   bool ProcessEffectPhi(Node* node);
   void ForwardVirtualState(Node* node);
-  impl::VirtualState* CopyForModificationAt(impl::VirtualState* state,
-                                            Node* node);
-  impl::VirtualObject* CopyForModificationAt(impl::VirtualObject* obj,
-                                             impl::VirtualState* state,
+  VirtualState* CopyForModificationAt(VirtualState* state, Node* node);
+  VirtualObject* CopyForModificationAt(VirtualObject* obj, VirtualState* state,
                                        Node* node);
 
   Node* replacement(Node* node);
-  bool UpdateReplacement(impl::VirtualState* state, Node* node, Node* rep);
-  impl::VirtualObject* GetVirtualObject(impl::VirtualState* state, Node* node);
+  bool UpdateReplacement(VirtualState* state, Node* node, Node* rep);
+  VirtualObject* GetVirtualObject(VirtualState* state, Node* node);
 
   void DebugPrint();
-  void DebugPrintState(impl::VirtualState* state);
+  void DebugPrintState(VirtualState* state);
 
   Graph* graph() const;
   Zone* zone() const { return zone_; }
@@ -79,10 +75,10 @@ class V8_EXPORT_PRIVATE EscapeAnalysis {
   Node* const slot_not_analyzed_;
   CommonOperatorBuilder* const common_;
   EscapeStatusAnalysis* status_analysis_;
-  ZoneVector<impl::VirtualState*> virtual_states_;
+  ZoneVector<VirtualState*> virtual_states_;
   ZoneVector<Node*> replacements_;
-  ZoneSet<impl::VirtualObject*> cycle_detection_;
-  impl::MergeCache* cache_;
+  ZoneSet<VirtualObject*> cycle_detection_;
+  MergeCache* cache_;
 
   DISALLOW_COPY_AND_ASSIGN(EscapeAnalysis);
 };
...
@@ -482,39 +482,21 @@ class StateObjectDeduplicator {
   static const size_t kNotDuplicated = SIZE_MAX;
 
   size_t GetObjectId(Node* node) {
-    DCHECK(node->opcode() == IrOpcode::kTypedObjectState ||
-           node->opcode() == IrOpcode::kObjectId ||
-           node->opcode() == IrOpcode::kArgumentsElementsState);
     for (size_t i = 0; i < objects_.size(); ++i) {
-      if (objects_[i] == node) return i;
-      // ObjectId nodes are the Turbofan way to express objects with the same
-      // identity in the deopt info. So they should always be mapped to
-      // previously appearing TypedObjectState nodes.
-      if (HasObjectId(objects_[i]) && HasObjectId(node) &&
-          ObjectIdOf(objects_[i]->op()) == ObjectIdOf(node->op())) {
+      if (objects_[i] == node) {
         return i;
       }
     }
-    DCHECK(node->opcode() == IrOpcode::kTypedObjectState ||
-           node->opcode() == IrOpcode::kArgumentsElementsState);
     return kNotDuplicated;
   }
 
   size_t InsertObject(Node* node) {
-    DCHECK(node->opcode() == IrOpcode::kTypedObjectState ||
-           node->opcode() == IrOpcode::kObjectId ||
-           node->opcode() == IrOpcode::kArgumentsElementsState);
     size_t id = objects_.size();
     objects_.push_back(node);
     return id;
   }
 
  private:
-  static bool HasObjectId(Node* node) {
-    return node->opcode() == IrOpcode::kTypedObjectState ||
-           node->opcode() == IrOpcode::kObjectId;
-  }
-
   ZoneVector<Node*> objects_;
 };
@@ -545,11 +527,9 @@ size_t InstructionSelector::AddOperandToStateValueDescriptor(
     case IrOpcode::kObjectState: {
       UNREACHABLE();
     }
-    case IrOpcode::kTypedObjectState:
-    case IrOpcode::kObjectId: {
+    case IrOpcode::kTypedObjectState: {
       size_t id = deduplicator->GetObjectId(input);
       if (id == StateObjectDeduplicator::kNotDuplicated) {
-        DCHECK(input->opcode() == IrOpcode::kTypedObjectState);
         size_t entries = 0;
         id = deduplicator->InsertObject(input);
         StateValueList* nested = values->PushRecursiveField(zone, id);
...
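The GetObjectId removed above matches an entry either by node identity or because both nodes carry the same object id, which is how an ObjectId node gets folded onto the TypedObjectState node that introduced that id. A standalone sketch of that lookup rule, using toy types rather than the V8 deduplicator:

#include <cstddef>
#include <iostream>
#include <vector>

struct FakeNode {
  int object_id;  // -1 would mean "carries no object id"
};

const size_t kNotDuplicated = static_cast<size_t>(-1);

// Returns the index of a matching entry, or kNotDuplicated.
size_t GetObjectId(const std::vector<const FakeNode*>& objects,
                   const FakeNode* node) {
  for (size_t i = 0; i < objects.size(); ++i) {
    if (objects[i] == node) return i;  // the very same node
    if (objects[i]->object_id >= 0 && node->object_id >= 0 &&
        objects[i]->object_id == node->object_id) {
      return i;  // a different node that refers to the same object
    }
  }
  return kNotDuplicated;
}

int main() {
  FakeNode typed_state{7};    // plays the role of a TypedObjectState node
  FakeNode object_id_ref{7};  // plays the role of a later ObjectId node
  std::vector<const FakeNode*> objects;
  std::cout << (GetObjectId(objects, &typed_state) == kNotDuplicated) << "\n";  // 1
  objects.push_back(&typed_state);
  std::cout << GetObjectId(objects, &object_id_ref) << "\n";  // 0: same id as entry 0
}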
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_COMPILER_NEW_ESCAPE_ANALYSIS_REDUCER_H_
#define V8_COMPILER_NEW_ESCAPE_ANALYSIS_REDUCER_H_
#include "src/base/compiler-specific.h"
#include "src/compiler/graph-reducer.h"
#include "src/compiler/new-escape-analysis.h"
#include "src/globals.h"
namespace v8 {
namespace internal {
namespace compiler {
class Deduplicator;
class JSGraph;
// Perform hash-consing when creating or mutating nodes. Used to avoid duplicate
// nodes when creating ObjectState, StateValues and FrameState nodes
class NodeHashCache {
public:
NodeHashCache(Graph* graph, Zone* zone)
: graph_(graph), cache_(zone), temp_nodes_(zone) {}
// Handle to a conceptually new mutable node. Tries to re-use existing nodes
// and to recycle memory if possible.
class Constructor {
public:
// Construct a new node as a clone of [from].
Constructor(NodeHashCache* cache, Node* from)
: node_cache_(cache), from_(from), tmp_(nullptr) {}
// Construct a new node from scratch.
Constructor(NodeHashCache* cache, const Operator* op, int input_count,
Node** inputs, Type* type);
// Modify the new node.
void ReplaceValueInput(Node* input, int i) {
if (!tmp_ && input == NodeProperties::GetValueInput(from_, i)) return;
Node* node = MutableNode();
NodeProperties::ReplaceValueInput(node, input, i);
}
void ReplaceInput(Node* input, int i) {
if (!tmp_ && input == from_->InputAt(i)) return;
Node* node = MutableNode();
node->ReplaceInput(i, input);
}
// Obtain the mutated node or a cached copy. Invalidates the [Constructor].
Node* Get();
private:
Node* MutableNode();
NodeHashCache* node_cache_;
// Original node, copied on write.
Node* from_;
// Temporary node used for mutations, can be recycled if cache is hit.
Node* tmp_;
};
private:
Node* Query(Node* node);
void Insert(Node* node) { cache_.insert(node); }
Graph* graph_;
struct NodeEquals {
bool operator()(Node* a, Node* b) const {
return NodeProperties::Equals(a, b);
}
};
struct NodeHashCode {
size_t operator()(Node* n) const { return NodeProperties::HashCode(n); }
};
ZoneUnorderedSet<Node*, NodeHashCode, NodeEquals> cache_;
// Unused nodes whose memory can be recycled.
ZoneVector<Node*> temp_nodes_;
};
// Modify the graph according to the information computed in the previous phase.
class V8_EXPORT_PRIVATE NewEscapeAnalysisReducer final
: public NON_EXPORTED_BASE(AdvancedReducer) {
public:
NewEscapeAnalysisReducer(Editor* editor, JSGraph* jsgraph,
EscapeAnalysisResult analysis_result, Zone* zone);
Reduction Reduce(Node* node) override;
const char* reducer_name() const override {
return "NewEscapeAnalysisReducer";
}
void Finalize() override;
// Verifies that all virtual allocation nodes have been dealt with. Run it
// after this reducer has been applied.
void VerifyReplacement() const;
private:
void ReduceFrameStateInputs(Node* node);
Node* ReduceDeoptState(Node* node, Node* effect, Deduplicator* deduplicator);
Node* ObjectIdNode(const VirtualObject* vobject);
Node* MaybeGuard(Node* original, Node* replacement);
JSGraph* jsgraph() const { return jsgraph_; }
EscapeAnalysisResult analysis_result() const { return analysis_result_; }
Zone* zone() const { return zone_; }
JSGraph* const jsgraph_;
EscapeAnalysisResult analysis_result_;
ZoneVector<Node*> object_id_cache_;
NodeHashCache node_cache_;
ZoneSet<Node*> arguments_elements_;
Zone* const zone_;
DISALLOW_COPY_AND_ASSIGN(NewEscapeAnalysisReducer);
};
} // namespace compiler
} // namespace internal
} // namespace v8
#endif // V8_COMPILER_NEW_ESCAPE_ANALYSIS_REDUCER_H_
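NodeHashCache above is a hash-consing cache: a node is keyed on its operator and on the identities of its inputs (the same notion of equality as the NodeProperties::Equals/HashCode helpers removed elsewhere in this revert), so requesting the same combination twice yields the node that already exists. A standalone sketch of that idea with toy types, not the V8 classes:

#include <cstddef>
#include <functional>
#include <iostream>
#include <memory>
#include <unordered_set>
#include <utility>
#include <vector>

struct Node {
  int op;                     // stands in for the Operator
  std::vector<Node*> inputs;  // inputs are compared by identity
};

struct NodeHash {
  size_t operator()(const Node* n) const {
    size_t h = std::hash<int>()(n->op);
    for (Node* input : n->inputs) h = h * 31 + std::hash<Node*>()(input);
    return h;
  }
};

struct NodeEquals {
  bool operator()(const Node* a, const Node* b) const {
    return a->op == b->op && a->inputs == b->inputs;
  }
};

class NodeCache {
 public:
  // Returns an existing structurally identical node or creates a new one.
  Node* GetOrCreate(int op, std::vector<Node*> inputs) {
    auto probe = std::make_unique<Node>(Node{op, std::move(inputs)});
    auto it = cache_.find(probe.get());
    if (it != cache_.end()) return *it;
    nodes_.push_back(std::move(probe));
    cache_.insert(nodes_.back().get());
    return nodes_.back().get();
  }

 private:
  std::vector<std::unique_ptr<Node>> nodes_;
  std::unordered_set<Node*, NodeHash, NodeEquals> cache_;
};

int main() {
  NodeCache cache;
  Node* a = cache.GetOrCreate(1, {});
  Node* b = cache.GetOrCreate(2, {a});
  Node* c = cache.GetOrCreate(2, {a});
  std::cout << (b == c) << "\n";  // 1: the second request hits the cache
}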
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_COMPILER_NEW_ESCAPE_ANALYSIS_H_
#define V8_COMPILER_NEW_ESCAPE_ANALYSIS_H_
#include "src/base/functional.h"
#include "src/compiler/graph-reducer.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/persistent-map.h"
#include "src/globals.h"
#ifdef DEBUG
#define TRACE(...) \
do { \
if (FLAG_trace_turbo_escape) PrintF(__VA_ARGS__); \
} while (false)
#else
#define TRACE(...)
#endif
namespace v8 {
namespace internal {
namespace compiler {
class CommonOperatorBuilder;
class VariableTracker;
class EscapeAnalysisTracker;
#ifdef DEBUG
class TraceScope {
public:
TraceScope(const char* name, Node* node) : name_(name), node_(node) {
for (int i = 0; i < depth; ++i) TRACE(" ");
TRACE("[ %s %s#%d\n", name, node->op()->mnemonic(), node->id());
++depth;
}
~TraceScope() {
--depth;
for (int i = 0; i < depth; ++i) TRACE(" ");
TRACE("] %s %s#%d\n", name_, node_->op()->mnemonic(), node_->id());
}
private:
const char* name_;
Node* node_;
static thread_local int depth;
};
#define TRACE_FN(name, node) TraceScope __trace_scope_(name, node)
#else
#define TRACE_FN(name, node)
#endif
// {EffectGraphReducer} reduces up to a fixed point. It distinguishes changes to
// the effect output of a node from changes to the value output to reduce the
// number of revisitations.
class EffectGraphReducer {
public:
class Reduction {
public:
bool value_changed() const { return value_changed_; }
void set_value_changed() { value_changed_ = true; }
bool effect_changed() const { return effect_changed_; }
void set_effect_changed() { effect_changed_ = true; }
private:
bool value_changed_ = false;
bool effect_changed_ = false;
};
EffectGraphReducer(Graph* graph,
std::function<void(Node*, Reduction*)> reduce, Zone* zone);
void ReduceGraph() { ReduceFrom(graph_->end()); }
// Mark node for revisitation.
void Revisit(Node* node);
// Add a new root node to start reduction from. This is useful if the reducer
// adds nodes that are not yet reachable, but should already be considered
// part of the graph.
void AddRoot(Node* node) {
DCHECK(state_.Get(node) == State::kUnvisited);
state_.Set(node, State::kRevisit);
revisit_.push(node);
}
bool Complete() { return stack_.empty() && revisit_.empty(); }
private:
struct NodeState {
Node* node;
int input_index;
};
void ReduceFrom(Node* node);
enum class State : uint8_t { kUnvisited = 0, kRevisit, kOnStack, kVisited };
const uint8_t kNumStates = static_cast<uint8_t>(State::kVisited) + 1;
Graph* graph_;
NodeMarker<State> state_;
ZoneStack<Node*> revisit_;
ZoneStack<NodeState> stack_;
std::function<void(Node*, Reduction*)> reduce_;
};
// A variable is an abstract storage location, which is lowered to SSA values
// and phi nodes by {VariableTracker}.
class Variable {
public:
Variable() : id_(kInvalid) {}
bool operator==(Variable other) const { return id_ == other.id_; }
bool operator!=(Variable other) const { return id_ != other.id_; }
bool operator<(Variable other) const { return id_ < other.id_; }
static Variable Invalid() { return Variable(kInvalid); }
friend V8_INLINE size_t hash_value(Variable v) {
return base::hash_value(v.id_);
}
friend std::ostream& operator<<(std::ostream& os, Variable var) {
return os << var.id_;
}
private:
typedef int Id;
explicit Variable(Id id) : id_(id) {}
Id id_;
static const Id kInvalid = -1;
friend class VariableTracker;
};
// An object that can track the nodes in the graph whose current reduction
// depends on the value of the object.
class Dependable : public ZoneObject {
public:
explicit Dependable(Zone* zone) : dependants_(zone) {}
void AddDependency(Node* node) { dependants_.push_back(node); }
void RevisitDependants(EffectGraphReducer* reducer) {
for (Node* node : dependants_) {
reducer->Revisit(node);
}
dependants_.clear();
}
private:
ZoneVector<Node*> dependants_;
};
// A virtual object represents an allocation site and tracks the Variables
// associated with its fields as well as its global escape status.
class VirtualObject : public Dependable {
public:
typedef uint32_t Id;
typedef ZoneVector<Variable>::const_iterator const_iterator;
VirtualObject(VariableTracker* var_states, Id id, int size);
Maybe<Variable> FieldAt(int offset) const {
DCHECK(offset % kPointerSize == 0);
CHECK(!HasEscaped());
if (offset >= size()) {
// This can only happen in unreachable code.
return Nothing<Variable>();
}
return Just(fields_.at(offset / kPointerSize));
}
Id id() const { return id_; }
int size() const { return static_cast<int>(kPointerSize * fields_.size()); }
// Escaped might mean that the object escaped to untracked memory or that it
// is used in an operation that requires materialization.
void SetEscaped() { escaped_ = true; }
bool HasEscaped() const { return escaped_; }
const_iterator begin() const { return fields_.begin(); }
const_iterator end() const { return fields_.end(); }
private:
bool escaped_ = false;
Id id_;
ZoneVector<Variable> fields_;
};
class EscapeAnalysisResult {
public:
explicit EscapeAnalysisResult(EscapeAnalysisTracker* tracker)
: tracker_(tracker) {}
const VirtualObject* GetVirtualObject(Node* node);
Node* GetVirtualObjectField(const VirtualObject* vobject, int field,
Node* effect);
Node* GetReplacementOf(Node* node);
private:
EscapeAnalysisTracker* tracker_;
};
class V8_EXPORT_PRIVATE NewEscapeAnalysis final
: public NON_EXPORTED_BASE(EffectGraphReducer) {
public:
NewEscapeAnalysis(JSGraph* jsgraph, Zone* zone);
EscapeAnalysisResult analysis_result() {
DCHECK(Complete());
return EscapeAnalysisResult(tracker_);
}
private:
void Reduce(Node* node, Reduction* reduction);
JSGraph* jsgraph() { return jsgraph_; }
EscapeAnalysisTracker* tracker_;
JSGraph* jsgraph_;
};
} // namespace compiler
} // namespace internal
} // namespace v8
#endif // V8_COMPILER_NEW_ESCAPE_ANALYSIS_H_
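The EffectGraphReducer declared above revisits nodes until a fixed point is reached, distinguishing value changes from effect changes to keep revisits cheap. A standalone sketch of the underlying worklist-to-fixed-point pattern, with a toy graph and a single escaped bit per node instead of the real per-object state:

#include <iostream>
#include <queue>
#include <vector>

struct Node {
  std::vector<int> inputs;  // indices of input nodes
  bool escaped = false;
};

// Visit nodes until no visit changes any state (a fixed point).
void RunToFixedPoint(std::vector<Node>& graph, std::queue<int>& worklist) {
  while (!worklist.empty()) {
    int id = worklist.front();
    worklist.pop();
    if (!graph[id].escaped) continue;
    // An escaping use makes its inputs escape as well; a change enqueues the
    // affected node again, the analogue of Revisit(node).
    for (int input : graph[id].inputs) {
      if (!graph[input].escaped) {
        graph[input].escaped = true;
        worklist.push(input);
      }
    }
  }
}

int main() {
  // Node 2 uses node 1, node 1 uses node 0; node 2 escapes (say, a heap store).
  std::vector<Node> graph(3);
  graph[1].inputs = {0};
  graph[2].inputs = {1};
  graph[2].escaped = true;
  std::queue<int> worklist;
  worklist.push(2);
  RunToFixedPoint(graph, worklist);
  for (const Node& n : graph) std::cout << n.escaped << " ";  // 1 1 1
  std::cout << "\n";
}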
@@ -482,38 +482,6 @@ bool NodeProperties::IsInputRange(Edge edge, int first, int num) {
   return first <= index && index < first + num;
 }
 
-// static
-size_t NodeProperties::HashCode(Node* node) {
-  size_t h = base::hash_combine(node->op()->HashCode(), node->InputCount());
-  for (Node* input : node->inputs()) {
-    h = base::hash_combine(h, input->id());
-  }
-  return h;
-}
-
-// static
-bool NodeProperties::Equals(Node* a, Node* b) {
-  DCHECK_NOT_NULL(a);
-  DCHECK_NOT_NULL(b);
-  DCHECK_NOT_NULL(a->op());
-  DCHECK_NOT_NULL(b->op());
-  if (!a->op()->Equals(b->op())) return false;
-  if (a->InputCount() != b->InputCount()) return false;
-  Node::Inputs aInputs = a->inputs();
-  Node::Inputs bInputs = b->inputs();
-  auto aIt = aInputs.begin();
-  auto bIt = bInputs.begin();
-  auto aEnd = aInputs.end();
-  for (; aIt != aEnd; ++aIt, ++bIt) {
-    DCHECK_NOT_NULL(*aIt);
-    DCHECK_NOT_NULL(*bIt);
-    if ((*aIt)->id() != (*bIt)->id()) return false;
-  }
-  return true;
-}
-
 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8
@@ -132,12 +132,6 @@ class V8_EXPORT_PRIVATE NodeProperties final {
   // Checks if two nodes are the same, looking past {CheckHeapObject}.
   static bool IsSame(Node* a, Node* b);
 
-  // Check if two nodes have equal operators and reference-equal inputs. Used
-  // for value numbering/hash-consing.
-  static bool Equals(Node* a, Node* b);
-  // A corresponding hash function.
-  static size_t HashCode(Node* node);
-
   // Walks up the {effect} chain to find a witness that provides map
   // information about the {receiver}. Can look through potentially
   // side effecting nodes.
...
@@ -62,7 +62,6 @@
   V(ArgumentsElementsState) \
   V(ArgumentsLengthState)   \
   V(ObjectState)            \
-  V(ObjectId)               \
   V(TypedObjectState)       \
   V(Call)                   \
   V(Parameter)              \
...
@@ -51,8 +51,6 @@
 #include "src/compiler/machine-operator-reducer.h"
 #include "src/compiler/memory-optimizer.h"
 #include "src/compiler/move-optimizer.h"
-#include "src/compiler/new-escape-analysis-reducer.h"
-#include "src/compiler/new-escape-analysis.h"
 #include "src/compiler/osr.h"
 #include "src/compiler/pipeline-statistics.h"
 #include "src/compiler/redundancy-elimination.h"
@@ -1159,18 +1157,6 @@ struct EscapeAnalysisPhase {
   static const char* phase_name() { return "escape analysis"; }
 
   void Run(PipelineData* data, Zone* temp_zone) {
-    if (FLAG_turbo_new_escape) {
-      NewEscapeAnalysis escape_analysis(data->jsgraph(), temp_zone);
-      escape_analysis.ReduceGraph();
-      JSGraphReducer reducer(data->jsgraph(), temp_zone);
-      NewEscapeAnalysisReducer escape_reducer(&reducer, data->jsgraph(),
-                                              escape_analysis.analysis_result(),
-                                              temp_zone);
-      AddReducer(data, &reducer, &escape_reducer);
-      reducer.ReduceGraph();
-      // TODO(tebbi): Turn this into a debug mode check once we have confidence.
-      escape_reducer.VerifyReplacement();
-    } else {
     EscapeAnalysis escape_analysis(data->graph(), data->jsgraph()->common(),
                                    temp_zone);
     if (!escape_analysis.Run()) return;
@@ -1185,7 +1171,6 @@ struct EscapeAnalysisPhase {
     }
     escape_reducer.VerifyReplacement();
   }
-  }
 };
 
 struct SimplifiedLoweringPhase {
...
@@ -142,10 +142,12 @@ UseInfo TruncatingUseInfoFromRepresentation(MachineRepresentation rep) {
   UNREACHABLE();
 }
 
 UseInfo UseInfoForBasePointer(const FieldAccess& access) {
   return access.tag() != 0 ? UseInfo::AnyTagged() : UseInfo::PointerInt();
 }
 
 UseInfo UseInfoForBasePointer(const ElementAccess& access) {
   return access.tag() != 0 ? UseInfo::AnyTagged() : UseInfo::PointerInt();
 }
@@ -1012,8 +1014,7 @@ class RepresentationSelector {
         // The target of the call.
         ProcessInput(node, i, UseInfo::Any());
       } else if ((i - 1) < params) {
-        ProcessInput(node, i,
-                     TruncatingUseInfoFromRepresentation(
+        ProcessInput(node, i, TruncatingUseInfoFromRepresentation(
                           desc->GetInputType(i).representation()));
       } else {
         ProcessInput(node, i, UseInfo::AnyTagged());
@@ -1157,8 +1158,8 @@ class RepresentationSelector {
         (*types)[i] =
             DeoptMachineTypeOf(GetInfo(input)->representation(), TypeOf(input));
       }
-      NodeProperties::ChangeOp(node, jsgraph_->common()->TypedObjectState(
-                                         ObjectIdOf(node->op()), types));
+      NodeProperties::ChangeOp(node,
+                               jsgraph_->common()->TypedObjectState(types));
     }
     SetOutput(node, MachineRepresentation::kTagged);
   }
@@ -2851,8 +2852,6 @@ class RepresentationSelector {
         return VisitStateValues(node);
       case IrOpcode::kObjectState:
         return VisitObjectState(node);
-      case IrOpcode::kObjectId:
-        return SetOutput(node, MachineRepresentation::kTaggedPointer);
       case IrOpcode::kTypeGuard: {
         // We just get rid of the sigma here, choosing the best representation
         // for the sigma's type.
@@ -3296,6 +3295,7 @@ void SimplifiedLowering::DoLoadBuffer(Node* node,
   }
 }
 
 void SimplifiedLowering::DoStoreBuffer(Node* node) {
   DCHECK_EQ(IrOpcode::kStoreBuffer, node->opcode());
   MachineRepresentation const rep =
@@ -3423,6 +3423,7 @@ Node* SimplifiedLowering::Int32Div(Node* const node) {
   return graph()->NewNode(phi_op, true0, false0, merge0);
 }
 
 Node* SimplifiedLowering::Int32Mod(Node* const node) {
   Int32BinopMatcher m(node);
   Node* const zero = jsgraph()->Int32Constant(0);
@@ -3555,6 +3556,7 @@ Node* SimplifiedLowering::Uint32Div(Node* const node) {
   return d.Phi(MachineRepresentation::kWord32, zero, div);
 }
 
 Node* SimplifiedLowering::Uint32Mod(Node* const node) {
   Uint32BinopMatcher m(node);
   Node* const minus_one = jsgraph()->Int32Constant(-1);
...
@@ -832,8 +832,6 @@ Type* Typer::Visitor::TypeTypedStateValues(Node* node) {
   return Type::Internal();
 }
 
-Type* Typer::Visitor::TypeObjectId(Node* node) { UNREACHABLE(); }
-
 Type* Typer::Visitor::TypeArgumentsElementsState(Node* node) {
   return Type::Internal();
 }
...
@@ -14,6 +14,41 @@ namespace v8 {
 namespace internal {
 namespace compiler {
 
+namespace {
+
+size_t HashCode(Node* node) {
+  size_t h = base::hash_combine(node->op()->HashCode(), node->InputCount());
+  for (Node* input : node->inputs()) {
+    h = base::hash_combine(h, input->id());
+  }
+  return h;
+}
+
+bool Equals(Node* a, Node* b) {
+  DCHECK_NOT_NULL(a);
+  DCHECK_NOT_NULL(b);
+  DCHECK_NOT_NULL(a->op());
+  DCHECK_NOT_NULL(b->op());
+  if (!a->op()->Equals(b->op())) return false;
+  if (a->InputCount() != b->InputCount()) return false;
+  Node::Inputs aInputs = a->inputs();
+  Node::Inputs bInputs = b->inputs();
+  auto aIt = aInputs.begin();
+  auto bIt = bInputs.begin();
+  auto aEnd = aInputs.end();
+  for (; aIt != aEnd; ++aIt, ++bIt) {
+    DCHECK_NOT_NULL(*aIt);
+    DCHECK_NOT_NULL(*bIt);
+    if ((*aIt)->id() != (*bIt)->id()) return false;
+  }
+  return true;
+}
+
+}  // namespace
+
 ValueNumberingReducer::ValueNumberingReducer(Zone* temp_zone, Zone* graph_zone)
     : entries_(nullptr),
       capacity_(0),
@@ -27,7 +62,7 @@ ValueNumberingReducer::~ValueNumberingReducer() {}
 
 Reduction ValueNumberingReducer::Reduce(Node* node) {
   if (!node->op()->HasProperty(Operator::kIdempotent)) return NoChange();
-  const size_t hash = NodeProperties::HashCode(node);
+  const size_t hash = HashCode(node);
   if (!entries_) {
     DCHECK(size_ == 0);
     DCHECK(capacity_ == 0);
@@ -96,7 +131,7 @@ Reduction ValueNumberingReducer::Reduce(Node* node) {
         // Otherwise, keep searching for another collision.
         continue;
       }
-      if (NodeProperties::Equals(entry, node)) {
+      if (Equals(entry, node)) {
         Reduction reduction = ReplaceIfTypesMatch(node, entry);
         if (reduction.Changed()) {
           // Overwrite the colliding entry with the actual entry.
@@ -118,7 +153,7 @@ Reduction ValueNumberingReducer::Reduce(Node* node) {
       dead = i;
       continue;
     }
-    if (NodeProperties::Equals(entry, node)) {
+    if (Equals(entry, node)) {
       return ReplaceIfTypesMatch(node, entry);
     }
   }
@@ -162,8 +197,7 @@ void ValueNumberingReducer::Grow() {
   for (size_t i = 0; i < old_capacity; ++i) {
     Node* const old_entry = old_entries[i];
     if (!old_entry || old_entry->IsDead()) continue;
-    for (size_t j = NodeProperties::HashCode(old_entry) & mask;;
-         j = (j + 1) & mask) {
+    for (size_t j = HashCode(old_entry) & mask;; j = (j + 1) & mask) {
       Node* const entry = entries_[j];
       if (entry == old_entry) {
         // Skip duplicate of the old entry.
...
@@ -507,8 +507,6 @@ void Verifier::Visitor::Check(Node* node) {
       // still be kStateValues.
       break;
     }
-    case IrOpcode::kObjectId:
-      CheckTypeIs(node, Type::Object());
     case IrOpcode::kStateValues:
     case IrOpcode::kTypedStateValues:
     case IrOpcode::kArgumentsElementsState:
...
@@ -465,8 +465,6 @@ DEFINE_BOOL(turbo_loop_variable, true, "Turbofan loop variable optimization")
 DEFINE_BOOL(turbo_cf_optimization, true, "optimize control flow in TurboFan")
 DEFINE_BOOL(turbo_frame_elision, true, "elide frames in TurboFan")
 DEFINE_BOOL(turbo_escape, true, "enable escape analysis")
-DEFINE_BOOL(turbo_new_escape, false,
-            "enable new implementation of escape analysis")
 DEFINE_BOOL(turbo_instruction_scheduling, false,
             "enable instruction scheduling in TurboFan")
 DEFINE_BOOL(turbo_stress_instruction_scheduling, false,
...
@@ -1430,7 +1430,7 @@ void OptimizedFrame::Summarize(List<FrameSummary>* frames,
       } else {
         // The receiver is not in a stack slot nor in a literal. We give up.
         it.Skip(Translation::NumberOfOperandsFor(opcode));
-        // TODO(6586): Materializing a captured object (or duplicated
+        // TODO(3029): Materializing a captured object (or duplicated
        // object) is hard, we return undefined for now. This breaks the
        // produced stack trace, as constructor frames aren't marked as
        // such anymore.
...
@@ -811,10 +811,6 @@
         'compiler/memory-optimizer.h',
         'compiler/move-optimizer.cc',
         'compiler/move-optimizer.h',
-        'compiler/new-escape-analysis.cc',
-        'compiler/new-escape-analysis.h',
-        'compiler/new-escape-analysis-reducer.cc',
-        'compiler/new-escape-analysis-reducer.h',
         'compiler/node-aux-data.h',
         'compiler/node-cache.cc',
         'compiler/node-cache.h',
@@ -836,7 +832,6 @@
         'compiler/operator.h',
         'compiler/osr.cc',
         'compiler/osr.h',
-        'compiler/persistent-map.h',
         'compiler/pipeline.cc',
         'compiler/pipeline.h',
         'compiler/pipeline-statistics.cc',
...
@@ -12,11 +12,8 @@
 #include <queue>
 #include <set>
 #include <stack>
-#include <unordered_map>
-#include <unordered_set>
 #include <vector>
 
-#include "src/base/functional.h"
 #include "src/zone/zone-allocator.h"
 
 namespace v8 {
@@ -136,35 +133,6 @@ class ZoneMap
               Compare(), ZoneAllocator<std::pair<const K, V>>(zone)) {}
 };
 
-// A wrapper subclass for std::unordered_map to make it easy to construct one
-// that uses a zone allocator.
-template <typename K, typename V, typename Hash = base::hash<K>,
-          typename KeyEqual = std::equal_to<K>>
-class ZoneUnorderedMap
-    : public std::unordered_map<K, V, Hash, KeyEqual,
-                                ZoneAllocator<std::pair<const K, V>>> {
- public:
-  // Constructs an empty map.
-  explicit ZoneUnorderedMap(Zone* zone)
-      : std::unordered_map<K, V, Hash, KeyEqual,
-                           ZoneAllocator<std::pair<const K, V>>>(
-            100, Hash(), KeyEqual(),
-            ZoneAllocator<std::pair<const K, V>>(zone)) {}
-};
-
-// A wrapper subclass for std::unordered_set to make it easy to construct one
-// that uses a zone allocator.
-template <typename K, typename Hash = base::hash<K>,
-          typename KeyEqual = std::equal_to<K>>
-class ZoneUnorderedSet
-    : public std::unordered_set<K, Hash, KeyEqual, ZoneAllocator<K>> {
- public:
-  // Constructs an empty map.
-  explicit ZoneUnorderedSet(Zone* zone)
-      : std::unordered_set<K, Hash, KeyEqual, ZoneAllocator<K>>(
-            100, Hash(), KeyEqual(), ZoneAllocator<K>(zone)) {}
-};
-
 // A wrapper subclass for std::multimap to make it easy to construct one that
 // uses a zone allocator.
 template <typename K, typename V, typename Compare = std::less<K>>
...
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --allow-natives-syntax --turbo-escape
function g(o) {
return {a : o, b: 42, c: o};
}
function f() {
var o = {a: {}, b: 43};
o.a = g(g(o));
o.c = o.a.c;
%DeoptimizeNow();
return o.c.a.c.a.c.a.c.b;
}
assertEquals(42, f());
assertEquals(42, f());
%OptimizeFunctionOnNextCall(f);
assertEquals(42, f());
@@ -249,17 +249,14 @@ var c = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25];
 })();
 
 (function() {
-  // TODO(6586): Once we fixed the materailization of receivers for stack trace
-  // computation, this should be /Array\.forEach/ again.
-  var re = /forEach/;
-  var lazyDeopt = function foobar(deopt) {
+  var re = /Array\.forEach/;
+  var lazyDeopt = function(deopt) {
     var b = [1,2,3];
     var result = 0;
     var sum = function(v,i,o) {
       result += v;
       if (i == 1) {
         var e = new Error();
-        print(e.stack);
         assertTrue(re.exec(e.stack) !== null);
       }
     };
...
@@ -259,7 +259,7 @@ var c = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25];
 })();
 
 (function() {
-  var re = /map/;
+  var re = /Array\.map/;
   var lazyDeopt = function(deopt) {
     var b = [1,2,3];
     var result = 0;
...
@@ -85,7 +85,6 @@ v8_executable("unittests") {
     "compiler/node-test-utils.h",
     "compiler/node-unittest.cc",
     "compiler/opcodes-unittest.cc",
-    "compiler/persistent-unittest.cc",
     "compiler/regalloc/live-range-unittest.cc",
     "compiler/regalloc/move-optimizer-unittest.cc",
     "compiler/regalloc/register-allocator-unittest.cc",
...
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <tuple>
#include "src/base/utils/random-number-generator.h"
#include "src/compiler/persistent-map.h"
#include "test/unittests/test-utils.h"
namespace v8 {
namespace internal {
namespace compiler {
// A random distribution that produces both small values and arbitrary numbers.
static int small_big_distr(base::RandomNumberGenerator* rand) {
return rand->NextInt() / std::max(1, rand->NextInt() / 100);
}
TEST(PersistentMap, RefTest) {
base::RandomNumberGenerator rand(92834738);
AccountingAllocator allocator;
Zone zone(&allocator, ZONE_NAME);
std::vector<PersistentMap<int, int>> pers_maps;
pers_maps.emplace_back(&zone);
std::vector<std::map<int, int>> ref_maps(1);
for (int i = 0; i < 100000; ++i) {
if (rand.NextInt(2) == 0) {
// Read value;
int key = small_big_distr(&rand);
if (ref_maps[0].count(key) > 0) {
ASSERT_EQ(pers_maps[0].Get(key), ref_maps[0][key]);
} else {
ASSERT_EQ(pers_maps[0].Get(key), 0);
}
}
if (rand.NextInt(2) == 0) {
// Add value;
int key = small_big_distr(&rand);
int value = small_big_distr(&rand);
pers_maps[0].Set(key, value);
ref_maps[0][key] = value;
}
if (rand.NextInt(1000) == 0) {
// Create empty map.
pers_maps.emplace_back(&zone);
ref_maps.emplace_back();
}
if (rand.NextInt(100) == 0) {
// Copy and move around maps.
int num_maps = static_cast<int>(pers_maps.size());
int source = rand.NextInt(num_maps - 1) + 1;
int target = rand.NextInt(num_maps - 1) + 1;
pers_maps[target] = std::move(pers_maps[0]);
ref_maps[target] = std::move(ref_maps[0]);
pers_maps[0] = pers_maps[source];
ref_maps[0] = ref_maps[source];
}
}
for (size_t i = 0; i < pers_maps.size(); ++i) {
std::set<int> keys;
for (auto pair : pers_maps[i]) {
ASSERT_EQ(keys.count(pair.first), 0u);
keys.insert(pair.first);
ASSERT_EQ(ref_maps[i][pair.first], pair.second);
}
for (auto pair : ref_maps[i]) {
int value = pers_maps[i].Get(pair.first);
ASSERT_EQ(pair.second, value);
if (value != 0) {
ASSERT_EQ(keys.count(pair.first), 1u);
keys.erase(pair.first);
}
}
ASSERT_TRUE(keys.empty());
}
}
TEST(PersistentMap, Zip) {
base::RandomNumberGenerator rand(92834738);
AccountingAllocator allocator;
Zone zone(&allocator, ZONE_NAME);
// Provoke hash collisions to stress the iterator.
struct bad_hash {
size_t operator()(int key) { return static_cast<size_t>(key) % 1000; }
};
PersistentMap<int, int, bad_hash> a(&zone);
PersistentMap<int, int, bad_hash> b(&zone);
int sum_a = 0;
int sum_b = 0;
for (int i = 0; i < 30000; ++i) {
int key = small_big_distr(&rand);
int value = small_big_distr(&rand);
if (rand.NextBool()) {
sum_a += value;
a.Set(key, a.Get(key) + value);
} else {
sum_b += value;
b.Set(key, b.Get(key) + value);
}
}
int sum = sum_a + sum_b;
for (auto pair : a) {
sum_a -= pair.second;
}
ASSERT_EQ(0, sum_a);
for (auto pair : b) {
sum_b -= pair.second;
}
ASSERT_EQ(0, sum_b);
for (auto triple : a.Zip(b)) {
sum -= std::get<1>(triple) + std::get<2>(triple);
}
ASSERT_EQ(0, sum);
}
} // namespace compiler
} // namespace internal
} // namespace v8
@@ -80,7 +80,6 @@
         'compiler/node-test-utils.h',
         'compiler/node-unittest.cc',
         'compiler/opcodes-unittest.cc',
-        'compiler/persistent-unittest.cc',
         'compiler/regalloc/register-allocator-unittest.cc',
         'compiler/schedule-unittest.cc',
         'compiler/scheduler-unittest.cc',
...