Commit d3d7069d authored by bmeurer's avatar bmeurer Committed by Commit bot

Revert of [turbofan] Add support for copy-on-write element stores. (patchset...

Revert of [turbofan] Add support for copy-on-write element stores. (patchset #2 id:20001 of https://codereview.chromium.org/2218703003/ )

Reason for revert:
Breaks tree?

Original issue's description:
> [turbofan] Add support for copy-on-write element stores.
>
> This extends JSNativeContextSpecialization with support for stores to
> fast object/smi element backing stores that are marked as copy-on-write.
> In this case we first call the CopyFixedArray builtin to take a copy of
> the elements backing store, and then store the new elements back to the
> object, and finally perform the actual element store.
>
> R=epertoso@chromium.org
> BUG=v8:4470
>
> Committed: https://crrev.com/ac98ad22f049a59c48387f1bab1590f135d219c6
> Cr-Commit-Position: refs/heads/master@{#38370}

TBR=epertoso@chromium.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=v8:4470

Review-Url: https://codereview.chromium.org/2220513002
Cr-Commit-Position: refs/heads/master@{#38376}
parent 255cc15f
......@@ -4,7 +4,7 @@
#include "src/builtins/builtins.h"
#include "src/builtins/builtins-utils.h"
#include "src/interface-descriptors.h"
#include "src/macro-assembler.h"
namespace v8 {
......@@ -50,124 +50,5 @@ void Builtins::Generate_StackCheck(MacroAssembler* masm) {
masm->TailCallRuntime(Runtime::kStackGuard);
}
// -----------------------------------------------------------------------------
// FixedArray helpers.
// Builtin CopyFixedArray(source): allocates a fresh FixedArray with the same
// length as {source} and copies all of its elements into it, returning the
// new array. Small copies are allocated inline in new space (no write
// barriers needed); copies that exceed the regular-page limit go through the
// runtime into old space and use full write barriers.
void Builtins::Generate_CopyFixedArray(CodeStubAssembler* assembler) {
typedef CodeStubAssembler::Label Label;
typedef compiler::Node Node;
typedef CodeStubAssembler::Variable Variable;
typedef CopyFixedArrayDescriptor Descriptor;
// The single parameter is the FixedArray to copy.
Node* source = assembler->Parameter(Descriptor::kSource);
// Load the {source} length.
Node* source_length_tagged =
assembler->LoadObjectField(source, FixedArray::kLengthOffset);
Node* source_length = assembler->SmiToWord(source_length_tagged);
// Compute the size of {source} in bytes: header + length * kPointerSize.
Node* source_size = assembler->IntPtrAdd(
assembler->WordShl(source_length,
assembler->IntPtrConstant(kPointerSizeLog2)),
assembler->IntPtrConstant(FixedArray::kHeaderSize));
// Check if we can allocate in new space (only objects up to the regular
// heap-object size limit may be allocated there).
Label if_newspace(assembler), if_oldspace(assembler);
assembler->Branch(assembler->UintPtrLessThan(
source_size, assembler->IntPtrConstant(
Page::kMaxRegularHeapObjectSize)),
&if_newspace, &if_oldspace);
assembler->Bind(&if_newspace);
{
// Allocate the targeting FixedArray in new space.
Node* target = assembler->Allocate(source_size);
// Initialize map and length explicitly; the element payload is copied by
// the loop below.
assembler->StoreMapNoWriteBarrier(
target, assembler->LoadRoot(Heap::kFixedArrayMapRootIndex));
assembler->StoreObjectFieldNoWriteBarrier(target, FixedArray::kLengthOffset,
source_length_tagged);
// Copy the {source} to the {target}. The index starts at the first
// element offset (header size minus the heap-object tag), so the map and
// length words set above are skipped.
Variable var_index(assembler, MachineType::PointerRepresentation());
Label loop(assembler, &var_index), done_loop(assembler);
var_index.Bind(
assembler->IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag));
assembler->Goto(&loop);
assembler->Bind(&loop);
{
// Determine the current {index}.
Node* index = var_index.value();
// Check if we are done.
assembler->GotoUnless(assembler->UintPtrLessThan(index, source_size),
&done_loop);
// Load the value from {source}.
Node* value = assembler->Load(MachineType::AnyTagged(), source, index);
// Store the {value} to the {target} without a write barrier, since we
// know that the {target} is allocated in new space.
assembler->StoreNoWriteBarrier(MachineRepresentation::kTagged, target,
index, value);
// Increment {index} and continue.
var_index.Bind(
assembler->IntPtrAdd(index, assembler->IntPtrConstant(kPointerSize)));
assembler->Goto(&loop);
}
assembler->Bind(&done_loop);
assembler->Return(target);
}
assembler->Bind(&if_oldspace);
{
// Allocate the targeting FixedArray in old space
// (maybe even in large object space).
Node* flags = assembler->SmiConstant(
Smi::FromInt(AllocateDoubleAlignFlag::encode(false) |
AllocateTargetSpace::encode(AllocationSpace::OLD_SPACE)));
Node* source_size_tagged = assembler->SmiFromWord(source_size);
Node* target = assembler->CallRuntime(Runtime::kAllocateInTargetSpace,
assembler->NoContextConstant(),
source_size_tagged, flags);
assembler->StoreMapNoWriteBarrier(
target, assembler->LoadRoot(Heap::kFixedArrayMapRootIndex));
assembler->StoreObjectFieldNoWriteBarrier(target, FixedArray::kLengthOffset,
source_length_tagged);
// Copy the {source} to the {target}. Same element-payload loop as the
// new-space path, but stores use a write barrier because {target} may
// live in old space.
Variable var_index(assembler, MachineType::PointerRepresentation());
Label loop(assembler, &var_index), done_loop(assembler);
var_index.Bind(
assembler->IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag));
assembler->Goto(&loop);
assembler->Bind(&loop);
{
// Determine the current {index}.
Node* index = var_index.value();
// Check if we are done.
assembler->GotoUnless(assembler->UintPtrLessThan(index, source_size),
&done_loop);
// Load the value from {source}.
Node* value = assembler->Load(MachineType::AnyTagged(), source, index);
// Store the {value} to the {target} with a proper write barrier.
assembler->Store(MachineRepresentation::kTagged, target, index, value);
// Increment {index} and continue.
var_index.Bind(
assembler->IntPtrAdd(index, assembler->IntPtrConstant(kPointerSize)));
assembler->Goto(&loop);
}
assembler->Bind(&done_loop);
assembler->Return(target);
}
}
} // namespace internal
} // namespace v8
......@@ -159,9 +159,6 @@ namespace internal {
ASM(AllocateInNewSpace) \
ASM(AllocateInOldSpace) \
\
/* FixedArray helpers */ \
TFS(CopyFixedArray, BUILTIN, kNoExtraICState, CopyFixedArray) \
\
/* Debugger */ \
DBG(FrameDropper_LiveEdit) \
DBG(Return_DebugBreak) \
......
......@@ -509,12 +509,6 @@ Callable CodeFactory::FastNewStrictArguments(Isolate* isolate,
return make_callable(stub);
}
// static
Callable CodeFactory::CopyFixedArray(Isolate* isolate) {
  // Pair the CopyFixedArray builtin's code object with its call-interface
  // descriptor so callers can emit a stub call to it.
  CopyFixedArrayDescriptor descriptor(isolate);
  return Callable(isolate->builtins()->CopyFixedArray(), descriptor);
}
// static
Callable CodeFactory::AllocateHeapNumber(Isolate* isolate) {
AllocateHeapNumberStub stub(isolate);
......
......@@ -136,8 +136,6 @@ class CodeFactory final {
static Callable FastNewStrictArguments(Isolate* isolate,
bool skip_stub_frame = false);
static Callable CopyFixedArray(Isolate* isolate);
static Callable AllocateHeapNumber(Isolate* isolate);
#define SIMD128_ALLOC(TYPE, Type, type, lane_count, lane_type) \
static Callable Allocate##Type(Isolate* isolate);
......
......@@ -723,9 +723,6 @@ bool EffectControlLinearizer::TryWireInStateEffect(Node* node,
case IrOpcode::kPlainPrimitiveToFloat64:
state = LowerPlainPrimitiveToFloat64(node, *effect, *control);
break;
case IrOpcode::kEnsureWritableFastElements:
state = LowerEnsureWritableFastElements(node, *effect, *control);
break;
case IrOpcode::kTransitionElementsKind:
state = LowerTransitionElementsKind(node, *effect, *control);
break;
......@@ -2602,59 +2599,6 @@ EffectControlLinearizer::LowerPlainPrimitiveToFloat64(Node* node, Node* effect,
return ValueEffectControl(value, effect, control);
}
// Lowers EnsureWritableFastElements(object, elements) to explicit machine-
// level control flow: if {elements} already has the FixedArray map it is
// writable and passed through unchanged; otherwise (presumably the
// copy-on-write map case — the check compares against the plain FixedArray
// map) the CopyFixedArray builtin is called to make a private copy, which is
// then stored back into {object}'s elements field. Returns the (possibly
// copied) elements together with the merged effect and control.
EffectControlLinearizer::ValueEffectControl
EffectControlLinearizer::LowerEnsureWritableFastElements(Node* node,
Node* effect,
Node* control) {
Node* object = node->InputAt(0);
Node* elements = node->InputAt(1);
// Load the current map of {elements}.
Node* elements_map = effect =
graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
elements, effect, control);
// Check if {elements} is not a copy-on-write FixedArray.
Node* check = graph()->NewNode(machine()->WordEqual(), elements_map,
jsgraph()->FixedArrayMapConstant());
// The common case is that the backing store is already writable.
Node* branch =
graph()->NewNode(common()->Branch(BranchHint::kTrue), check, control);
// Nothing to do if the {elements} are not copy-on-write.
Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
Node* etrue = effect;
Node* vtrue = elements;
// We need to take a copy of the {elements} and set them up for {object}.
Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
Node* efalse = effect;
Node* vfalse;
{
// We need to create a copy of the {elements} for {object}.
Operator::Properties properties = Operator::kEliminatable;
Callable callable = CodeFactory::CopyFixedArray(isolate());
CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
CallDescriptor const* const desc = Linkage::GetStubCallDescriptor(
isolate(), graph()->zone(), callable.descriptor(), 0, flags,
properties);
vfalse = efalse = graph()->NewNode(
common()->Call(desc), jsgraph()->HeapConstant(callable.code()),
elements, jsgraph()->NoContextConstant(), efalse);
// Store the new {elements} into {object}.
efalse = graph()->NewNode(
simplified()->StoreField(AccessBuilder::ForJSObjectElements()), object,
vfalse, efalse, if_false);
}
// Merge the two paths; the value phi selects the original or copied
// elements depending on which branch was taken.
control = graph()->NewNode(common()->Merge(2), if_true, if_false);
effect = graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
Node* value = graph()->NewNode(
common()->Phi(MachineRepresentation::kTagged, 2), vtrue, vfalse, control);
return ValueEffectControl(value, effect, control);
}
EffectControlLinearizer::ValueEffectControl
EffectControlLinearizer::LowerTransitionElementsKind(Node* node, Node* effect,
Node* control) {
......
......@@ -136,8 +136,6 @@ class EffectControlLinearizer {
Node* control);
ValueEffectControl LowerPlainPrimitiveToFloat64(Node* node, Node* effect,
Node* control);
ValueEffectControl LowerEnsureWritableFastElements(Node* node, Node* effect,
Node* control);
ValueEffectControl LowerTransitionElementsKind(Node* node, Node* effect,
Node* control);
ValueEffectControl LowerLoadTypedElement(Node* node, Node* effect,
......
......@@ -52,11 +52,6 @@ Node* JSGraph::EmptyStringConstant() {
return CACHED(kEmptyStringConstant, HeapConstant(factory()->empty_string()));
}
// Returns the canonical HeapConstant node for the FixedArray map. The CACHED
// macro creates the node on first use and returns the same node thereafter,
// so map comparisons can use pointer identity on the constant.
Node* JSGraph::FixedArrayMapConstant() {
return CACHED(kFixedArrayMapConstant,
HeapConstant(factory()->fixed_array_map()));
}
Node* JSGraph::HeapNumberMapConstant() {
return CACHED(kHeapNumberMapConstant,
HeapConstant(factory()->heap_number_map()));
......
......@@ -46,7 +46,6 @@ class JSGraph : public ZoneObject {
Node* EmptyFixedArrayConstant();
Node* EmptyLiteralsArrayConstant();
Node* EmptyStringConstant();
Node* FixedArrayMapConstant();
Node* HeapNumberMapConstant();
Node* OptimizedOutConstant();
Node* StaleRegisterConstant();
......@@ -157,7 +156,6 @@ class JSGraph : public ZoneObject {
kEmptyFixedArrayConstant,
kEmptyLiteralsArrayConstant,
kEmptyStringConstant,
kFixedArrayMapConstant,
kHeapNumberMapConstant,
kOptimizedOutConstant,
kStaleRegisterConstant,
......
......@@ -428,7 +428,6 @@ Reduction JSNativeContextSpecialization::ReduceElementAccess(
// TODO(bmeurer): Add support for non-standard stores.
if (store_mode != STANDARD_STORE &&
store_mode != STORE_NO_TRANSITION_HANDLE_COW &&
store_mode != STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
return NoChange();
}
......@@ -961,11 +960,10 @@ JSNativeContextSpecialization::BuildElementAccess(
// Don't try to store to a copy-on-write backing store.
if (access_mode == AccessMode::kStore &&
IsFastSmiOrObjectElementsKind(elements_kind) &&
store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
effect =
graph()->NewNode(simplified()->CheckMaps(1), elements,
jsgraph()->FixedArrayMapConstant(), effect, control);
IsFastSmiOrObjectElementsKind(elements_kind)) {
effect = graph()->NewNode(
simplified()->CheckMaps(1), elements,
jsgraph()->HeapConstant(factory()->fixed_array_map()), effect, control);
}
if (IsFixedTypedArrayElementsKind(elements_kind)) {
......@@ -1069,8 +1067,7 @@ JSNativeContextSpecialization::BuildElementAccess(
}
} else {
// TODO(turbofan): Add support for additional store modes.
DCHECK(store_mode == STANDARD_STORE ||
store_mode == STORE_NO_TRANSITION_HANDLE_COW);
DCHECK_EQ(STANDARD_STORE, store_mode);
// Load the length of the {receiver}.
Node* length = effect =
......@@ -1150,16 +1147,6 @@ JSNativeContextSpecialization::BuildElementAccess(
// Make sure we do not store signalling NaNs into double arrays.
value = graph()->NewNode(simplified()->NumberSilenceNaN(), value);
}
// Ensure that copy-on-write backing store is writable.
if (IsFastSmiOrObjectElementsKind(elements_kind) &&
store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
elements = effect =
graph()->NewNode(simplified()->EnsureWritableFastElements(),
receiver, elements, effect, control);
}
// Perform the actual element access.
effect = graph()->NewNode(simplified()->StoreElement(element_access),
elements, index, value, effect, control);
}
......
......@@ -4,7 +4,6 @@
#include "src/compiler/load-elimination.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/node-properties.h"
#include "src/compiler/simplified-operator.h"
......@@ -54,8 +53,6 @@ Reduction LoadElimination::Reduce(Node* node) {
switch (node->opcode()) {
case IrOpcode::kCheckMaps:
return ReduceCheckMaps(node);
case IrOpcode::kEnsureWritableFastElements:
return ReduceEnsureWritableFastElements(node);
case IrOpcode::kTransitionElementsKind:
return ReduceTransitionElementsKind(node);
case IrOpcode::kLoadField:
......@@ -329,29 +326,6 @@ Reduction LoadElimination::ReduceCheckMaps(Node* node) {
return UpdateState(node, state);
}
// Load-elimination rule for EnsureWritableFastElements: if the abstract state
// already proves {elements} has the (writable) FixedArray map, the node is
// redundant and is replaced by {elements} itself. Otherwise the state is
// updated to record that the result has the FixedArray map and that
// {object}'s elements field now refers to this node's result.
// NOTE(review): field index 0 appears to track the map and index 2 the
// elements backing store — confirm against FieldIndexOf/AccessBuilder.
Reduction LoadElimination::ReduceEnsureWritableFastElements(Node* node) {
Node* const object = NodeProperties::GetValueInput(node, 0);
Node* const elements = NodeProperties::GetValueInput(node, 1);
Node* const effect = NodeProperties::GetEffectInput(node);
// Without an abstract state for the incoming effect nothing can be proven.
AbstractState const* state = node_states_.Get(effect);
if (state == nullptr) return NoChange();
Node* fixed_array_map = jsgraph()->FixedArrayMapConstant();
if (Node* const elements_map = state->LookupField(elements, 0)) {
// Check if the {elements} already have the fixed array map.
if (elements_map == fixed_array_map) {
ReplaceWithValue(node, elements, effect);
return Replace(elements);
}
}
// We know that the resulting elements have the fixed array map.
state = state->AddField(node, 0, fixed_array_map, zone());
// Kill the previous elements on {object}.
state = state->KillField(object, 2, zone());
// Add the new elements on {object}.
state = state->AddField(object, 2, node, zone());
return UpdateState(node, state);
}
Reduction LoadElimination::ReduceTransitionElementsKind(Node* node) {
Node* const object = NodeProperties::GetValueInput(node, 0);
Node* const source_map = NodeProperties::GetValueInput(node, 1);
......@@ -578,19 +552,6 @@ LoadElimination::AbstractState const* LoadElimination::ComputeLoopState(
visited.insert(current);
if (!current->op()->HasProperty(Operator::kNoWrite)) {
switch (current->opcode()) {
case IrOpcode::kEnsureWritableFastElements: {
Node* const object = NodeProperties::GetValueInput(current, 0);
Node* const elements = NodeProperties::GetValueInput(current, 1);
state = state->KillField(elements, 0, zone());
state = state->KillField(object, 2, zone());
break;
}
case IrOpcode::kTransitionElementsKind: {
Node* const object = NodeProperties::GetValueInput(current, 0);
state = state->KillField(object, 0, zone());
state = state->KillField(object, 2, zone());
break;
}
case IrOpcode::kStoreField: {
FieldAccess const& access = FieldAccessOf(current->op());
Node* const object = NodeProperties::GetValueInput(current, 0);
......@@ -605,11 +566,6 @@ LoadElimination::AbstractState const* LoadElimination::ComputeLoopState(
state = state->KillElement(object, index, zone());
break;
}
case IrOpcode::kStoreBuffer:
case IrOpcode::kStoreTypedElement: {
// Doesn't affect anything we track with the state currently.
break;
}
default:
return empty_state();
}
......
......@@ -13,12 +13,11 @@ namespace compiler {
// Forward declarations.
struct FieldAccess;
class JSGraph;
class LoadElimination final : public AdvancedReducer {
public:
LoadElimination(Editor* editor, JSGraph* jsgraph, Zone* zone)
: AdvancedReducer(editor), node_states_(zone), jsgraph_(jsgraph) {}
LoadElimination(Editor* editor, Zone* zone)
: AdvancedReducer(editor), node_states_(zone) {}
~LoadElimination() final {}
Reduction Reduce(Node* node) final;
......@@ -151,7 +150,6 @@ class LoadElimination final : public AdvancedReducer {
};
Reduction ReduceCheckMaps(Node* node);
Reduction ReduceEnsureWritableFastElements(Node* node);
Reduction ReduceTransitionElementsKind(Node* node);
Reduction ReduceLoadField(Node* node);
Reduction ReduceStoreField(Node* node);
......@@ -170,12 +168,10 @@ class LoadElimination final : public AdvancedReducer {
static int FieldIndexOf(FieldAccess const& access);
AbstractState const* empty_state() const { return &empty_state_; }
JSGraph* jsgraph() const { return jsgraph_; }
Zone* zone() const { return node_states_.zone(); }
AbstractState const empty_state_;
AbstractStateForEffectNodes node_states_;
JSGraph* const jsgraph_;
DISALLOW_COPY_AND_ASSIGN(LoadElimination);
};
......
......@@ -304,7 +304,6 @@
V(ObjectIsSmi) \
V(ObjectIsString) \
V(ObjectIsUndetectable) \
V(EnsureWritableFastElements) \
V(TransitionElementsKind)
#define SIMPLIFIED_OP_LIST(V) \
......
......@@ -615,10 +615,9 @@ PipelineCompilationJob::Status PipelineCompilationJob::CreateGraphImpl() {
if (!Compiler::EnsureDeoptimizationSupport(info())) return FAILED;
}
// TODO(mstarzinger): Hack to ensure that certain call descriptors are
// TODO(mstarzinger): Hack to ensure that the ToNumber call descriptor is
// initialized on the main thread, since it is needed off-thread by the
// effect control linearizer.
CodeFactory::CopyFixedArray(info()->isolate());
CodeFactory::ToNumber(info()->isolate());
linkage_ = new (&zone_) Linkage(Linkage::ComputeIncoming(&zone_, info()));
......@@ -1075,8 +1074,7 @@ struct LoadEliminationPhase {
DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
data->common());
RedundancyElimination redundancy_elimination(&graph_reducer, temp_zone);
LoadElimination load_elimination(&graph_reducer, data->jsgraph(),
temp_zone);
LoadElimination load_elimination(&graph_reducer, temp_zone);
ValueNumberingReducer value_numbering(temp_zone, data->graph()->zone());
CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
data->common(), data->machine());
......
......@@ -2375,9 +2375,6 @@ class RepresentationSelector {
VisitInputs(node);
return SetOutput(node, MachineRepresentation::kNone);
}
case IrOpcode::kEnsureWritableFastElements:
return VisitBinop(node, UseInfo::AnyTagged(),
MachineRepresentation::kTagged);
//------------------------------------------------------------------
// Machine-level operators.
......
......@@ -503,16 +503,6 @@ struct SimplifiedOperatorGlobalCache final {
AllocateOperator<NOT_TENURED> kAllocateNotTenuredOperator;
AllocateOperator<TENURED> kAllocateTenuredOperator;
// Singleton operator for EnsureWritableFastElements. The operator is
// stateless, so one shared instance (kEnsureWritableFastElements below)
// serves all graphs. Counts appear to be: 2 value inputs (object, elements),
// 1 effect input, 1 control input, 1 value output, 1 effect output,
// 0 control outputs — confirm against the Operator constructor signature.
struct EnsureWritableFastElementsOperator final : public Operator {
EnsureWritableFastElementsOperator()
: Operator( // --
IrOpcode::kEnsureWritableFastElements, // opcode
Operator::kNoDeopt | Operator::kNoThrow, // flags
"EnsureWritableFastElements", // name
2, 1, 1, 1, 1, 0) {} // counts
};
EnsureWritableFastElementsOperator kEnsureWritableFastElements;
#define BUFFER_ACCESS(Type, type, TYPE, ctype, size) \
struct LoadBuffer##Type##Operator final : public Operator1<BufferAccess> { \
LoadBuffer##Type##Operator() \
......@@ -628,10 +618,6 @@ const Operator* SimplifiedOperatorBuilder::ReferenceEqual(Type* type) {
"ReferenceEqual", 2, 0, 0, 1, 0, 0);
}
// Returns the shared, cached EnsureWritableFastElements operator; no
// per-call allocation is needed because the operator carries no parameters.
const Operator* SimplifiedOperatorBuilder::EnsureWritableFastElements() {
return &cache_.kEnsureWritableFastElements;
}
const Operator* SimplifiedOperatorBuilder::TransitionElementsKind(
ElementsTransition transition) {
return new (zone()) Operator1<ElementsTransition>( // --
......
......@@ -148,8 +148,6 @@ enum class ElementsTransition : uint8_t {
kSlowTransition // full transition, round-trip to the runtime.
};
size_t hash_value(ElementsTransition);
std::ostream& operator<<(std::ostream&, ElementsTransition);
ElementsTransition ElementsTransitionOf(const Operator* op) WARN_UNUSED_RESULT;
......@@ -313,9 +311,6 @@ class SimplifiedOperatorBuilder final : public ZoneObject {
const Operator* ObjectIsString();
const Operator* ObjectIsUndetectable();
// ensure-writable-fast-elements object, elements
const Operator* EnsureWritableFastElements();
// transition-elements-kind object, from-map, to-map
const Operator* TransitionElementsKind(ElementsTransition transition);
......
......@@ -719,10 +719,6 @@ Type* Typer::Visitor::TypeLoopExitEffect(Node* node) {
return nullptr;
}
// EnsureWritableFastElements yields (a possibly copied version of) its
// second value input, so its type is that input's type.
Type* Typer::Visitor::TypeEnsureWritableFastElements(Node* node) {
  Type* const elements_type = Operand(node, 1);
  return elements_type;
}
Type* Typer::Visitor::TypeTransitionElementsKind(Node* node) {
UNREACHABLE();
return nullptr;
......
......@@ -851,11 +851,6 @@ void Verifier::Visitor::Check(Node* node) {
CheckValueInputIs(node, 0, Type::PlainNumber());
CheckUpperIs(node, Type::TaggedPointer());
break;
case IrOpcode::kEnsureWritableFastElements:
CheckValueInputIs(node, 0, Type::Any());
CheckValueInputIs(node, 1, Type::Internal());
CheckUpperIs(node, Type::Internal());
break;
case IrOpcode::kTransitionElementsKind:
CheckValueInputIs(node, 0, Type::Any());
CheckValueInputIs(node, 1, Type::Internal());
......
......@@ -55,7 +55,6 @@ class PlatformInterfaceDescriptor;
V(ConstructStub) \
V(ConstructTrampoline) \
V(RegExpConstructResult) \
V(CopyFixedArray) \
V(TransitionElementsKind) \
V(AllocateHeapNumber) \
V(AllocateFloat32x4) \
......@@ -649,12 +648,6 @@ class StoreGlobalViaContextDescriptor : public CallInterfaceDescriptor {
static const Register ValueRegister();
};
// Call-interface descriptor for the CopyFixedArray builtin: a single
// {kSource} parameter (the FixedArray to copy), using the default register
// assignment provided by DECLARE_DEFAULT_DESCRIPTOR.
class CopyFixedArrayDescriptor : public CallInterfaceDescriptor {
public:
DEFINE_PARAMETERS(kSource)
DECLARE_DEFAULT_DESCRIPTOR(CopyFixedArrayDescriptor, CallInterfaceDescriptor,
kParameterCount)
};
class TransitionElementsKindDescriptor : public CallInterfaceDescriptor {
public:
......
......@@ -4,7 +4,6 @@
#include "src/compiler/load-elimination.h"
#include "src/compiler/access-builder.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/node.h"
#include "src/compiler/simplified-operator.h"
#include "test/unittests/compiler/graph-reducer-unittest.h"
......@@ -21,20 +20,14 @@ namespace compiler {
class LoadEliminationTest : public TypedGraphTest {
public:
LoadEliminationTest()
: TypedGraphTest(3),
simplified_(zone()),
jsgraph_(isolate(), graph(), common(), nullptr, simplified(), nullptr) {
}
LoadEliminationTest() : TypedGraphTest(3), simplified_(zone()) {}
~LoadEliminationTest() override {}
protected:
JSGraph* jsgraph() { return &jsgraph_; }
SimplifiedOperatorBuilder* simplified() { return &simplified_; }
private:
SimplifiedOperatorBuilder simplified_;
JSGraph jsgraph_;
};
TEST_F(LoadEliminationTest, LoadElementAndLoadElement) {
......@@ -46,7 +39,7 @@ TEST_F(LoadEliminationTest, LoadElementAndLoadElement) {
MachineType::AnyTagged(), kNoWriteBarrier};
StrictMock<MockAdvancedReducerEditor> editor;
LoadElimination load_elimination(&editor, jsgraph(), zone());
LoadElimination load_elimination(&editor, zone());
load_elimination.Reduce(graph()->start());
......@@ -72,7 +65,7 @@ TEST_F(LoadEliminationTest, StoreElementAndLoadElement) {
MachineType::AnyTagged(), kNoWriteBarrier};
StrictMock<MockAdvancedReducerEditor> editor;
LoadElimination load_elimination(&editor, jsgraph(), zone());
LoadElimination load_elimination(&editor, zone());
load_elimination.Reduce(graph()->start());
......@@ -99,7 +92,7 @@ TEST_F(LoadEliminationTest, StoreElementAndStoreFieldAndLoadElement) {
MachineType::AnyTagged(), kNoWriteBarrier};
StrictMock<MockAdvancedReducerEditor> editor;
LoadElimination load_elimination(&editor, jsgraph(), zone());
LoadElimination load_elimination(&editor, zone());
load_elimination.Reduce(graph()->start());
......@@ -133,7 +126,7 @@ TEST_F(LoadEliminationTest, LoadFieldAndLoadField) {
kNoWriteBarrier};
StrictMock<MockAdvancedReducerEditor> editor;
LoadElimination load_elimination(&editor, jsgraph(), zone());
LoadElimination load_elimination(&editor, zone());
load_elimination.Reduce(graph()->start());
......@@ -162,7 +155,7 @@ TEST_F(LoadEliminationTest, StoreFieldAndLoadField) {
kNoWriteBarrier};
StrictMock<MockAdvancedReducerEditor> editor;
LoadElimination load_elimination(&editor, jsgraph(), zone());
LoadElimination load_elimination(&editor, zone());
load_elimination.Reduce(graph()->start());
......@@ -192,7 +185,7 @@ TEST_F(LoadEliminationTest, StoreFieldAndStoreElementAndLoadField) {
kNoWriteBarrier};
StrictMock<MockAdvancedReducerEditor> editor;
LoadElimination load_elimination(&editor, jsgraph(), zone());
LoadElimination load_elimination(&editor, zone());
load_elimination.Reduce(graph()->start());
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment