Commit 65d2f6e1 authored by Mike Stanton, committed by Commit Bot

Reland "[Turbofan] Introduce AllocateRaw node"

This is a reland of ba76ad68
Original change's description:
> [Turbofan] Introduce AllocateRaw node
>
> In order to simplify and verify the TurboFan graph, we
> need to wire allocations into the control chain after
> effect control linearization.
>
> Bug: v8:7002
> Change-Id: I4c5956c8d16773d721482d46a0b407bee01a9597
> Reviewed-on: https://chromium-review.googlesource.com/738139
> Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
> Commit-Queue: Michael Stanton <mvstanton@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#49209}

TBR=jarin@chromium.org

Bug: v8:7002
Change-Id: Iba588c498bb5de113abfba7f9e40cfe4325dea4a
Reviewed-on: https://chromium-review.googlesource.com/758436
Commit-Queue: Michael Stanton <mvstanton@chromium.org>
Reviewed-by: Michael Stanton <mvstanton@chromium.org>
Cr-Commit-Position: refs/heads/master@{#49300}
parent 1789c6f9
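
Before the diff itself, a minimal, self-contained sketch of the idea described above (toy types and helper names of my own, not V8's real GraphAssembler API): a plain Allocate node is threaded only through the effect chain, whereas an AllocateRaw node introduced during effect-control linearization also becomes the current control dependency, so every node built after it is control-ordered behind the allocation.

#include <iostream>
#include <memory>
#include <string>
#include <vector>

// Toy graph node: an operator name plus flattened inputs
// (value, then effect, then control), loosely modeled on TurboFan nodes.
struct Node {
  std::string op;
  std::vector<Node*> inputs;
};

// Toy assembler tracking the current effect and control dependencies.
// Names are illustrative only; this is not V8's GraphAssembler.
struct ToyAssembler {
  std::vector<std::unique_ptr<Node>> nodes;
  Node* current_effect = nullptr;
  Node* current_control = nullptr;

  Node* NewNode(std::string op, std::vector<Node*> inputs) {
    nodes.push_back(
        std::make_unique<Node>(Node{std::move(op), std::move(inputs)}));
    return nodes.back().get();
  }

  // Pre-change behavior (sketch): the allocation only becomes the new
  // current effect; the control chain is left untouched.
  Node* AllocateEffectOnly(Node* size) {
    return current_effect =
               NewNode("Allocate", {size, current_effect, current_control});
  }

  // Post-change behavior (sketch): the allocation becomes both the current
  // effect and the current control, so everything built afterwards depends
  // on it in the control graph as well.
  Node* AllocateRaw(Node* size) {
    return current_control = current_effect =
               NewNode("AllocateRaw", {size, current_effect, current_control});
  }
};

int main() {
  ToyAssembler a;
  Node* start = a.NewNode("Start", {});
  a.current_effect = start;
  a.current_control = start;

  Node* size = a.NewNode("Int32Constant(16)", {});
  Node* alloc = a.AllocateRaw(size);

  // A later node now takes the allocation as its control input as well.
  Node* store =
      a.NewNode("StoreField", {alloc, a.current_effect, a.current_control});
  std::cout << (store->inputs[2] == alloc) << "\n";  // prints 1
}

In the actual change this corresponds to GraphAssembler::Allocate assigning current_control_ as well as current_effect_ and emitting simplified()->AllocateRaw(...), as the GraphAssembler::Allocate hunk below shows.
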
@@ -829,6 +829,9 @@ bool EffectControlLinearizer::TryWireInStateEffect(Node* node,
     case IrOpcode::kCheckEqualsInternalizedString:
       LowerCheckEqualsInternalizedString(node, frame_state);
       break;
+    case IrOpcode::kAllocate:
+      result = LowerAllocate(node);
+      break;
     case IrOpcode::kCheckEqualsSymbol:
       LowerCheckEqualsSymbol(node, frame_state);
       break;
@@ -1917,6 +1920,13 @@ Node* EffectControlLinearizer::LowerCheckedTruncateTaggedToWord32(
   return done.PhiAt(0);
 }
 
+Node* EffectControlLinearizer::LowerAllocate(Node* node) {
+  Node* size = node->InputAt(0);
+  PretenureFlag pretenure = PretenureFlagOf(node->op());
+  Node* new_node = __ Allocate(pretenure, size);
+  return new_node;
+}
+
 Node* EffectControlLinearizer::LowerObjectIsArrayBufferView(Node* node) {
   Node* value = node->InputAt(0);
@@ -84,6 +84,7 @@ class V8_EXPORT_PRIVATE EffectControlLinearizer {
   Node* LowerTruncateTaggedToFloat64(Node* node);
   Node* LowerTruncateTaggedToWord32(Node* node);
   Node* LowerCheckedTruncateTaggedToWord32(Node* node, Node* frame_state);
+  Node* LowerAllocate(Node* node);
   Node* LowerObjectIsArrayBufferView(Node* node);
   Node* LowerObjectIsCallable(Node* node);
   Node* LowerObjectIsConstructor(Node* node);
@@ -97,8 +97,8 @@ Node* GraphAssembler::Projection(int index, Node* value) {
 }
 
 Node* GraphAssembler::Allocate(PretenureFlag pretenure, Node* size) {
-  return current_effect_ =
-      graph()->NewNode(simplified()->Allocate(Type::Any(), NOT_TENURED),
+  return current_control_ = current_effect_ =
+      graph()->NewNode(simplified()->AllocateRaw(Type::Any(), pretenure),
                        size, current_effect_, current_control_);
 }
@@ -75,7 +75,11 @@ void MemoryOptimizer::VisitNode(Node* node, AllocationState const* state) {
   DCHECK_LT(0, node->op()->EffectInputCount());
   switch (node->opcode()) {
     case IrOpcode::kAllocate:
-      return VisitAllocate(node, state);
+      // Allocate nodes were purged from the graph in effect-control
+      // linearization.
+      UNREACHABLE();
+    case IrOpcode::kAllocateRaw:
+      return VisitAllocateRaw(node, state);
     case IrOpcode::kCall:
       return VisitCall(node, state);
     case IrOpcode::kCallWithCallerSavedRegisters:
@@ -109,8 +113,9 @@ void MemoryOptimizer::VisitNode(Node* node, AllocationState const* state) {
 #define __ gasm()->
 
-void MemoryOptimizer::VisitAllocate(Node* node, AllocationState const* state) {
-  DCHECK_EQ(IrOpcode::kAllocate, node->opcode());
+void MemoryOptimizer::VisitAllocateRaw(Node* node,
+                                       AllocationState const* state) {
+  DCHECK_EQ(IrOpcode::kAllocateRaw, node->opcode());
   Node* value;
   Node* size = node->InputAt(0);
   Node* effect = node->InputAt(1);
@@ -129,7 +134,7 @@ void MemoryOptimizer::VisitAllocate(Node* node, AllocationState const* state) {
       Node* const user = edge.from();
       if (user->opcode() == IrOpcode::kStoreField && edge.index() == 0) {
         Node* const child = user->InputAt(1);
-        if (child->opcode() == IrOpcode::kAllocate &&
+        if (child->opcode() == IrOpcode::kAllocateRaw &&
             PretenureFlagOf(child->op()) == NOT_TENURED) {
           NodeProperties::ChangeOp(child, node->op());
           break;
@@ -142,7 +147,7 @@ void MemoryOptimizer::VisitAllocate(Node* node, AllocationState const* state) {
       Node* const user = edge.from();
      if (user->opcode() == IrOpcode::kStoreField && edge.index() == 1) {
         Node* const parent = user->InputAt(0);
-        if (parent->opcode() == IrOpcode::kAllocate &&
+        if (parent->opcode() == IrOpcode::kAllocateRaw &&
             PretenureFlagOf(parent->op()) == TENURED) {
           pretenure = TENURED;
           break;
@@ -297,7 +302,6 @@ void MemoryOptimizer::VisitAllocate(Node* node, AllocationState const* state) {
   effect = __ ExtractCurrentEffect();
   control = __ ExtractCurrentControl();
-  USE(control);  // Floating control, dropped on the floor.
 
   // Replace all effect uses of {node} with the {effect}, enqueue the
   // effect uses for further processing, and replace all value uses of
@@ -306,9 +310,11 @@ void MemoryOptimizer::VisitAllocate(Node* node, AllocationState const* state) {
     if (NodeProperties::IsEffectEdge(edge)) {
       EnqueueUse(edge.from(), edge.index(), state);
       edge.UpdateTo(effect);
-    } else {
-      DCHECK(NodeProperties::IsValueEdge(edge));
+    } else if (NodeProperties::IsValueEdge(edge)) {
       edge.UpdateTo(value);
+    } else {
+      DCHECK(NodeProperties::IsControlEdge(edge));
+      edge.UpdateTo(control);
     }
   }
@@ -106,7 +106,7 @@ class MemoryOptimizer final {
   };
 
   void VisitNode(Node*, AllocationState const*);
-  void VisitAllocate(Node*, AllocationState const*);
+  void VisitAllocateRaw(Node*, AllocationState const*);
   void VisitCall(Node*, AllocationState const*);
   void VisitCallWithCallerSavedRegisters(Node*, AllocationState const*);
   void VisitLoadElement(Node*, AllocationState const*);
@@ -344,6 +344,7 @@
   V(TypeOf)            \
   V(ClassOf)           \
   V(Allocate)          \
+  V(AllocateRaw)       \
   V(LoadFieldByIndex)  \
   V(LoadField)         \
   V(LoadElement)       \
@@ -510,7 +510,8 @@ PretenureFlag PretenureFlagOf(const Operator* op) {
       op->opcode() == IrOpcode::kNewSmiOrObjectElements) {
     return OpParameter<PretenureFlag>(op);
   }
-  DCHECK_EQ(IrOpcode::kAllocate, op->opcode());
+  DCHECK(op->opcode() == IrOpcode::kAllocate ||
+         op->opcode() == IrOpcode::kAllocateRaw);
   return OpParameter<AllocateParameters>(op).pretenure();
 }
@@ -1208,6 +1209,14 @@ const Operator* SimplifiedOperatorBuilder::Allocate(Type* type,
       1, 1, 1, 1, 1, 0, AllocateParameters(type, pretenure));
 }
 
+const Operator* SimplifiedOperatorBuilder::AllocateRaw(
+    Type* type, PretenureFlag pretenure) {
+  return new (zone()) Operator1<AllocateParameters>(
+      IrOpcode::kAllocateRaw,
+      Operator::kNoDeopt | Operator::kNoThrow | Operator::kNoWrite,
+      "AllocateRaw", 1, 1, 1, 1, 1, 1, AllocateParameters(type, pretenure));
+}
+
 const Operator* SimplifiedOperatorBuilder::StringFromCodePoint(
     UnicodeEncoding encoding) {
   switch (encoding) {
@@ -517,6 +517,8 @@ class V8_EXPORT_PRIVATE SimplifiedOperatorBuilder final
   const Operator* TransitionElementsKind(ElementsTransition transition);
 
   const Operator* Allocate(Type* type, PretenureFlag pretenure = NOT_TENURED);
+  const Operator* AllocateRaw(Type* type,
+                              PretenureFlag pretenure = NOT_TENURED);
 
   const Operator* LoadFieldByIndex();
   const Operator* LoadField(FieldAccess const&);
@@ -1938,6 +1938,8 @@ Type* Typer::Visitor::TypeAllocate(Node* node) {
   return AllocateTypeOf(node->op());
 }
 
+Type* Typer::Visitor::TypeAllocateRaw(Node* node) { UNREACHABLE(); }
+
 Type* Typer::Visitor::TypeLoadFieldByIndex(Node* node) {
   return Type::NonInternal();
 }
@@ -1085,6 +1085,9 @@ void Verifier::Visitor::Check(Node* node) {
     case IrOpcode::kAllocate:
       CheckValueInputIs(node, 0, Type::PlainNumber());
       break;
+    case IrOpcode::kAllocateRaw:
+      // CheckValueInputIs(node, 0, Type::PlainNumber());
+      break;
     case IrOpcode::kEnsureWritableFastElements:
       CheckValueInputIs(node, 0, Type::Any());
       CheckValueInputIs(node, 1, Type::Internal());