Commit 4a0f3c6f authored by Jakob Gruber, committed by V8 LUCI CQ

[nci] Remove the last remnants of NCI code

Bug: v8:8888
Change-Id: I3ecbf73df482865d838a0b98b6acd45de7e186f2
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3085270
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Auto-Submit: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Georg Neis <neis@chromium.org>
Cr-Commit-Position: refs/heads/master@{#76225}
parent d9ad9e31
@@ -77,28 +77,6 @@ class BytecodeGraphBuilder {
Node* GetParameter(int index, const char* debug_name_hint = nullptr);
CodeKind code_kind() const { return code_kind_; }
bool native_context_independent() const {
// TODO(jgruber,v8:8888): Remove dependent code.
return false;
}
bool is_turboprop() const { return code_kind_ == CodeKind::TURBOPROP; }
bool generate_full_feedback_collection() const {
// NCI code currently collects full feedback.
DCHECK_IMPLIES(native_context_independent(),
CollectFeedbackInGenericLowering());
return native_context_independent();
}
static JSTypeHintLowering::LoweringResult NoChange() {
return JSTypeHintLowering::LoweringResult::NoChange();
}
bool CanApplyTypeHintLowering(IrOpcode::Value opcode) const {
return !generate_full_feedback_collection() ||
!IrOpcode::IsFeedbackCollectingOpcode(opcode);
}
bool CanApplyTypeHintLowering(const Operator* op) const {
return CanApplyTypeHintLowering(static_cast<IrOpcode::Value>(op->opcode()));
}
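For context on what this removed gate did: when a code kind had to collect full feedback, any JS opcode that itself records feedback could not be speculatively lowered, since lowering would erase the generic, feedback-writing path. A minimal self-contained sketch of that predicate follows; the types and the opcode set are illustrative stand-ins, not the V8 API.

#include <cstdio>

// Illustrative stand-ins for IrOpcode and the removed helpers.
enum class Opcode { kJSAdd, kJSCall, kJSLoadNamed, kJSStackCheck };

// Assumption for this sketch: all opcodes except the stack check record
// feedback when executed generically.
bool IsFeedbackCollectingOpcode(Opcode op) {
  return op != Opcode::kJSStackCheck;
}

// Mirrors the shape of the removed CanApplyTypeHintLowering(): with full
// feedback collection required, feedback-collecting opcodes stay unreduced.
bool CanApplyTypeHintLowering(bool generate_full_feedback, Opcode op) {
  return !generate_full_feedback || !IsFeedbackCollectingOpcode(op);
}

int main() {
  std::printf("%d\n", CanApplyTypeHintLowering(true, Opcode::kJSAdd));   // 0
  std::printf("%d\n", CanApplyTypeHintLowering(false, Opcode::kJSAdd));  // 1
  return 0;
}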
// The node representing the current feedback vector is generated once prior
// to visiting bytecodes, and is later passed as input to other nodes that
@@ -107,22 +85,20 @@ class BytecodeGraphBuilder {
// to feedback_vector() once all uses of the direct heap object reference
// have been replaced with a Node* reference.
void CreateFeedbackVectorNode();
Node* BuildLoadFeedbackVector();
Node* feedback_vector_node() const {
DCHECK_NOT_NULL(feedback_vector_node_);
return feedback_vector_node_;
}
void CreateFeedbackCellNode();
Node* BuildLoadFeedbackCell();
Node* feedback_cell_node() const {
DCHECK(CodeKindCanTierUp(code_kind()));
DCHECK_NOT_NULL(feedback_cell_node_);
return feedback_cell_node_;
}
// Same as above for the feedback vector node.
void CreateNativeContextNode();
Node* BuildLoadNativeContext();
Node* native_context_node() const {
DCHECK_NOT_NULL(native_context_node_);
return native_context_node_;
@@ -135,13 +111,6 @@ class BytecodeGraphBuilder {
// Only relevant for specific code kinds (see CodeKindCanTierUp).
void MaybeBuildTierUpCheck();
// Like bytecode, NCI code must collect call feedback to preserve proper
// behavior of inlining heuristics when tiering up to Turbofan in the future.
// The invocation count (how often a particular JSFunction has been called)
// is tracked by the callee. For bytecode, this happens in the
// InterpreterEntryTrampoline, for NCI code it happens here in the prologue.
void MaybeBuildIncrementInvocationCount();
// Builder for loading a native context field.
Node* BuildLoadNativeContextField(int index);
@@ -1156,70 +1125,24 @@ Node* BytecodeGraphBuilder::GetParameter(int parameter_index,
void BytecodeGraphBuilder::CreateFeedbackCellNode() {
DCHECK_NULL(feedback_cell_node_);
if (native_context_independent()) {
feedback_cell_node_ = BuildLoadFeedbackCell();
} else if (is_turboprop()) {
feedback_cell_node_ = jsgraph()->Constant(feedback_cell_);
}
}
Node* BytecodeGraphBuilder::BuildLoadFeedbackCell() {
DCHECK(native_context_independent());
DCHECK_NULL(feedback_cell_node_);
return NewNode(
simplified()->LoadField(AccessBuilder::ForJSFunctionFeedbackCell()),
GetFunctionClosure());
// Only used by tier-up logic; for code that doesn't tier-up, we can skip
// this.
if (!CodeKindCanTierUp(code_kind())) return;
feedback_cell_node_ = jsgraph()->Constant(feedback_cell_);
}
void BytecodeGraphBuilder::CreateFeedbackVectorNode() {
DCHECK_NULL(feedback_vector_node_);
feedback_vector_node_ = native_context_independent()
? BuildLoadFeedbackVector()
: jsgraph()->Constant(feedback_vector());
}
Node* BytecodeGraphBuilder::BuildLoadFeedbackVector() {
DCHECK(native_context_independent());
DCHECK_NULL(feedback_vector_node_);
// The feedback vector must exist and remain live while the generated code
// lives. Specifically that means it must be created when NCI code is
// installed, and must not be flushed.
return NewNode(simplified()->LoadField(AccessBuilder::ForFeedbackCellValue()),
feedback_cell_node());
feedback_vector_node_ = jsgraph()->Constant(feedback_vector());
}
Node* BytecodeGraphBuilder::BuildLoadFeedbackCell(int index) {
if (native_context_independent()) {
// TODO(jgruber,v8:8888): Assumes that the feedback vector has been
// allocated.
Node* closure_feedback_cell_array =
NewNode(simplified()->LoadField(
AccessBuilder::ForFeedbackVectorClosureFeedbackCellArray()),
feedback_vector_node());
return NewNode(
simplified()->LoadField(AccessBuilder::ForFixedArraySlot(index)),
closure_feedback_cell_array);
} else {
return jsgraph()->Constant(feedback_vector().GetClosureFeedbackCell(index));
}
return jsgraph()->Constant(feedback_vector().GetClosureFeedbackCell(index));
}
void BytecodeGraphBuilder::CreateNativeContextNode() {
DCHECK_NULL(native_context_node_);
native_context_node_ = native_context_independent()
? BuildLoadNativeContext()
: jsgraph()->Constant(native_context());
}
Node* BytecodeGraphBuilder::BuildLoadNativeContext() {
DCHECK(native_context_independent());
DCHECK_NULL(native_context_node_);
Node* context_map = NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
environment()->Context());
return NewNode(simplified()->LoadField(AccessBuilder::ForMapNativeContext()),
context_map);
native_context_node_ = jsgraph()->Constant(native_context());
}
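The removed BuildLoad* helpers existed because NCI code could not embed heap constants: the feedback cell, feedback vector, and native context all had to be re-loaded at runtime. A toy model of the removed context -> map -> native-context load chain, with illustrative struct names rather than real V8 types:

#include <cassert>

// Illustrative model: context-independent code loads per-context data at
// runtime, while regular Turbofan code bakes it in as a constant.
struct NativeContext { int id; };
struct Map { NativeContext* native_context; };
struct Context { Map* map; };

// Shape of the removed BuildLoadNativeContext(): LoadField(Map) on the
// current context, then LoadField(NativeContext) on the map.
NativeContext* LoadNativeContext(Context* context) {
  return context->map->native_context;
}

int main() {
  NativeContext nc{1};
  Map map{&nc};
  Context ctx{&map};
  assert(LoadNativeContext(&ctx) == &nc);  // runtime load, no baked constant
  return 0;
}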
void BytecodeGraphBuilder::MaybeBuildTierUpCheck() {
@@ -1241,21 +1164,6 @@ void BytecodeGraphBuilder::MaybeBuildTierUpCheck() {
new_target, argc, context);
}
void BytecodeGraphBuilder::MaybeBuildIncrementInvocationCount() {
if (!generate_full_feedback_collection()) return;
Node* current_invocation_count =
NewNode(simplified()->LoadField(
AccessBuilder::ForFeedbackVectorInvocationCount()),
feedback_vector_node());
Node* next_invocation_count =
NewNode(simplified()->NumberAdd(), current_invocation_count,
jsgraph()->SmiConstant(1));
NewNode(simplified()->StoreField(
AccessBuilder::ForFeedbackVectorInvocationCount()),
feedback_vector_node(), next_invocation_count);
}
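What this removed prologue computed, reduced to its essence: bytecode bumps the callee's invocation count in the InterpreterEntryTrampoline, so NCI code had to emit the equivalent load/add/store itself. A self-contained sketch with an illustrative FeedbackVector type:

struct FeedbackVector {
  int invocation_count = 0;
};

// Mirrors the removed graph nodes: LoadField(InvocationCount),
// NumberAdd(+1), StoreField(InvocationCount).
void MaybeBuildIncrementInvocationCount(FeedbackVector* vector,
                                        bool generate_full_feedback) {
  if (!generate_full_feedback) return;
  int current = vector->invocation_count;
  vector->invocation_count = current + 1;
}

int main() {
  FeedbackVector vector;
  MaybeBuildIncrementInvocationCount(&vector, /*generate_full_feedback=*/true);
  return vector.invocation_count == 1 ? 0 : 1;
}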
Node* BytecodeGraphBuilder::BuildLoadNativeContextField(int index) {
Node* result = NewNode(javascript()->LoadContext(0, index, true));
NodeProperties::ReplaceContextInput(result, native_context_node());
@@ -1289,7 +1197,6 @@ void BytecodeGraphBuilder::CreateGraph() {
CreateFeedbackCellNode();
CreateFeedbackVectorNode();
MaybeBuildTierUpCheck();
MaybeBuildIncrementInvocationCount();
CreateNativeContextNode();
VisitBytecodes();
@@ -4158,7 +4065,6 @@ JSTypeHintLowering::LoweringResult
BytecodeGraphBuilder::TryBuildSimplifiedUnaryOp(const Operator* op,
Node* operand,
FeedbackSlot slot) {
if (!CanApplyTypeHintLowering(op)) return NoChange();
Node* effect = environment()->GetEffectDependency();
Node* control = environment()->GetControlDependency();
JSTypeHintLowering::LoweringResult result =
@@ -4172,7 +4078,6 @@ JSTypeHintLowering::LoweringResult
BytecodeGraphBuilder::TryBuildSimplifiedBinaryOp(const Operator* op, Node* left,
Node* right,
FeedbackSlot slot) {
if (!CanApplyTypeHintLowering(op)) return NoChange();
Node* effect = environment()->GetEffectDependency();
Node* control = environment()->GetControlDependency();
JSTypeHintLowering::LoweringResult result =
@@ -4187,7 +4092,6 @@ BytecodeGraphBuilder::TryBuildSimplifiedForInNext(Node* receiver,
Node* cache_array,
Node* cache_type, Node* index,
FeedbackSlot slot) {
if (!CanApplyTypeHintLowering(IrOpcode::kJSForInNext)) return NoChange();
Node* effect = environment()->GetEffectDependency();
Node* control = environment()->GetControlDependency();
JSTypeHintLowering::LoweringResult result =
@@ -4200,7 +4104,6 @@ BytecodeGraphBuilder::TryBuildSimplifiedForInNext(Node* receiver,
JSTypeHintLowering::LoweringResult
BytecodeGraphBuilder::TryBuildSimplifiedForInPrepare(Node* enumerator,
FeedbackSlot slot) {
if (!CanApplyTypeHintLowering(IrOpcode::kJSForInPrepare)) return NoChange();
Node* effect = environment()->GetEffectDependency();
Node* control = environment()->GetControlDependency();
JSTypeHintLowering::LoweringResult result =
@@ -4213,7 +4116,6 @@ BytecodeGraphBuilder::TryBuildSimplifiedForInPrepare(Node* enumerator,
JSTypeHintLowering::LoweringResult
BytecodeGraphBuilder::TryBuildSimplifiedToNumber(Node* value,
FeedbackSlot slot) {
if (!CanApplyTypeHintLowering(IrOpcode::kJSToNumber)) return NoChange();
Node* effect = environment()->GetEffectDependency();
Node* control = environment()->GetControlDependency();
JSTypeHintLowering::LoweringResult result =
@@ -4225,7 +4127,6 @@ BytecodeGraphBuilder::TryBuildSimplifiedToNumber(Node* value,
JSTypeHintLowering::LoweringResult BytecodeGraphBuilder::TryBuildSimplifiedCall(
const Operator* op, Node* const* args, int arg_count, FeedbackSlot slot) {
if (!CanApplyTypeHintLowering(op)) return NoChange();
Node* effect = environment()->GetEffectDependency();
Node* control = environment()->GetControlDependency();
JSTypeHintLowering::LoweringResult result =
@@ -4240,7 +4141,6 @@ BytecodeGraphBuilder::TryBuildSimplifiedConstruct(const Operator* op,
Node* const* args,
int arg_count,
FeedbackSlot slot) {
if (!CanApplyTypeHintLowering(op)) return NoChange();
Node* effect = environment()->GetEffectDependency();
Node* control = environment()->GetControlDependency();
JSTypeHintLowering::LoweringResult result =
@@ -4255,7 +4155,6 @@ BytecodeGraphBuilder::TryBuildSimplifiedGetIterator(const Operator* op,
Node* receiver,
FeedbackSlot load_slot,
FeedbackSlot call_slot) {
if (!CanApplyTypeHintLowering(op)) return NoChange();
Node* effect = environment()->GetEffectDependency();
Node* control = environment()->GetControlDependency();
JSTypeHintLowering::LoweringResult early_reduction =
@@ -4268,7 +4167,6 @@ BytecodeGraphBuilder::TryBuildSimplifiedGetIterator(const Operator* op,
JSTypeHintLowering::LoweringResult
BytecodeGraphBuilder::TryBuildSimplifiedLoadNamed(const Operator* op,
FeedbackSlot slot) {
if (!CanApplyTypeHintLowering(op)) return NoChange();
Node* effect = environment()->GetEffectDependency();
Node* control = environment()->GetControlDependency();
JSTypeHintLowering::LoweringResult early_reduction =
@@ -4281,7 +4179,6 @@ JSTypeHintLowering::LoweringResult
BytecodeGraphBuilder::TryBuildSimplifiedLoadKeyed(const Operator* op,
Node* receiver, Node* key,
FeedbackSlot slot) {
if (!CanApplyTypeHintLowering(op)) return NoChange();
Node* effect = environment()->GetEffectDependency();
Node* control = environment()->GetControlDependency();
JSTypeHintLowering::LoweringResult result =
@@ -4295,7 +4192,6 @@ JSTypeHintLowering::LoweringResult
BytecodeGraphBuilder::TryBuildSimplifiedStoreNamed(const Operator* op,
Node* receiver, Node* value,
FeedbackSlot slot) {
if (!CanApplyTypeHintLowering(op)) return NoChange();
Node* effect = environment()->GetEffectDependency();
Node* control = environment()->GetControlDependency();
JSTypeHintLowering::LoweringResult result =
@@ -4310,7 +4206,6 @@ BytecodeGraphBuilder::TryBuildSimplifiedStoreKeyed(const Operator* op,
Node* receiver, Node* key,
Node* value,
FeedbackSlot slot) {
if (!CanApplyTypeHintLowering(op)) return NoChange();
Node* effect = environment()->GetEffectDependency();
Node* control = environment()->GetControlDependency();
JSTypeHintLowering::LoweringResult result =
......
@@ -796,12 +796,6 @@ void CompilationDependencies::DependOnOwnConstantDictionaryProperty(
}
bool CompilationDependencies::Commit(Handle<Code> code) {
// Dependencies are context-dependent. In the future it may be possible to
// restore them in the consumer native context, but for now they are
// disabled.
CHECK_IMPLIES(broker_->is_native_context_independent(),
dependencies_.empty());
for (auto dep : dependencies_) {
if (!dep->IsValid()) {
dependencies_.clear();
......
@@ -4055,10 +4055,6 @@ JSCallReducer::ReduceCallOrConstructWithArrayLikeOrSpreadOfCreateArguments(
}
}
// TODO(jgruber,v8:8888): Attempt to remove this restriction. The reason it
// currently exists is because we cannot create code dependencies in NCI code.
if (broker()->is_native_context_independent()) return NoChange();
// For call/construct with spread, we need to also install a code
// dependency on the array iterator lookup protector cell to ensure
// that no one messed with the %ArrayIteratorPrototype%.next method.
@@ -6599,10 +6595,6 @@ Reduction JSCallReducer::ReduceStringFromCodePoint(Node* node) {
}
Reduction JSCallReducer::ReduceStringPrototypeIterator(Node* node) {
// TODO(jgruber): We could reduce here when generating native context
// independent code, if LowerJSCreateStringIterator were implemented in
// generic lowering.
if (broker()->is_native_context_independent()) return NoChange();
JSCallNode n(node);
CallParameters const& p = n.Parameters();
if (p.speculation_mode() == SpeculationMode::kDisallowSpeculation) {
@@ -6727,11 +6719,6 @@ Reduction JSCallReducer::ReduceStringPrototypeConcat(Node* node) {
}
Reduction JSCallReducer::ReducePromiseConstructor(Node* node) {
// TODO(jgruber): We could reduce here when generating native context
// independent code, if LowerJSCreatePromise were implemented in generic
// lowering.
if (broker()->is_native_context_independent()) return NoChange();
PromiseBuiltinReducerAssembler a(this, node, broker());
// We only inline when we have the executor.
......
@@ -241,13 +241,6 @@ namespace {
bool ShouldUseMegamorphicLoadBuiltin(FeedbackSource const& source,
base::Optional<NameRef> name,
JSHeapBroker* broker) {
if (broker->is_native_context_independent()) {
// The decision to use the megamorphic load builtin is made based on
// current feedback, and is thus context-dependent. It cannot be used when
// generating NCI code.
return false;
}
ProcessedFeedback const& feedback =
broker->GetFeedbackForPropertyAccess(source, AccessMode::kLoad, name);
@@ -595,7 +588,6 @@ void JSGenericLowering::LowerJSCreateArray(Node* node) {
Node* stub_code = jsgraph()->ArrayConstructorStubConstant();
Node* stub_arity = jsgraph()->Int32Constant(arity);
base::Optional<AllocationSiteRef> const site = p.site(broker());
DCHECK_IMPLIES(broker()->is_native_context_independent(), !site.has_value());
Node* type_info = site.has_value() ? jsgraph()->Constant(site.value())
: jsgraph()->UndefinedConstant();
Node* receiver = jsgraph()->UndefinedConstant();
@@ -814,23 +806,6 @@ void JSGenericLowering::LowerJSCreateBlockContext(Node* node) {
ReplaceWithRuntimeCall(node, Runtime::kPushBlockContext);
}
namespace {
bool CollectCallAndConstructFeedback(JSHeapBroker* broker) {
// Call and construct feedback is a special case. Besides shape feedback, we
// also increment the call count, which is later used to make inlining
// decisions. The call count is only comparable/reliable if it is incremented
// for all calls inside a function. This is not the case in default turbofan
// mode, in which many calls may be inlined and will thus never reach generic
// lowering (where we insert the feedback-collecting builtin call).
// Therefore it should only be collected in native context independent code,
// where we 1. know every call will reach generic lowering, and 2. we must
// collect full feedback to properly tier up later.
return broker->is_native_context_independent();
}
} // namespace
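The removed comment is worth unpacking: call counts feed inlining heuristics, and a count is only meaningful if every call increments it. A minimal sketch of such a heuristic (the threshold, types, and names are invented for illustration) shows why partial counting would skew the decision:

#include <cstdio>

// Hypothetical per-callsite feedback; in V8 the count lives in the
// feedback vector's call feedback slot.
struct CallSiteFeedback {
  int call_count = 0;
};

// Incremented on the generic call path; calls inlined before generic
// lowering skip this and undercount the site.
void OnGenericCall(CallSiteFeedback* feedback) { ++feedback->call_count; }

// Invented policy: treat a site as hot if it accounts for >10% of calls.
bool ShouldInline(const CallSiteFeedback& feedback, int total_calls) {
  return feedback.call_count * 10 > total_calls;
}

int main() {
  CallSiteFeedback site;
  for (int i = 0; i < 50; ++i) OnGenericCall(&site);
  std::printf("%d\n", ShouldInline(site, /*total_calls=*/100));  // 1 (hot)
  return 0;
}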
// TODO(jgruber,v8:8888): Should this collect feedback?
void JSGenericLowering::LowerJSConstructForwardVarargs(Node* node) {
ConstructForwardVarargsParameters p =
@@ -862,57 +837,22 @@ void JSGenericLowering::LowerJSConstruct(Node* node) {
CallDescriptor::Flags flags = FrameStateFlagForCall(node);
static constexpr int kReceiver = 1;
static constexpr int kMaybeFeedbackVector = 1;
if (CollectFeedbackInGenericLowering() &&
CollectCallAndConstructFeedback(broker()) && p.feedback().IsValid()) {
const int stack_argument_count =
arg_count + kReceiver + kMaybeFeedbackVector;
Callable callable =
Builtins::CallableFor(isolate(), Builtin::kConstruct_WithFeedback);
// If this fails, we might need to update the parameter reordering code
// to ensure that the additional arguments passed via stack are pushed
// between top of stack and JS arguments.
DCHECK_EQ(callable.descriptor().GetStackParameterCount(),
kMaybeFeedbackVector);
auto call_descriptor = Linkage::GetStubCallDescriptor(
zone(), callable.descriptor(), stack_argument_count, flags);
Node* stub_code = jsgraph()->HeapConstant(callable.code());
Node* stub_arity = jsgraph()->Int32Constant(arg_count);
Node* slot = jsgraph()->UintPtrConstant(p.feedback().index());
Node* receiver = jsgraph()->UndefinedConstant();
Node* feedback_vector = node->RemoveInput(n.FeedbackVectorIndex());
// Register argument inputs are followed by stack argument inputs (such as
// feedback_vector). Both are listed in ascending order. Note that
// the receiver is implicitly placed on the stack and is thus inserted
// between explicitly-specified register and stack arguments.
// TODO(jgruber): Implement a simpler way to specify these mutations.
node->InsertInput(zone(), 0, stub_code);
node->InsertInput(zone(), 3, stub_arity);
node->InsertInput(zone(), 4, slot);
node->InsertInput(zone(), 5, feedback_vector);
node->InsertInput(zone(), 6, receiver);
// After: {code, target, new_target, arity, slot, vector, receiver,
// ...args}.
NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
} else {
const int stack_argument_count = arg_count + kReceiver;
Callable callable = Builtins::CallableFor(isolate(), Builtin::kConstruct);
auto call_descriptor = Linkage::GetStubCallDescriptor(
zone(), callable.descriptor(), stack_argument_count, flags);
Node* stub_code = jsgraph()->HeapConstant(callable.code());
Node* stub_arity = jsgraph()->Int32Constant(arg_count);
Node* receiver = jsgraph()->UndefinedConstant();
node->RemoveInput(n.FeedbackVectorIndex());
node->InsertInput(zone(), 0, stub_code);
node->InsertInput(zone(), 3, stub_arity);
node->InsertInput(zone(), 4, receiver);
const int stack_argument_count = arg_count + kReceiver;
Callable callable = Builtins::CallableFor(isolate(), Builtin::kConstruct);
auto call_descriptor = Linkage::GetStubCallDescriptor(
zone(), callable.descriptor(), stack_argument_count, flags);
Node* stub_code = jsgraph()->HeapConstant(callable.code());
Node* stub_arity = jsgraph()->Int32Constant(arg_count);
Node* receiver = jsgraph()->UndefinedConstant();
node->RemoveInput(n.FeedbackVectorIndex());
node->InsertInput(zone(), 0, stub_code);
node->InsertInput(zone(), 3, stub_arity);
node->InsertInput(zone(), 4, receiver);
// After: {code, target, new_target, arity, receiver, ...args}.
// After: {code, target, new_target, arity, receiver, ...args}.
NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
}
NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
}
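The input mutations in the surviving branch are easier to follow against a concrete list. A toy model using std::vector as the node's input list (argument names illustrative) reproduces the documented before/after layout:

#include <iostream>
#include <string>
#include <vector>

int main() {
  // Before lowering, a JSConstruct node's inputs are
  // {target, new_target, ...args, feedback_vector}.
  std::vector<std::string> inputs = {"target", "new_target", "arg0", "arg1",
                                     "feedback_vector"};
  inputs.pop_back();                              // RemoveInput(FeedbackVectorIndex())
  inputs.insert(inputs.begin() + 0, "code");      // InsertInput(zone(), 0, stub_code)
  inputs.insert(inputs.begin() + 3, "arity");     // InsertInput(zone(), 3, stub_arity)
  inputs.insert(inputs.begin() + 4, "receiver");  // InsertInput(zone(), 4, receiver)
  // After: {code, target, new_target, arity, receiver, ...args}.
  for (const std::string& input : inputs) std::cout << input << ' ';
  std::cout << '\n';
  return 0;
}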
void JSGenericLowering::LowerJSConstructWithArrayLike(Node* node) {
@@ -924,58 +864,25 @@ void JSGenericLowering::LowerJSConstructWithArrayLike(Node* node) {
static constexpr int kReceiver = 1;
static constexpr int kArgumentList = 1;
static constexpr int kMaybeFeedbackVector = 1;
if (CollectFeedbackInGenericLowering() &&
CollectCallAndConstructFeedback(broker()) && p.feedback().IsValid()) {
const int stack_argument_count =
arg_count - kArgumentList + kReceiver + kMaybeFeedbackVector;
Callable callable = Builtins::CallableFor(
isolate(), Builtin::kConstructWithArrayLike_WithFeedback);
// If this fails, we might need to update the parameter reordering code
// to ensure that the additional arguments passed via stack are pushed
// between top of stack and JS arguments.
DCHECK_EQ(callable.descriptor().GetStackParameterCount(),
kMaybeFeedbackVector);
auto call_descriptor = Linkage::GetStubCallDescriptor(
zone(), callable.descriptor(), stack_argument_count, flags);
Node* stub_code = jsgraph()->HeapConstant(callable.code());
Node* receiver = jsgraph()->UndefinedConstant();
Node* slot = jsgraph()->UintPtrConstant(p.feedback().index());
Node* feedback_vector = node->RemoveInput(n.FeedbackVectorIndex());
// Register argument inputs are followed by stack argument inputs (such as
// feedback_vector). Both are listed in ascending order. Note that
// the receiver is implicitly placed on the stack and is thus inserted
// between explicitly-specified register and stack arguments.
// TODO(jgruber): Implement a simpler way to specify these mutations.
node->InsertInput(zone(), 0, stub_code);
node->InsertInput(zone(), 4, slot);
node->InsertInput(zone(), 5, feedback_vector);
node->InsertInput(zone(), 6, receiver);
// After: {code, target, new_target, arguments_list, slot, vector,
// receiver}.
NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
} else {
const int stack_argument_count = arg_count - kArgumentList + kReceiver;
Callable callable =
Builtins::CallableFor(isolate(), Builtin::kConstructWithArrayLike);
// If this fails, we might need to update the parameter reordering code
// to ensure that the additional arguments passed via stack are pushed
// between top of stack and JS arguments.
DCHECK_EQ(callable.descriptor().GetStackParameterCount(), 0);
auto call_descriptor = Linkage::GetStubCallDescriptor(
zone(), callable.descriptor(), stack_argument_count, flags);
Node* stub_code = jsgraph()->HeapConstant(callable.code());
Node* receiver = jsgraph()->UndefinedConstant();
node->RemoveInput(n.FeedbackVectorIndex());
node->InsertInput(zone(), 0, stub_code);
node->InsertInput(zone(), 4, receiver);
const int stack_argument_count = arg_count - kArgumentList + kReceiver;
Callable callable =
Builtins::CallableFor(isolate(), Builtin::kConstructWithArrayLike);
// If this fails, we might need to update the parameter reordering code
// to ensure that the additional arguments passed via stack are pushed
// between top of stack and JS arguments.
DCHECK_EQ(callable.descriptor().GetStackParameterCount(), 0);
auto call_descriptor = Linkage::GetStubCallDescriptor(
zone(), callable.descriptor(), stack_argument_count, flags);
Node* stub_code = jsgraph()->HeapConstant(callable.code());
Node* receiver = jsgraph()->UndefinedConstant();
node->RemoveInput(n.FeedbackVectorIndex());
node->InsertInput(zone(), 0, stub_code);
node->InsertInput(zone(), 4, receiver);
// After: {code, target, new_target, arguments_list, receiver}.
// After: {code, target, new_target, arguments_list, receiver}.
NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
}
NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
}
void JSGenericLowering::LowerJSConstructWithSpread(Node* node) {
@@ -987,80 +894,34 @@ void JSGenericLowering::LowerJSConstructWithSpread(Node* node) {
static constexpr int kReceiver = 1;
static constexpr int kTheSpread = 1; // Included in `arg_count`.
static constexpr int kMaybeFeedbackVector = 1;
if (CollectFeedbackInGenericLowering() &&
CollectCallAndConstructFeedback(broker()) && p.feedback().IsValid()) {
const int stack_argument_count =
arg_count + kReceiver + kMaybeFeedbackVector;
Callable callable = Builtins::CallableFor(
isolate(), Builtin::kConstructWithSpread_WithFeedback);
// If this fails, we might need to update the parameter reordering code
// to ensure that the additional arguments passed via stack are pushed
// between top of stack and JS arguments.
DCHECK_EQ(callable.descriptor().GetStackParameterCount(),
kTheSpread + kMaybeFeedbackVector);
auto call_descriptor = Linkage::GetStubCallDescriptor(
zone(), callable.descriptor(), stack_argument_count, flags);
Node* stub_code = jsgraph()->HeapConstant(callable.code());
Node* slot = jsgraph()->UintPtrConstant(p.feedback().index());
// The single available register is needed for `slot`, thus `spread` remains
// on the stack here.
Node* stub_arity = jsgraph()->Int32Constant(arg_count - kTheSpread);
Node* receiver = jsgraph()->UndefinedConstant();
Node* feedback_vector = node->RemoveInput(n.FeedbackVectorIndex());
Node* spread = node->RemoveInput(n.LastArgumentIndex());
// Register argument inputs are followed by stack argument inputs (such as
// feedback_vector). Both are listed in ascending order. Note that
// the receiver is implicitly placed on the stack and is thus inserted
// between explicitly-specified register and stack arguments.
// TODO(jgruber): Implement a simpler way to specify these mutations.
node->InsertInput(zone(), 0, stub_code);
node->InsertInput(zone(), 3, stub_arity);
node->InsertInput(zone(), 4, slot);
// Arguments on the stack should be inserted in reverse order, i.e., the last
// arguments defined in the interface descriptor should be inserted first.
DCHECK_EQ(callable.descriptor().GetStackArgumentOrder(),
StackArgumentOrder::kJS);
node->InsertInput(zone(), 5, feedback_vector);
node->InsertInput(zone(), 6, spread);
node->InsertInput(zone(), 7, receiver);
// After: {code, target, new_target, arity, slot, vector, spread, receiver,
// ...args}.
NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
} else {
const int stack_argument_count = arg_count + kReceiver - kTheSpread;
Callable callable = CodeFactory::ConstructWithSpread(isolate());
// If this fails, we might need to update the parameter reordering code
// to ensure that the additional arguments passed via stack are pushed
// between top of stack and JS arguments.
DCHECK_EQ(callable.descriptor().GetStackParameterCount(), 0);
auto call_descriptor = Linkage::GetStubCallDescriptor(
zone(), callable.descriptor(), stack_argument_count, flags);
Node* stub_code = jsgraph()->HeapConstant(callable.code());
const int stack_argument_count = arg_count + kReceiver - kTheSpread;
Callable callable = CodeFactory::ConstructWithSpread(isolate());
// If this fails, we might need to update the parameter reordering code
// to ensure that the additional arguments passed via stack are pushed
// between top of stack and JS arguments.
DCHECK_EQ(callable.descriptor().GetStackParameterCount(), 0);
auto call_descriptor = Linkage::GetStubCallDescriptor(
zone(), callable.descriptor(), stack_argument_count, flags);
Node* stub_code = jsgraph()->HeapConstant(callable.code());
// We pass the spread in a register, not on the stack.
Node* stub_arity = jsgraph()->Int32Constant(arg_count - kTheSpread);
Node* receiver = jsgraph()->UndefinedConstant();
DCHECK(n.FeedbackVectorIndex() > n.LastArgumentIndex());
node->RemoveInput(n.FeedbackVectorIndex());
Node* spread = node->RemoveInput(n.LastArgumentIndex());
// We pass the spread in a register, not on the stack.
Node* stub_arity = jsgraph()->Int32Constant(arg_count - kTheSpread);
Node* receiver = jsgraph()->UndefinedConstant();
DCHECK(n.FeedbackVectorIndex() > n.LastArgumentIndex());
node->RemoveInput(n.FeedbackVectorIndex());
Node* spread = node->RemoveInput(n.LastArgumentIndex());
node->InsertInput(zone(), 0, stub_code);
node->InsertInput(zone(), 3, stub_arity);
node->InsertInput(zone(), 4, spread);
node->InsertInput(zone(), 5, receiver);
node->InsertInput(zone(), 0, stub_code);
node->InsertInput(zone(), 3, stub_arity);
node->InsertInput(zone(), 4, spread);
node->InsertInput(zone(), 5, receiver);
// After: {code, target, new_target, arity, spread, receiver, ...args}.
// After: {code, target, new_target, arity, spread, receiver, ...args}.
NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
}
NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
}
// TODO(jgruber,v8:8888): Should this collect feedback?
void JSGenericLowering::LowerJSCallForwardVarargs(Node* node) {
CallForwardVarargsParameters p = CallForwardVarargsParametersOf(node->op());
int const arg_count = static_cast<int>(p.arity() - 2);
@@ -1083,34 +944,17 @@ void JSGenericLowering::LowerJSCall(Node* node) {
int const arg_count = p.arity_without_implicit_args();
ConvertReceiverMode const mode = p.convert_mode();
Node* feedback_vector = n.feedback_vector();
node->RemoveInput(n.FeedbackVectorIndex());
if (CollectFeedbackInGenericLowering() &&
CollectCallAndConstructFeedback(broker()) && p.feedback().IsValid()) {
Callable callable = CodeFactory::Call_WithFeedback(isolate(), mode);
CallDescriptor::Flags flags = FrameStateFlagForCall(node);
auto call_descriptor = Linkage::GetStubCallDescriptor(
zone(), callable.descriptor(), arg_count + 1, flags);
Node* stub_code = jsgraph()->HeapConstant(callable.code());
Node* stub_arity = jsgraph()->Int32Constant(arg_count);
Node* slot = jsgraph()->UintPtrConstant(p.feedback().index());
node->InsertInput(zone(), 0, stub_code);
node->InsertInput(zone(), 2, stub_arity);
node->InsertInput(zone(), 3, slot);
node->InsertInput(zone(), 4, feedback_vector);
NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
} else {
Callable callable = CodeFactory::Call(isolate(), mode);
CallDescriptor::Flags flags = FrameStateFlagForCall(node);
auto call_descriptor = Linkage::GetStubCallDescriptor(
zone(), callable.descriptor(), arg_count + 1, flags);
Node* stub_code = jsgraph()->HeapConstant(callable.code());
Node* stub_arity = jsgraph()->Int32Constant(arg_count);
node->InsertInput(zone(), 0, stub_code);
node->InsertInput(zone(), 2, stub_arity);
NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
}
Callable callable = CodeFactory::Call(isolate(), mode);
CallDescriptor::Flags flags = FrameStateFlagForCall(node);
auto call_descriptor = Linkage::GetStubCallDescriptor(
zone(), callable.descriptor(), arg_count + 1, flags);
Node* stub_code = jsgraph()->HeapConstant(callable.code());
Node* stub_arity = jsgraph()->Int32Constant(arg_count);
node->InsertInput(zone(), 0, stub_code);
node->InsertInput(zone(), 2, stub_arity);
NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
}
void JSGenericLowering::LowerJSCallWithArrayLike(Node* node) {
@@ -1123,55 +967,25 @@ void JSGenericLowering::LowerJSCallWithArrayLike(Node* node) {
static constexpr int kArgumentsList = 1;
static constexpr int kReceiver = 1;
if (CollectFeedbackInGenericLowering() &&
CollectCallAndConstructFeedback(broker()) && p.feedback().IsValid()) {
const int stack_argument_count = arg_count - kArgumentsList + kReceiver;
Callable callable = Builtins::CallableFor(
isolate(), Builtin::kCallWithArrayLike_WithFeedback);
auto call_descriptor = Linkage::GetStubCallDescriptor(
zone(), callable.descriptor(), stack_argument_count, flags);
Node* stub_code = jsgraph()->HeapConstant(callable.code());
Node* receiver = n.receiver();
Node* arguments_list = n.Argument(0);
Node* feedback_vector = n.feedback_vector();
Node* slot = jsgraph()->UintPtrConstant(p.feedback().index());
// Shuffling inputs.
// Before: {target, receiver, arguments_list, vector}.
node->ReplaceInput(1, arguments_list);
node->ReplaceInput(2, feedback_vector);
node->ReplaceInput(3, receiver);
// Now: {target, arguments_list, vector, receiver}.
node->InsertInput(zone(), 0, stub_code);
node->InsertInput(zone(), 3, slot);
// After: {code, target, arguments_list, slot, vector, receiver}.
NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
} else {
const int stack_argument_count = arg_count - kArgumentsList + kReceiver;
Callable callable = CodeFactory::CallWithArrayLike(isolate());
auto call_descriptor = Linkage::GetStubCallDescriptor(
zone(), callable.descriptor(), stack_argument_count, flags);
Node* stub_code = jsgraph()->HeapConstant(callable.code());
Node* receiver = n.receiver();
Node* arguments_list = n.Argument(0);
const int stack_argument_count = arg_count - kArgumentsList + kReceiver;
Callable callable = CodeFactory::CallWithArrayLike(isolate());
auto call_descriptor = Linkage::GetStubCallDescriptor(
zone(), callable.descriptor(), stack_argument_count, flags);
Node* stub_code = jsgraph()->HeapConstant(callable.code());
Node* receiver = n.receiver();
Node* arguments_list = n.Argument(0);
// Shuffling inputs.
// Before: {target, receiver, arguments_list, vector}.
// Shuffling inputs.
// Before: {target, receiver, arguments_list, vector}.
node->RemoveInput(n.FeedbackVectorIndex());
node->InsertInput(zone(), 0, stub_code);
node->ReplaceInput(2, arguments_list);
node->ReplaceInput(3, receiver);
node->RemoveInput(n.FeedbackVectorIndex());
node->InsertInput(zone(), 0, stub_code);
node->ReplaceInput(2, arguments_list);
node->ReplaceInput(3, receiver);
// After: {code, target, arguments_list, receiver}.
// After: {code, target, arguments_list, receiver}.
NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
}
NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
}
void JSGenericLowering::LowerJSCallWithSpread(Node* node) {
@@ -1183,73 +997,33 @@ void JSGenericLowering::LowerJSCallWithSpread(Node* node) {
static constexpr int kReceiver = 1;
static constexpr int kTheSpread = 1;
static constexpr int kMaybeFeedbackVector = 1;
if (CollectFeedbackInGenericLowering() &&
CollectCallAndConstructFeedback(broker()) && p.feedback().IsValid()) {
const int stack_argument_count =
arg_count - kTheSpread + kReceiver + kMaybeFeedbackVector;
Callable callable =
Builtins::CallableFor(isolate(), Builtin::kCallWithSpread_WithFeedback);
// If this fails, we might need to update the parameter reordering code
// to ensure that the additional arguments passed via stack are pushed
// between top of stack and JS arguments.
DCHECK_EQ(callable.descriptor().GetStackParameterCount(),
kMaybeFeedbackVector);
auto call_descriptor = Linkage::GetStubCallDescriptor(
zone(), callable.descriptor(), stack_argument_count, flags);
Node* stub_code = jsgraph()->HeapConstant(callable.code());
Node* slot = jsgraph()->UintPtrConstant(p.feedback().index());
// We pass the spread in a register, not on the stack.
Node* stub_arity = jsgraph()->Int32Constant(arg_count - kTheSpread);
// Register argument inputs are followed by stack argument inputs (such as
// feedback_vector). Both are listed in ascending order. Note that
// the receiver is implicitly placed on the stack and is thus inserted
// between explicitly-specified register and stack arguments.
// TODO(jgruber): Implement a simpler way to specify these mutations.
// Shuffling inputs.
// Before: {target, receiver, ...args, spread, vector}.
Node* feedback_vector = node->RemoveInput(n.FeedbackVectorIndex());
Node* spread = node->RemoveInput(n.LastArgumentIndex());
node->InsertInput(zone(), 0, stub_code);
node->InsertInput(zone(), 2, stub_arity);
node->InsertInput(zone(), 3, spread);
node->InsertInput(zone(), 4, slot);
node->InsertInput(zone(), 5, feedback_vector);
// After: {code, target, arity, spread, slot, vector, receiver, ...args}.
NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
} else {
const int stack_argument_count = arg_count - kTheSpread + kReceiver;
Callable callable = CodeFactory::CallWithSpread(isolate());
// If this fails, we might need to update the parameter reordering code
// to ensure that the additional arguments passed via stack are pushed
// between top of stack and JS arguments.
DCHECK_EQ(callable.descriptor().GetStackParameterCount(), 0);
auto call_descriptor = Linkage::GetStubCallDescriptor(
zone(), callable.descriptor(), stack_argument_count, flags);
Node* stub_code = jsgraph()->HeapConstant(callable.code());
const int stack_argument_count = arg_count - kTheSpread + kReceiver;
Callable callable = CodeFactory::CallWithSpread(isolate());
// If this fails, we might need to update the parameter reordering code
// to ensure that the additional arguments passed via stack are pushed
// between top of stack and JS arguments.
DCHECK_EQ(callable.descriptor().GetStackParameterCount(), 0);
auto call_descriptor = Linkage::GetStubCallDescriptor(
zone(), callable.descriptor(), stack_argument_count, flags);
Node* stub_code = jsgraph()->HeapConstant(callable.code());
// We pass the spread in a register, not on the stack.
Node* stub_arity = jsgraph()->Int32Constant(arg_count - kTheSpread);
// We pass the spread in a register, not on the stack.
Node* stub_arity = jsgraph()->Int32Constant(arg_count - kTheSpread);
// Shuffling inputs.
// Before: {target, receiver, ...args, spread, vector}.
// Shuffling inputs.
// Before: {target, receiver, ...args, spread, vector}.
node->RemoveInput(n.FeedbackVectorIndex());
Node* spread = node->RemoveInput(n.LastArgumentIndex());
node->RemoveInput(n.FeedbackVectorIndex());
Node* spread = node->RemoveInput(n.LastArgumentIndex());
node->InsertInput(zone(), 0, stub_code);
node->InsertInput(zone(), 2, stub_arity);
node->InsertInput(zone(), 3, spread);
node->InsertInput(zone(), 0, stub_code);
node->InsertInput(zone(), 2, stub_arity);
node->InsertInput(zone(), 3, spread);
// After: {code, target, arity, spread, receiver, ...args}.
// After: {code, target, arity, spread, receiver, ...args}.
NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
}
NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
}
void JSGenericLowering::LowerJSCallRuntime(Node* node) {
......
@@ -537,12 +537,6 @@ bool HasMigrationTargets(const ZoneVector<MapRef>& maps) {
} // namespace
bool JSHeapBroker::CanUseFeedback(const FeedbackNexus& nexus) const {
// TODO(jgruber,v8:8888): Currently, nci code does not use any
// feedback. This restriction will be relaxed in the future.
return !is_native_context_independent() && !nexus.IsUninitialized();
}
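With the NCI restriction gone, the only bail-out left in the Read* methods below is genuinely uninitialized feedback; the CanUseFeedback() wrapper collapses to nexus.IsUninitialized(). A sketch of the resulting pattern, with illustrative types in place of FeedbackNexus and ProcessedFeedback:

#include <string>

// Illustrative stand-ins for FeedbackNexus and ProcessedFeedback.
struct FeedbackNexus {
  bool uninitialized;
  bool IsUninitialized() const { return uninitialized; }
};
struct ProcessedFeedback {
  std::string kind;
};

// Pattern now used by every ReadFeedbackFor*: bail out only when the slot
// has never recorded anything.
ProcessedFeedback ReadFeedback(const FeedbackNexus& nexus) {
  if (nexus.IsUninitialized()) return {"insufficient"};
  return {"processed"};
}

int main() {
  return ReadFeedback(FeedbackNexus{false}).kind == "processed" ? 0 : 1;
}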
const ProcessedFeedback& JSHeapBroker::NewInsufficientFeedback(
FeedbackSlotKind kind) const {
return *zone()->New<InsufficientFeedback>(kind);
@@ -553,7 +547,7 @@ ProcessedFeedback const& JSHeapBroker::ReadFeedbackForPropertyAccess(
base::Optional<NameRef> static_name) {
FeedbackNexus nexus(source.vector, source.slot, feedback_nexus_config());
FeedbackSlotKind kind = nexus.kind();
if (!CanUseFeedback(nexus)) return NewInsufficientFeedback(kind);
if (nexus.IsUninitialized()) return NewInsufficientFeedback(kind);
ZoneVector<MapRefAndHandler> maps_and_handlers(zone());
ZoneVector<MapRef> maps(zone());
@@ -622,7 +616,7 @@ ProcessedFeedback const& JSHeapBroker::ReadFeedbackForGlobalAccess(
nexus.kind() == FeedbackSlotKind::kLoadGlobalNotInsideTypeof ||
nexus.kind() == FeedbackSlotKind::kStoreGlobalSloppy ||
nexus.kind() == FeedbackSlotKind::kStoreGlobalStrict);
if (!CanUseFeedback(nexus)) return NewInsufficientFeedback(nexus.kind());
if (nexus.IsUninitialized()) return NewInsufficientFeedback(nexus.kind());
if (nexus.ic_state() != MONOMORPHIC || nexus.GetFeedback()->IsCleared()) {
return *zone()->New<GlobalAccessFeedback>(nexus.kind());
}
@@ -662,7 +656,7 @@ ProcessedFeedback const& JSHeapBroker::ReadFeedbackForGlobalAccess(
ProcessedFeedback const& JSHeapBroker::ReadFeedbackForBinaryOperation(
FeedbackSource const& source) const {
FeedbackNexus nexus(source.vector, source.slot, feedback_nexus_config());
if (!CanUseFeedback(nexus)) return NewInsufficientFeedback(nexus.kind());
if (nexus.IsUninitialized()) return NewInsufficientFeedback(nexus.kind());
BinaryOperationHint hint = nexus.GetBinaryOperationFeedback();
DCHECK_NE(hint, BinaryOperationHint::kNone); // Not uninitialized.
return *zone()->New<BinaryOperationFeedback>(hint, nexus.kind());
@@ -671,7 +665,7 @@ ProcessedFeedback const& JSHeapBroker::ReadFeedbackForBinaryOperation(
ProcessedFeedback const& JSHeapBroker::ReadFeedbackForCompareOperation(
FeedbackSource const& source) const {
FeedbackNexus nexus(source.vector, source.slot, feedback_nexus_config());
if (!CanUseFeedback(nexus)) return NewInsufficientFeedback(nexus.kind());
if (nexus.IsUninitialized()) return NewInsufficientFeedback(nexus.kind());
CompareOperationHint hint = nexus.GetCompareOperationFeedback();
DCHECK_NE(hint, CompareOperationHint::kNone); // Not uninitialized.
return *zone()->New<CompareOperationFeedback>(hint, nexus.kind());
@@ -680,7 +674,7 @@ ProcessedFeedback const& JSHeapBroker::ReadFeedbackForCompareOperation(
ProcessedFeedback const& JSHeapBroker::ReadFeedbackForForIn(
FeedbackSource const& source) const {
FeedbackNexus nexus(source.vector, source.slot, feedback_nexus_config());
if (!CanUseFeedback(nexus)) return NewInsufficientFeedback(nexus.kind());
if (nexus.IsUninitialized()) return NewInsufficientFeedback(nexus.kind());
ForInHint hint = nexus.GetForInFeedback();
DCHECK_NE(hint, ForInHint::kNone); // Not uninitialized.
return *zone()->New<ForInFeedback>(hint, nexus.kind());
@@ -689,7 +683,7 @@ ProcessedFeedback const& JSHeapBroker::ReadFeedbackForForIn(
ProcessedFeedback const& JSHeapBroker::ReadFeedbackForInstanceOf(
FeedbackSource const& source) {
FeedbackNexus nexus(source.vector, source.slot, feedback_nexus_config());
if (!CanUseFeedback(nexus)) return NewInsufficientFeedback(nexus.kind());
if (nexus.IsUninitialized()) return NewInsufficientFeedback(nexus.kind());
base::Optional<JSObjectRef> optional_constructor;
{
@@ -705,7 +699,7 @@ ProcessedFeedback const& JSHeapBroker::ReadFeedbackForInstanceOf(
ProcessedFeedback const& JSHeapBroker::ReadFeedbackForArrayOrObjectLiteral(
FeedbackSource const& source) {
FeedbackNexus nexus(source.vector, source.slot, feedback_nexus_config());
if (!CanUseFeedback(nexus)) return NewInsufficientFeedback(nexus.kind());
if (nexus.IsUninitialized()) return NewInsufficientFeedback(nexus.kind());
HeapObject object;
if (!nexus.GetFeedback()->GetHeapObject(&object)) {
@@ -723,7 +717,7 @@ ProcessedFeedback const& JSHeapBroker::ReadFeedbackForArrayOrObjectLiteral(
ProcessedFeedback const& JSHeapBroker::ReadFeedbackForRegExpLiteral(
FeedbackSource const& source) {
FeedbackNexus nexus(source.vector, source.slot, feedback_nexus_config());
if (!CanUseFeedback(nexus)) return NewInsufficientFeedback(nexus.kind());
if (nexus.IsUninitialized()) return NewInsufficientFeedback(nexus.kind());
HeapObject object;
if (!nexus.GetFeedback()->GetHeapObject(&object)) {
@@ -741,7 +735,7 @@ ProcessedFeedback const& JSHeapBroker::ReadFeedbackForRegExpLiteral(
ProcessedFeedback const& JSHeapBroker::ReadFeedbackForTemplateObject(
FeedbackSource const& source) {
FeedbackNexus nexus(source.vector, source.slot, feedback_nexus_config());
if (!CanUseFeedback(nexus)) return NewInsufficientFeedback(nexus.kind());
if (nexus.IsUninitialized()) return NewInsufficientFeedback(nexus.kind());
HeapObject object;
if (!nexus.GetFeedback()->GetHeapObject(&object)) {
@@ -755,7 +749,7 @@ ProcessedFeedback const& JSHeapBroker::ReadFeedbackForTemplateObject(
ProcessedFeedback const& JSHeapBroker::ReadFeedbackForCall(
FeedbackSource const& source) {
FeedbackNexus nexus(source.vector, source.slot, feedback_nexus_config());
if (!CanUseFeedback(nexus)) return NewInsufficientFeedback(nexus.kind());
if (nexus.IsUninitialized()) return NewInsufficientFeedback(nexus.kind());
base::Optional<HeapObjectRef> target_ref;
{
......
@@ -118,16 +118,6 @@ class V8_EXPORT_PRIVATE JSHeapBroker {
bool tracing_enabled() const { return tracing_enabled_; }
bool is_concurrent_inlining() const { return is_concurrent_inlining_; }
bool is_isolate_bootstrapping() const { return is_isolate_bootstrapping_; }
bool is_native_context_independent() const {
// TODO(jgruber,v8:8888): Remove dependent code.
return false;
}
bool generate_full_feedback_collection() const {
// NCI code currently collects full feedback.
DCHECK_IMPLIES(is_native_context_independent(),
CollectFeedbackInGenericLowering());
return is_native_context_independent();
}
bool is_turboprop() const { return code_kind_ == CodeKind::TURBOPROP; }
NexusConfig feedback_nexus_config() const {
@@ -372,7 +362,6 @@ class V8_EXPORT_PRIVATE JSHeapBroker {
friend class PropertyCellData;
ProcessedFeedback const& GetFeedback(FeedbackSource const& source) const;
bool CanUseFeedback(const FeedbackNexus& nexus) const;
const ProcessedFeedback& NewInsufficientFeedback(FeedbackSlotKind kind) const;
// Bottleneck FeedbackNexus access here, for storage in the broker
......
@@ -2349,16 +2349,7 @@ Reduction JSTypedLowering::ReduceJSResolvePromise(Node* node) {
}
Reduction JSTypedLowering::Reduce(Node* node) {
const IrOpcode::Value opcode = node->opcode();
if (broker()->generate_full_feedback_collection() &&
IrOpcode::IsFeedbackCollectingOpcode(opcode)) {
// In NCI code, it is not valid to reduce feedback-collecting JS opcodes
// into non-feedback-collecting lower-level opcodes; missed feedback would
// result in soft deopts.
return NoChange();
}
switch (opcode) {
switch (node->opcode()) {
case IrOpcode::kJSEqual:
return ReduceJSEqual(node);
case IrOpcode::kJSStrictEqual:
......
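For reference, the removed dispatch guard had the same shape as the bytecode-graph-builder gate above: in full-feedback mode, a reducer must not replace a feedback-collecting JS opcode with lower-level nodes, or the missing feedback later causes soft deopts. A compact sketch with illustrative types, not the V8 reducer API:

enum class Opcode { kJSEqual, kJSStrictEqual, kJSStackCheck };
enum class Reduction { kNoChange, kChanged };

bool IsFeedbackCollectingOpcode(Opcode op) {
  return op != Opcode::kJSStackCheck;  // assumption for the sketch
}

// Shape of the guard removed from the top of JSTypedLowering::Reduce().
Reduction Reduce(Opcode op, bool generate_full_feedback) {
  if (generate_full_feedback && IsFeedbackCollectingOpcode(op)) {
    return Reduction::kNoChange;  // keep the generic, feedback-writing node
  }
  return Reduction::kChanged;
}

int main() {
  return Reduce(Opcode::kJSEqual, true) == Reduction::kNoChange ? 0 : 1;
}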