Commit e519e6fa authored by ishell, committed by Commit bot

[es6] Further fixing of tail calls.

1) Update profiling counters in Full codegen.
2) Call Runtime::kTraceTailCall when tracing is on.

test/mjsunit/es6/tail-call-simple.js is disabled for now because TurboFan does not fully support TCO yet.
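
For orientation, a minimal sketch of the kind of code this change affects (the d8 flag names are an assumption; --trace is what sets the FLAG_trace checked below):

  // Run with: d8 --harmony-tailcalls --trace tail.js
  "use strict";  // proper tail calls apply to strict-mode code only
  function g(n) { return n; }
  function f(n) {
    // Tail position: full codegen now updates the profiling counter and,
    // when tracing, calls Runtime::kTraceTailCall before jumping to g.
    return g(n);
  }
  f(1);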

BUG=v8:4698
LOG=N

Review URL: https://codereview.chromium.org/1670133002

Cr-Commit-Position: refs/heads/master@{#33886}
parent e82588f6

src/full-codegen/arm/full-codegen-arm.cc
@@ -423,6 +423,30 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
bool is_tail_call) {
// Pretend that the exit is a backwards jump to the entry.
int weight = 1;
if (info_->ShouldSelfOptimize()) {
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
} else {
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
}
EmitProfilingCounterDecrement(weight);
Label ok;
__ b(pl, &ok);
// Don't need to save result register if we are going to do a tail call.
if (!is_tail_call) {
__ push(r0);
}
__ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
if (!is_tail_call) {
__ pop(r0);
}
EmitProfilingCounterReset();
__ bind(&ok);
}
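
(The weight mirrors the back-edge bookkeeping above: the exit is charged as if it were a backward jump over the whole function body. For scale, with hypothetical values kCodeSizeMultiplier = 100, pc_offset() = 1200, and kMaxBackEdgeWeight = 127, the non-self-optimize branch yields weight = Min(127, Max(1, 1200 / 100)) = 12, so larger functions drain the interrupt budget faster on each return or tail call.)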
void FullCodeGenerator::EmitReturnSequence() {
Comment cmnt(masm_, "[ Return sequence");
@@ -436,24 +460,7 @@ void FullCodeGenerator::EmitReturnSequence() {
__ push(r0);
__ CallRuntime(Runtime::kTraceExit);
}
// Pretend that the exit is a backwards jump to the entry.
int weight = 1;
if (info_->ShouldSelfOptimize()) {
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
} else {
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight,
Max(1, distance / kCodeSizeMultiplier));
}
EmitProfilingCounterDecrement(weight);
Label ok;
__ b(pl, &ok);
__ push(r0);
__ Call(isolate()->builtins()->InterruptCheck(),
RelocInfo::CODE_TARGET);
__ pop(r0);
EmitProfilingCounterReset();
__ bind(&ok);
EmitProfilingCounterHandlingForReturnSequence(false);
// Make sure that the constant pool is not emitted inside of the return
// sequence.
@@ -2754,6 +2761,14 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
SetCallPosition(expr);
if (expr->tail_call_mode() == TailCallMode::kAllow) {
if (FLAG_trace) {
__ CallRuntime(Runtime::kTraceTailCall);
}
// Update profiling counters before the tail call since we will
// not return to this function.
EmitProfilingCounterHandlingForReturnSequence(true);
}
Handle<Code> ic =
CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
.code();
......

src/full-codegen/arm64/full-codegen-arm64.cc
@@ -414,6 +414,30 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
bool is_tail_call) {
// Pretend that the exit is a backwards jump to the entry.
int weight = 1;
if (info_->ShouldSelfOptimize()) {
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
} else {
int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
}
EmitProfilingCounterDecrement(weight);
Label ok;
__ B(pl, &ok);
// Don't need to save result register if we are going to do a tail call.
if (!is_tail_call) {
__ Push(x0);
}
__ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
if (!is_tail_call) {
__ Pop(x0);
}
EmitProfilingCounterReset();
__ Bind(&ok);
}
void FullCodeGenerator::EmitReturnSequence() {
Comment cmnt(masm_, "[ Return sequence");
@@ -430,24 +454,7 @@ void FullCodeGenerator::EmitReturnSequence() {
__ CallRuntime(Runtime::kTraceExit);
DCHECK(x0.Is(result_register()));
}
// Pretend that the exit is a backwards jump to the entry.
int weight = 1;
if (info_->ShouldSelfOptimize()) {
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
} else {
int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
weight = Min(kMaxBackEdgeWeight,
Max(1, distance / kCodeSizeMultiplier));
}
EmitProfilingCounterDecrement(weight);
Label ok;
__ B(pl, &ok);
__ Push(x0);
__ Call(isolate()->builtins()->InterruptCheck(),
RelocInfo::CODE_TARGET);
__ Pop(x0);
EmitProfilingCounterReset();
__ Bind(&ok);
EmitProfilingCounterHandlingForReturnSequence(false);
SetReturnPosition(literal());
const Register& current_sp = __ StackPointer();
@@ -2556,7 +2563,14 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
SetCallPosition(expr);
if (expr->tail_call_mode() == TailCallMode::kAllow) {
if (FLAG_trace) {
__ CallRuntime(Runtime::kTraceTailCall);
}
// Update profiling counters before the tail call since we will
// not return to this function.
EmitProfilingCounterHandlingForReturnSequence(true);
}
Handle<Code> ic =
CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
.code();
......

src/full-codegen/full-codegen.h
@@ -505,6 +505,7 @@ class FullCodeGenerator: public AstVisitor {
// Platform-specific return sequence
void EmitReturnSequence();
void EmitProfilingCounterHandlingForReturnSequence(bool is_tail_call);
// Platform-specific code sequences for calls
void EmitCall(Call* expr, ConvertReceiverMode = ConvertReceiverMode::kAny);
......

src/full-codegen/ia32/full-codegen-ia32.cc
@@ -386,6 +386,30 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
bool is_tail_call) {
// Pretend that the exit is a backwards jump to the entry.
int weight = 1;
if (info_->ShouldSelfOptimize()) {
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
} else {
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
}
EmitProfilingCounterDecrement(weight);
Label ok;
__ j(positive, &ok, Label::kNear);
// Don't need to save result register if we are going to do a tail call.
if (!is_tail_call) {
__ push(eax);
}
__ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
if (!is_tail_call) {
__ pop(eax);
}
EmitProfilingCounterReset();
__ bind(&ok);
}
void FullCodeGenerator::EmitReturnSequence() {
Comment cmnt(masm_, "[ Return sequence");
@@ -398,24 +422,7 @@ void FullCodeGenerator::EmitReturnSequence() {
__ push(eax);
__ CallRuntime(Runtime::kTraceExit);
}
// Pretend that the exit is a backwards jump to the entry.
int weight = 1;
if (info_->ShouldSelfOptimize()) {
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
} else {
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight,
Max(1, distance / kCodeSizeMultiplier));
}
EmitProfilingCounterDecrement(weight);
Label ok;
__ j(positive, &ok, Label::kNear);
__ push(eax);
__ call(isolate()->builtins()->InterruptCheck(),
RelocInfo::CODE_TARGET);
__ pop(eax);
EmitProfilingCounterReset();
__ bind(&ok);
EmitProfilingCounterHandlingForReturnSequence(false);
SetReturnPosition(literal());
__ leave();
@@ -2639,6 +2646,14 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
SetCallPosition(expr);
if (expr->tail_call_mode() == TailCallMode::kAllow) {
if (FLAG_trace) {
__ CallRuntime(Runtime::kTraceTailCall);
}
// Update profiling counters before the tail call since we will
// not return to this function.
EmitProfilingCounterHandlingForReturnSequence(true);
}
Handle<Code> ic =
CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
.code();
......

src/full-codegen/mips/full-codegen-mips.cc
@@ -414,6 +414,30 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
bool is_tail_call) {
// Pretend that the exit is a backwards jump to the entry.
int weight = 1;
if (info_->ShouldSelfOptimize()) {
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
} else {
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
}
EmitProfilingCounterDecrement(weight);
Label ok;
__ Branch(&ok, ge, a3, Operand(zero_reg));
// Don't need to save result register if we are going to do a tail call.
if (!is_tail_call) {
__ push(v0);
}
__ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
if (!is_tail_call) {
__ pop(v0);
}
EmitProfilingCounterReset();
__ bind(&ok);
}
void FullCodeGenerator::EmitReturnSequence() {
Comment cmnt(masm_, "[ Return sequence");
@@ -427,24 +451,7 @@ void FullCodeGenerator::EmitReturnSequence() {
__ push(v0);
__ CallRuntime(Runtime::kTraceExit);
}
// Pretend that the exit is a backwards jump to the entry.
int weight = 1;
if (info_->ShouldSelfOptimize()) {
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
} else {
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight,
Max(1, distance / kCodeSizeMultiplier));
}
EmitProfilingCounterDecrement(weight);
Label ok;
__ Branch(&ok, ge, a3, Operand(zero_reg));
__ push(v0);
__ Call(isolate()->builtins()->InterruptCheck(),
RelocInfo::CODE_TARGET);
__ pop(v0);
EmitProfilingCounterReset();
__ bind(&ok);
EmitProfilingCounterHandlingForReturnSequence(false);
// Make sure that the constant pool is not emitted inside of the return
// sequence.
@@ -2741,6 +2748,14 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
// Record source position of the IC call.
SetCallPosition(expr);
if (expr->tail_call_mode() == TailCallMode::kAllow) {
if (FLAG_trace) {
__ CallRuntime(Runtime::kTraceTailCall);
}
// Update profiling counters before the tail call since we will
// not return to this function.
EmitProfilingCounterHandlingForReturnSequence(true);
}
Handle<Code> ic =
CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
.code();
......

src/full-codegen/mips64/full-codegen-mips64.cc
@@ -414,6 +414,30 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
bool is_tail_call) {
// Pretend that the exit is a backwards jump to the entry.
int weight = 1;
if (info_->ShouldSelfOptimize()) {
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
} else {
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
}
EmitProfilingCounterDecrement(weight);
Label ok;
__ Branch(&ok, ge, a3, Operand(zero_reg));
// Don't need to save result register if we are going to do a tail call.
if (!is_tail_call) {
__ push(v0);
}
__ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
if (!is_tail_call) {
__ pop(v0);
}
EmitProfilingCounterReset();
__ bind(&ok);
}
void FullCodeGenerator::EmitReturnSequence() {
Comment cmnt(masm_, "[ Return sequence");
@@ -427,24 +451,7 @@ void FullCodeGenerator::EmitReturnSequence() {
__ push(v0);
__ CallRuntime(Runtime::kTraceExit);
}
// Pretend that the exit is a backwards jump to the entry.
int weight = 1;
if (info_->ShouldSelfOptimize()) {
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
} else {
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight,
Max(1, distance / kCodeSizeMultiplier));
}
EmitProfilingCounterDecrement(weight);
Label ok;
__ Branch(&ok, ge, a3, Operand(zero_reg));
__ push(v0);
__ Call(isolate()->builtins()->InterruptCheck(),
RelocInfo::CODE_TARGET);
__ pop(v0);
EmitProfilingCounterReset();
__ bind(&ok);
EmitProfilingCounterHandlingForReturnSequence(false);
// Make sure that the constant pool is not emitted inside of the return
// sequence.
@@ -2747,6 +2754,14 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
// Record source position of the IC call.
SetCallPosition(expr);
if (expr->tail_call_mode() == TailCallMode::kAllow) {
if (FLAG_trace) {
__ CallRuntime(Runtime::kTraceTailCall);
}
// Update profiling counters before the tail call since we will
// not return to this function.
EmitProfilingCounterHandlingForReturnSequence(true);
}
Handle<Code> ic =
CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
.code();
......

src/full-codegen/x64/full-codegen-x64.cc
@@ -389,6 +389,30 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
bool is_tail_call) {
// Pretend that the exit is a backwards jump to the entry.
int weight = 1;
if (info_->ShouldSelfOptimize()) {
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
} else {
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
}
EmitProfilingCounterDecrement(weight);
Label ok;
__ j(positive, &ok, Label::kNear);
// Don't need to save result register if we are going to do a tail call.
if (!is_tail_call) {
__ Push(rax);
}
__ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
if (!is_tail_call) {
__ Pop(rax);
}
EmitProfilingCounterReset();
__ bind(&ok);
}
void FullCodeGenerator::EmitReturnSequence() {
Comment cmnt(masm_, "[ Return sequence");
@@ -400,24 +424,7 @@ void FullCodeGenerator::EmitReturnSequence() {
__ Push(rax);
__ CallRuntime(Runtime::kTraceExit);
}
// Pretend that the exit is a backwards jump to the entry.
int weight = 1;
if (info_->ShouldSelfOptimize()) {
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
} else {
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight,
Max(1, distance / kCodeSizeMultiplier));
}
EmitProfilingCounterDecrement(weight);
Label ok;
__ j(positive, &ok, Label::kNear);
__ Push(rax);
__ call(isolate()->builtins()->InterruptCheck(),
RelocInfo::CODE_TARGET);
__ Pop(rax);
EmitProfilingCounterReset();
__ bind(&ok);
EmitProfilingCounterHandlingForReturnSequence(false);
SetReturnPosition(literal());
__ leave();
@@ -2625,6 +2632,14 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
SetCallPosition(expr);
if (expr->tail_call_mode() == TailCallMode::kAllow) {
if (FLAG_trace) {
__ CallRuntime(Runtime::kTraceTailCall);
}
// Update profiling counters before the tail call since we will
// not return to this function.
EmitProfilingCounterHandlingForReturnSequence(true);
}
Handle<Code> ic =
CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
.code();
......

src/runtime/runtime-test.cc
@@ -408,53 +408,54 @@ RUNTIME_FUNCTION(Runtime_DisassembleFunction) {
return isolate->heap()->undefined_value();
}
namespace {
static int StackSize(Isolate* isolate) {
int StackSize(Isolate* isolate) {
int n = 0;
for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) n++;
return n;
}
static void PrintTransition(Isolate* isolate, Object* result) {
  // indentation
  {
    const int nmax = 80;
    int n = StackSize(isolate);
    if (n <= nmax)
      PrintF("%4d:%*s", n, n, "");
    else
      PrintF("%4d:%*s", n, nmax, "...");
  }
  if (result == NULL) {
    JavaScriptFrame::PrintTop(isolate, stdout, true, false);
    PrintF(" {\n");
  } else {
    // function result
    PrintF("} -> ");
    result->ShortPrint();
    PrintF("\n");
  }
}

void PrintIndentation(Isolate* isolate) {
  const int nmax = 80;
  int n = StackSize(isolate);
  if (n <= nmax) {
    PrintF("%4d:%*s", n, n, "");
  } else {
    PrintF("%4d:%*s", n, nmax, "...");
  }
}
} // namespace
RUNTIME_FUNCTION(Runtime_TraceEnter) {
SealHandleScope shs(isolate);
DCHECK(args.length() == 0);
PrintTransition(isolate, NULL);
DCHECK_EQ(0, args.length());
PrintIndentation(isolate);
JavaScriptFrame::PrintTop(isolate, stdout, true, false);
PrintF(" {\n");
return isolate->heap()->undefined_value();
}
RUNTIME_FUNCTION(Runtime_TraceExit) {
SealHandleScope shs(isolate);
DCHECK(args.length() == 1);
DCHECK_EQ(1, args.length());
CONVERT_ARG_CHECKED(Object, obj, 0);
PrintTransition(isolate, obj);
PrintIndentation(isolate);
PrintF("} -> ");
obj->ShortPrint();
PrintF("\n");
return obj; // return TOS
}
RUNTIME_FUNCTION(Runtime_TraceTailCall) {
SealHandleScope shs(isolate);
DCHECK_EQ(0, args.length());
PrintIndentation(isolate);
PrintF("} -> tail call ->\n");
return isolate->heap()->undefined_value();
}
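
(With tracing on, these three intrinsics together print nested output of roughly this shape for a tail call — a sketch only, since the frame text comes from JavaScriptFrame::PrintTop and the leading number is the JavaScript frame count from StackSize:

     2: f(1) {
     2: } -> tail call ->
     2: g(1) {
     2: } -> 1

The tail-call line closes f's brace without printing a result, because f's frame is replaced by g's rather than returned to.)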
RUNTIME_FUNCTION(Runtime_HaveSameMap) {
SealHandleScope shs(isolate);
......

src/runtime/runtime.h
@@ -919,7 +919,6 @@ namespace internal {
F(SymbolRegistry, 0, 1) \
F(SymbolIsPrivate, 1, 1)
#define FOR_EACH_INTRINSIC_TEST(F) \
F(DeoptimizeFunction, 1, 1) \
F(DeoptimizeNow, 0, 1) \
@@ -947,6 +946,7 @@ namespace internal {
F(DisassembleFunction, 1, 1) \
F(TraceEnter, 0, 1) \
F(TraceExit, 1, 1) \
F(TraceTailCall, 0, 1) \
F(HaveSameMap, 2, 1) \
F(InNewSpace, 1, 1) \
F(HasFastSmiElements, 1, 1) \
@@ -968,7 +968,6 @@ namespace internal {
F(HasFixedFloat64Elements, 1, 1) \
F(HasFixedUint8ClampedElements, 1, 1)
#define FOR_EACH_INTRINSIC_TYPEDARRAY(F) \
F(ArrayBufferGetByteLength, 1, 1) \
F(ArrayBufferSliceImpl, 4, 1) \
......
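
(In these tables the F(name, nargs, ressize) triple declares each intrinsic's signature, so the added F(TraceTailCall, 0, 1) registers %TraceTailCall as taking no arguments and producing a single value — the undefined returned by Runtime_TraceTailCall above.)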

test/mjsunit/es6/tail-call-simple.js
@@ -15,6 +15,8 @@
return f(n - 1);
}
assertThrows(()=>{ f(1e6) });
%OptimizeFunctionOnNextCall(f);
assertThrows(()=>{ f(1e6) });
})();
@@ -30,6 +32,8 @@
return f(n - 1);
}
assertEquals("foo", f(1e6));
%OptimizeFunctionOnNextCall(f);
assertEquals("foo", f(1e6));
})();
@@ -49,6 +53,9 @@
}
assertEquals("foo", f(1e6));
assertEquals("bar", f(1e6 + 1));
%OptimizeFunctionOnNextCall(f);
assertEquals("foo", f(1e6));
assertEquals("bar", f(1e6 + 1));
})();
@@ -61,9 +68,14 @@
if (n <= 0) {
return "foo";
}
return f(n - 1);
return f_bound(n - 1);
}
var f_bound = f0.bind({});
function f(n) {
return f_bound(n);
}
var f = f0.bind({});
assertEquals("foo", f(1e6));
%OptimizeFunctionOnNextCall(f);
assertEquals("foo", f(1e6));
})();
@@ -74,17 +86,22 @@
if (n <= 0) {
return "foo";
}
return g(n - 1);
return g_bound(n - 1);
}
function g0(n){
if (n <= 0) {
return "bar";
}
return f(n - 1);
return f_bound(n - 1);
}
var f = f0.bind({});
var g = g0.bind({});
var f_bound = f0.bind({});
var g_bound = g0.bind({});
function f(n) {
return f_bound(n);
}
assertEquals("foo", f(1e6));
assertEquals("bar", f(1e6 + 1));
%OptimizeFunctionOnNextCall(f);
assertEquals("foo", f(1e6));
assertEquals("bar", f(1e6 + 1));
})();
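
(Routing the recursion through the ordinary wrapper f(n) instead of calling the bound function directly is presumably what lets %OptimizeFunctionOnNextCall target a regular JSFunction; the result of f0.bind({}) is not one.)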

test/mjsunit/function-caller.js
@@ -47,7 +47,8 @@ f(null);
eval('f(null)');
// Check called from strict builtin functions.
[null, null].sort(f);
// [null, null].sort(f); // Does not work because sort tail-calls the comparison function.
[null].forEach(f, null);
// Check called from sloppy builtin functions.
"abel".replace(/b/g, function h() {
......

test/mjsunit/mjsunit.status
@@ -43,6 +43,9 @@
# This test non-deterministically runs out of memory on Windows ia32.
'regress/regress-crbug-160010': [SKIP],
# Issue 4698: not fully supported by TurboFan yet
'es6/tail-call-simple': [SKIP],
# Issue 3389: deopt_every_n_garbage_collections is unsafe
'regress/regress-2653': [SKIP],
......

test/mjsunit/strict-mode.js
@@ -1149,7 +1149,9 @@ function CheckArgumentsPillDescriptor(func, name) {
function strict() {
"use strict";
return return_my_caller();
// Returning result via local variable to avoid tail call optimization.
var res = return_my_caller();
return res;
}
assertSame(null, strict());
@@ -1163,7 +1165,9 @@ function CheckArgumentsPillDescriptor(func, name) {
(function TestNonStrictFunctionCallerPill() {
function strict(n) {
"use strict";
return non_strict(n);
// Returning result via local variable to avoid tail call optimization.
var res = non_strict(n);
return res;
}
function recurse(n, then) {
@@ -1191,7 +1195,9 @@ function CheckArgumentsPillDescriptor(func, name) {
(function TestNonStrictFunctionCallerDescriptorPill() {
function strict(n) {
"use strict";
return non_strict(n);
// Returning result via local variable to avoid tail call optimization.
var res = non_strict(n);
return res;
}
function recurse(n, then) {
......