Commit 63dd7954 authored by Michal Majewski, committed by Commit Bot

Revive stress deopt counter in turbofan

Adds the counter to x64 only.

Bug: v8:6900
Change-Id: Ia290102b38f029a0b71c40e4b00ecc5f07dfa59c
Reviewed-on: https://chromium-review.googlesource.com/704678
Commit-Queue: Michał Majewski <majeski@google.com>
Reviewed-by: Michael Achenbach <machenbach@chromium.org>
Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
Cr-Commit-Position: refs/heads/master@{#48378}
parent d9f26340
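For context before the diff: with --deopt-every-n-times=N, code generated by TurboFan decrements a counter associated with the isolate (reachable from generated code through ExternalReference::stress_deopt_count) at every deoptimization branch; once the counter reaches zero it is reset to N and a deoptimization is forced even though the branch condition did not fire. Below is a minimal C++ sketch of that counter logic, with illustrative names rather than the actual V8 API; the real implementation is emitted inline as x64 machine code in AssembleArchDeoptBranch further down.

// Sketch only -- models the semantics of --deopt-every-n-times, not V8 code.
#include <cstdint>

struct StressDeoptCounter {
  int64_t remaining;  // stands in for the value behind
                      // ExternalReference::stress_deopt_count(isolate)
};

// Returns true when the current deopt check should force a deoptimization.
bool ForceDeoptAtThisCheck(StressDeoptCounter* c, int64_t deopt_every_n_times) {
  if (deopt_every_n_times <= 0) return false;  // stress mode disabled
  if (--c->remaining != 0) return false;       // not the Nth check yet
  c->remaining = deopt_every_n_times;          // reset for the next round
  return true;                                 // take the deoptimization exit
}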
@@ -2648,6 +2648,10 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
   if (!branch->fallthru) __ b(flabel);  // no fallthru to flabel.
 }

+void CodeGenerator::AssembleArchDeoptBranch(Instruction* instr,
+                                            BranchInfo* branch) {
+  AssembleArchBranch(instr, branch);
+}

 void CodeGenerator::AssembleArchJump(RpoNumber target) {
   if (!IsNextInAssemblyOrder(target)) __ b(GetLabel(target));
@@ -2325,6 +2325,10 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
   if (!branch->fallthru) __ B(flabel);  // no fallthru to flabel.
 }

+void CodeGenerator::AssembleArchDeoptBranch(Instruction* instr,
+                                            BranchInfo* branch) {
+  AssembleArchBranch(instr, branch);
+}

 void CodeGenerator::AssembleArchJump(RpoNumber target) {
   if (!IsNextInAssemblyOrder(target)) __ B(GetLabel(target));
@@ -504,7 +504,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleInstruction(
       branch.false_label = &continue_label;
       branch.fallthru = true;
       // Assemble architecture-specific branch.
-      AssembleArchBranch(instr, &branch);
+      AssembleArchDeoptBranch(instr, &branch);
       tasm()->bind(&continue_label);
       break;
     }
@@ -158,6 +158,10 @@ class CodeGenerator final : public GapResolver::Assembler {
   CodeGenResult AssembleArchInstruction(Instruction* instr);
   void AssembleArchJump(RpoNumber target);
   void AssembleArchBranch(Instruction* instr, BranchInfo* branch);
+  // Generates special branch for deoptimization condition.
+  void AssembleArchDeoptBranch(Instruction* instr, BranchInfo* branch);
   void AssembleArchBoolean(Instruction* instr, FlagsCondition condition);
   void AssembleArchTrap(Instruction* instr, FlagsCondition condition);
   void AssembleArchLookupSwitch(Instruction* instr);
@@ -2457,6 +2457,10 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
   if (!branch->fallthru) __ jmp(flabel);
 }

+void CodeGenerator::AssembleArchDeoptBranch(Instruction* instr,
+                                            BranchInfo* branch) {
+  AssembleArchBranch(instr, branch);
+}

 void CodeGenerator::AssembleArchJump(RpoNumber target) {
   if (!IsNextInAssemblyOrder(target)) __ jmp(GetLabel(target));
@@ -3065,6 +3065,10 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
                           branch->fallthru);
 }

+void CodeGenerator::AssembleArchDeoptBranch(Instruction* instr,
+                                            BranchInfo* branch) {
+  AssembleArchBranch(instr, branch);
+}

 void CodeGenerator::AssembleArchJump(RpoNumber target) {
   if (!IsNextInAssemblyOrder(target)) __ Branch(GetLabel(target));
@@ -3361,6 +3361,10 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
                           branch->fallthru);
 }

+void CodeGenerator::AssembleArchDeoptBranch(Instruction* instr,
+                                            BranchInfo* branch) {
+  AssembleArchBranch(instr, branch);
+}

 void CodeGenerator::AssembleArchJump(RpoNumber target) {
   if (!IsNextInAssemblyOrder(target)) __ Branch(GetLabel(target));
@@ -2110,6 +2110,10 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
   if (!branch->fallthru) __ b(flabel);  // no fallthru to flabel.
 }

+void CodeGenerator::AssembleArchDeoptBranch(Instruction* instr,
+                                            BranchInfo* branch) {
+  AssembleArchBranch(instr, branch);
+}

 void CodeGenerator::AssembleArchJump(RpoNumber target) {
   if (!IsNextInAssemblyOrder(target)) __ b(GetLabel(target));
@@ -2522,6 +2522,11 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
   if (!branch->fallthru) __ b(flabel);  // no fallthru to flabel.
 }

+void CodeGenerator::AssembleArchDeoptBranch(Instruction* instr,
+                                            BranchInfo* branch) {
+  AssembleArchBranch(instr, branch);
+}

 void CodeGenerator::AssembleArchJump(RpoNumber target) {
   if (!IsNextInAssemblyOrder(target)) __ b(GetLabel(target));
 }
@@ -2830,6 +2830,46 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
   if (!branch->fallthru) __ jmp(flabel, flabel_distance);
 }

+void CodeGenerator::AssembleArchDeoptBranch(Instruction* instr,
+                                            BranchInfo* branch) {
+  Label::Distance flabel_distance =
+      branch->fallthru ? Label::kNear : Label::kFar;
+  Label* tlabel = branch->true_label;
+  Label* flabel = branch->false_label;
+  Label nodeopt;
+  if (branch->condition == kUnorderedEqual) {
+    __ j(parity_even, flabel, flabel_distance);
+  } else if (branch->condition == kUnorderedNotEqual) {
+    __ j(parity_even, tlabel);
+  }
+  __ j(FlagsConditionToCondition(branch->condition), tlabel);
+
+  if (FLAG_deopt_every_n_times > 0) {
+    ExternalReference counter =
+        ExternalReference::stress_deopt_count(isolate());
+
+    __ pushfq();
+    __ pushq(rax);
+    __ load_rax(counter);
+    __ decl(rax);
+    __ j(not_zero, &nodeopt);
+
+    __ Set(rax, FLAG_deopt_every_n_times);
+    __ store_rax(counter);
+    __ popq(rax);
+    __ popfq();
+    __ jmp(tlabel);
+
+    __ bind(&nodeopt);
+    __ store_rax(counter);
+    __ popq(rax);
+    __ popfq();
+  }
+
+  if (!branch->fallthru) {
+    __ jmp(flabel, flabel_distance);
+  }
+}

 void CodeGenerator::AssembleArchJump(RpoNumber target) {
   if (!IsNextInAssemblyOrder(target)) __ jmp(GetLabel(target));
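A few notes on the x64 sequence above: RFLAGS (pushfq/popfq) and rax (pushq/popq) are saved and restored around the counter bookkeeping because load_rax, decl, and Set clobber them and they may still be live in the surrounding optimized code. On the forced path the counter is reset to FLAG_deopt_every_n_times before jumping to the deoptimization label; on the normal path the decremented value is stored back and execution falls through to the regular branch handling. Because the counter decrements once per executed deopt check, the second stress-deopt-count test below, whose function has two deopt points, expects the counter to drop by two on every call (6, 4, 2, then forced deopt and reset).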
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Flags: --allow-natives-syntax --deopt-every-n-times=0 --opt --no-always-opt

// Check that --deopt-every-n-times 0 doesn't deopt
function f(x) {
  return x + 1;
}

f(0);
%OptimizeFunctionOnNextCall(f);
f(1);
assertOptimized(f, undefined, undefined, false);

f(1);
assertOptimized(f, undefined, undefined, false);
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Flags: --allow-natives-syntax --opt --no-always-opt --deopt-every-n-times=6

// Check that stress deopt count resets correctly

// Function with two deopt points
function f(x) {
  return x + 1;
}

f(1);
%OptimizeFunctionOnNextCall(f);

// stress_deopt_count == 6
f(1);
assertOptimized(f, undefined, undefined, false);

// stress_deopt_count == 4
f(1);
assertOptimized(f, undefined, undefined, false);

// stress_deopt_count == 2
f(1);
// deopt & counter reset
assertUnoptimized(f, undefined, undefined, false);

// stress_deopt_count == 6
%OptimizeFunctionOnNextCall(f);
f(1);
assertOptimized(f, undefined, undefined, false);

// stress_deopt_count == 4
f(1);
assertOptimized(f, undefined, undefined, false);

// stress_deopt_count == 2
f(1);
// deopt & counter reset
assertUnoptimized(f, undefined, undefined, false);
@@ -582,7 +582,8 @@ var failWithMessage;
     return OptimizationStatusImpl(fun, sync_opt);
   }

-  assertUnoptimized = function assertUnoptimized(fun, sync_opt, name_opt) {
+  assertUnoptimized = function assertUnoptimized(fun, sync_opt, name_opt,
+                                                 skip_if_maybe_deopted = true) {
     if (sync_opt === undefined) sync_opt = "";
     var opt_status = OptimizationStatus(fun, sync_opt);
     // Tests that use assertUnoptimized() do not make sense if --always-opt
@@ -590,7 +591,8 @@ var failWithMessage;
     assertFalse((opt_status & V8OptimizationStatus.kAlwaysOptimize) !== 0,
                 "test does not make sense with --always-opt");
     assertTrue((opt_status & V8OptimizationStatus.kIsFunction) !== 0, name_opt);
-    if ((opt_status & V8OptimizationStatus.kMaybeDeopted) !== 0) {
+    if (skip_if_maybe_deopted &&
+        (opt_status & V8OptimizationStatus.kMaybeDeopted) !== 0) {
       // When --deopt-every-n-times flag is specified it's no longer guaranteed
       // that particular function is still deoptimized, so keep running the test
       // to stress test the deoptimizer.
@@ -599,7 +601,8 @@ var failWithMessage;
     assertFalse((opt_status & V8OptimizationStatus.kOptimized) !== 0, name_opt);
   }

-  assertOptimized = function assertOptimized(fun, sync_opt, name_opt) {
+  assertOptimized = function assertOptimized(fun, sync_opt, name_opt,
+                                             skip_if_maybe_deopted = true) {
     if (sync_opt === undefined) sync_opt = "";
     var opt_status = OptimizationStatus(fun, sync_opt);
     // Tests that use assertOptimized() do not make sense if --no-opt
@@ -607,7 +610,8 @@ var failWithMessage;
     assertFalse((opt_status & V8OptimizationStatus.kNeverOptimize) !== 0,
                 "test does not make sense with --no-opt");
     assertTrue((opt_status & V8OptimizationStatus.kIsFunction) !== 0, name_opt);
-    if ((opt_status & V8OptimizationStatus.kMaybeDeopted) !== 0) {
+    if (skip_if_maybe_deopted &&
+        (opt_status & V8OptimizationStatus.kMaybeDeopted) !== 0) {
       // When --deopt-every-n-times flag is specified it's no longer guaranteed
       // that particular function is still optimized, so keep running the test
      // to stress test the deoptimizer.
@@ -657,4 +657,10 @@
   'mjsunit-assertion-error' : [SKIP],
 }],  # no_harness

+##############################################################################
+['arch != x64 or deopt_fuzzer', {
+  # Skip stress-deopt-count tests since the counter exists on x64 only.
+  'compiler/stress-deopt-count-*': [SKIP],
+}],  # arch != x64 or deopt_fuzzer
+
 ]