Commit e47d1757 authored by Leszek Swirski, committed by Commit Bot

[sfi] Remove opt_count

Remove opt_count from SFI, which only had two real uses:

  1. Detecting OSR in tests -- replaced with a stack walk in
     %GetOptimizationStatus
  2. Naming optimization log files -- replaced with the
     optimization id

This allows us to remove a field from the SFI, moving the
bailout reason into the counters field.

As a drive-by, add optimization marker information (e.g.
marked for optimization) to the optimization status.

Change-Id: Id77deb5dd5439dfba058a7e1e1748de26b717d0d
Reviewed-on: https://chromium-review.googlesource.com/592028
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Commit-Queue: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/master@{#47009}
parent 20d25f40
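
In test terms, the replacement for use (1) looks roughly like the sketch below: rather than polling an opt count on the SFI, a test polls the bits returned by %GetOptimizationStatus, which now include the topmost-frame information gathered by the stack walk. This is a minimal sketch, assuming d8 with --allow-natives-syntax, OSR enabled, and mjsunit.js loaded for the V8OptimizationStatus constants; the function name f is illustrative.

    // Before: while (%GetOptimizationCount(f) == 0) {}
    // After: spin until the stack walk reports a TurboFan frame on top,
    // i.e. OSR has replaced the currently running frame.
    function f() {
      while ((%GetOptimizationStatus(f) &
              V8OptimizationStatus.kTopmostFrameIsTurboFanned) === 0) {
        // The hot loop itself is what eventually triggers OSR.
      }
    }
    f();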
......@@ -166,11 +166,6 @@ void CompilationJob::RecordUnoptimizedCompilationStats() const {
void CompilationJob::RecordOptimizedCompilationStats() const {
DCHECK(info()->IsOptimizing());
Handle<JSFunction> function = info()->closure();
if (!function->IsOptimized()) {
// Concurrent recompilation and OSR may race. Increment only once.
int opt_count = function->shared()->opt_count();
function->shared()->set_opt_count(opt_count + 1);
}
double ms_creategraph = time_taken_to_prepare_.InMillisecondsF();
double ms_optimize = time_taken_to_execute_.InMillisecondsF();
double ms_codegen = time_taken_to_finalize_.InMillisecondsF();
......
......@@ -34,18 +34,15 @@ std::unique_ptr<char[]> GetVisualizerLogFileName(CompilationInfo* info,
const char* suffix) {
EmbeddedVector<char, 256> filename(0);
std::unique_ptr<char[]> debug_name = info->GetDebugName();
int optimization_id = info->optimization_id();
if (strlen(debug_name.get()) > 0) {
if (info->has_shared_info()) {
int attempt = info->shared_info()->opt_count();
SNPrintF(filename, "turbo-%s-%i", debug_name.get(), attempt);
} else {
SNPrintF(filename, "turbo-%s", debug_name.get());
}
SNPrintF(filename, "turbo-%s-%i", debug_name.get(), optimization_id);
} else if (info->has_shared_info()) {
int attempt = info->shared_info()->opt_count();
SNPrintF(filename, "turbo-%p-%i", static_cast<void*>(info), attempt);
SNPrintF(filename, "turbo-%p-%i",
static_cast<void*>(info->shared_info()->address()),
optimization_id);
} else {
SNPrintF(filename, "turbo-none-%s", phase);
SNPrintF(filename, "turbo-none-%i", optimization_id);
}
EmbeddedVector<char, 256> source_file(0);
bool source_available = false;
......
......@@ -2554,7 +2554,6 @@ Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfo(
#if V8_SFI_HAS_UNIQUE_ID
share->set_unique_id(isolate()->GetNextUniqueSharedFunctionInfoId());
#endif
share->set_counters(0);
// Set integer fields (smi or int, depending on the architecture).
share->set_length(0);
......@@ -2565,7 +2564,7 @@ Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfo(
share->set_function_token_position(0);
// All compiler hints default to false or 0.
share->set_compiler_hints(0);
share->set_opt_count_and_bailout_reason(0);
share->set_counters_and_bailout_reason(0);
share->set_kind(kind);
share->set_preparsed_scope_data(*null_value());
......
......@@ -13852,7 +13852,6 @@ void Map::StartInobjectSlackTracking() {
void SharedFunctionInfo::ResetForNewContext(int new_ic_age) {
set_ic_age(new_ic_age);
set_opt_count(0);
set_deopt_count(0);
}
......
......@@ -58,9 +58,8 @@ INT_ACCESSORS(SharedFunctionInfo, start_position_and_type,
INT_ACCESSORS(SharedFunctionInfo, function_token_position,
kFunctionTokenPositionOffset)
INT_ACCESSORS(SharedFunctionInfo, compiler_hints, kCompilerHintsOffset)
INT_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
kOptCountAndBailoutReasonOffset)
INT_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
INT_ACCESSORS(SharedFunctionInfo, counters_and_bailout_reason,
kCountersAndBailoutReasonOffset)
bool SharedFunctionInfo::has_shared_name() const {
return raw_name() != kNoSharedNameSentinel;
......@@ -356,28 +355,26 @@ void SharedFunctionInfo::set_inferred_name(String* inferred_name) {
set_function_identifier(inferred_name);
}
BIT_FIELD_ACCESSORS(SharedFunctionInfo, counters, ic_age,
BIT_FIELD_ACCESSORS(SharedFunctionInfo, counters_and_bailout_reason, ic_age,
SharedFunctionInfo::ICAgeBits)
BIT_FIELD_ACCESSORS(SharedFunctionInfo, counters, deopt_count,
SharedFunctionInfo::DeoptCountBits)
BIT_FIELD_ACCESSORS(SharedFunctionInfo, counters_and_bailout_reason,
deopt_count, SharedFunctionInfo::DeoptCountBits)
BIT_FIELD_ACCESSORS(SharedFunctionInfo, counters_and_bailout_reason,
disable_optimization_reason,
SharedFunctionInfo::DisabledOptimizationReasonBits)
void SharedFunctionInfo::increment_deopt_count() {
int value = counters();
int value = counters_and_bailout_reason();
int deopt_count = DeoptCountBits::decode(value);
// Saturate the deopt count when incrementing, rather than overflowing.
if (deopt_count < DeoptCountBits::kMax) {
set_counters(DeoptCountBits::update(value, deopt_count + 1));
set_counters_and_bailout_reason(
DeoptCountBits::update(value, deopt_count + 1));
}
}
BIT_FIELD_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason, opt_count,
SharedFunctionInfo::OptCountBits)
BIT_FIELD_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
disable_optimization_reason,
SharedFunctionInfo::DisabledOptimizationReasonBits)
bool SharedFunctionInfo::IsUserJavaScript() {
Object* script_obj = script();
if (script_obj->IsUndefined(GetIsolate())) return false;
......
......@@ -337,19 +337,12 @@ class SharedFunctionInfo : public HeapObject {
Handle<Object> GetSourceCode();
Handle<Object> GetSourceCodeHarmony();
// Number of times the function was optimized.
DECL_INT_ACCESSORS(opt_count)
// Number of times the function was deoptimized.
DECL_INT_ACCESSORS(deopt_count)
inline void increment_deopt_count();
// Stores deopt_count and ic_age as bit-fields.
inline void set_counters(int value);
inline int counters() const;
// Stores opt_count and bailout_reason as bit-fields.
DECL_INT_ACCESSORS(opt_count_and_bailout_reason)
// Stores deopt_count, ic_age and bailout_reason as bit-fields.
DECL_INT_ACCESSORS(counters_and_bailout_reason)
inline BailoutReason disable_optimization_reason() const;
inline void set_disable_optimization_reason(BailoutReason reason);
......@@ -456,8 +449,7 @@ class SharedFunctionInfo : public HeapObject {
V(kEndPositionOffset, kInt32Size) \
V(kFunctionTokenPositionOffset, kInt32Size) \
V(kCompilerHintsOffset, kInt32Size) \
V(kOptCountAndBailoutReasonOffset, kInt32Size) \
V(kCountersOffset, kInt32Size) \
V(kCountersAndBailoutReasonOffset, kInt32Size) \
/* Total size. */ \
V(kSize, 0)
......@@ -521,21 +513,14 @@ class SharedFunctionInfo : public HeapObject {
DEFINE_BIT_FIELDS(DEBUGGER_HINTS_BIT_FIELDS)
#undef DEBUGGER_HINTS_BIT_FIELDS
// Bit fields in |counters|.
#define COUNTERS_BIT_FIELDS(V, _) \
V(DeoptCountBits, int, 4, _) \
V(ICAgeBits, int, 8, _)
DEFINE_BIT_FIELDS(COUNTERS_BIT_FIELDS)
#undef COUNTERS_BIT_FIELDS
// Bit fields in |opt_count_and_bailout_reason|.
#define OPT_COUNT_AND_BAILOUT_REASON_BIT_FIELDS(V, _) \
V(OptCountBits, int, 22, _) \
// Bit fields in |counters_and_bailout_reason|.
#define COUNTERS_AND_BAILOUT_REASON_BIT_FIELDS(V, _) \
V(DeoptCountBits, int, 4, _) \
V(ICAgeBits, int, 8, _) \
V(DisabledOptimizationReasonBits, BailoutReason, 8, _)
DEFINE_BIT_FIELDS(OPT_COUNT_AND_BAILOUT_REASON_BIT_FIELDS)
#undef OPT_COUNT_AND_BAILOUT_REASON_BIT_FIELDS
DEFINE_BIT_FIELDS(COUNTERS_AND_BAILOUT_REASON_BIT_FIELDS)
#undef COUNTERS_AND_BAILOUT_REASON_BIT_FIELDS
private:
// [raw_name]: Function name string or kNoSharedNameSentinel.
......
......@@ -151,8 +151,8 @@ RUNTIME_FUNCTION(Runtime_SetCode) {
target_shared->set_end_position(source_shared->end_position());
bool was_native = target_shared->native();
target_shared->set_compiler_hints(source_shared->compiler_hints());
target_shared->set_opt_count_and_bailout_reason(
source_shared->opt_count_and_bailout_reason());
target_shared->set_counters_and_bailout_reason(
source_shared->counters_and_bailout_reason());
target_shared->set_native(was_native);
target_shared->set_function_literal_id(source_shared->function_literal_id());
......
......@@ -362,6 +362,16 @@ RUNTIME_FUNCTION(Runtime_GetOptimizationStatus) {
base::OS::Sleep(base::TimeDelta::FromMilliseconds(50));
}
}
if (function->IsMarkedForOptimization()) {
status |= static_cast<int>(OptimizationStatus::kMarkedForOptimization);
} else if (function->IsMarkedForConcurrentOptimization()) {
status |=
static_cast<int>(OptimizationStatus::kMarkedForConcurrentOptimization);
} else if (function->IsInOptimizationQueue()) {
status |= static_cast<int>(OptimizationStatus::kOptimizingConcurrently);
}
if (function->IsOptimized()) {
status |= static_cast<int>(OptimizationStatus::kOptimized);
if (function->code()->is_turbofanned()) {
......@@ -371,10 +381,29 @@ RUNTIME_FUNCTION(Runtime_GetOptimizationStatus) {
if (function->IsInterpreted()) {
status |= static_cast<int>(OptimizationStatus::kInterpreted);
}
// Additionally, detect activations of this frame on the stack, and report the
// status of the topmost frame.
JavaScriptFrame* frame = nullptr;
JavaScriptFrameIterator it(isolate);
while (!it.done()) {
if (it.frame()->function() == *function) {
frame = it.frame();
break;
}
it.Advance();
}
if (frame != nullptr) {
status |= static_cast<int>(OptimizationStatus::kIsExecuting);
if (frame->is_optimized()) {
status |=
static_cast<int>(OptimizationStatus::kTopmostFrameIsTurboFanned);
}
}
return Smi::FromInt(status);
}
RUNTIME_FUNCTION(Runtime_UnblockConcurrentRecompilation) {
DCHECK_EQ(0, args.length());
if (FLAG_block_concurrent_recompilation &&
......@@ -384,14 +413,6 @@ RUNTIME_FUNCTION(Runtime_UnblockConcurrentRecompilation) {
return isolate->heap()->undefined_value();
}
RUNTIME_FUNCTION(Runtime_GetOptimizationCount) {
HandleScope scope(isolate);
DCHECK_EQ(1, args.length());
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
return Smi::FromInt(function->shared()->opt_count());
}
RUNTIME_FUNCTION(Runtime_GetDeoptCount) {
HandleScope scope(isolate);
DCHECK_EQ(1, args.length());
......
......@@ -552,7 +552,6 @@ namespace internal {
F(NeverOptimizeFunction, 1, 1) \
F(GetOptimizationStatus, -1, 1) \
F(UnblockConcurrentRecompilation, 0, 1) \
F(GetOptimizationCount, 1, 1) \
F(GetDeoptCount, 1, 1) \
F(GetUndetectable, 0, 1) \
F(GetCallable, 0, 1) \
......@@ -839,6 +838,11 @@ enum class OptimizationStatus {
kOptimized = 1 << 4,
kTurboFanned = 1 << 5,
kInterpreted = 1 << 6,
kMarkedForOptimization = 1 << 7,
kMarkedForConcurrentOptimization = 1 << 8,
kOptimizingConcurrently = 1 << 9,
kIsExecuting = 1 << 10,
kTopmostFrameIsTurboFanned = 1 << 11,
};
} // namespace internal
......
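
The new marker bits let a test distinguish "marked for optimization" from "already optimized". The sketch below is an assumed usage, not code from this change: it presumes d8 with --allow-natives-syntax, mjsunit.js for V8OptimizationStatus and assertTrue, --no-always-opt, and non-concurrent recompilation (otherwise the concurrent marker bits would be reported instead).

    function add(a, b) { return a + b; }  // illustrative function
    add(1, 2);                            // compile once so it can be marked
    %OptimizeFunctionOnNextCall(add);     // sets the optimization marker
    var status = %GetOptimizationStatus(add);
    assertTrue((status & V8OptimizationStatus.kMarkedForOptimization) !== 0);
    add(1, 2);                            // the marked call optimizes
    status = %GetOptimizationStatus(add);
    assertTrue((status & V8OptimizationStatus.kOptimized) !== 0);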
......@@ -2265,7 +2265,7 @@ TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
CcTest::CollectAllGarbage();
CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
CHECK_EQ(0, f->shared()->opt_count());
CHECK_EQ(0, f->shared()->deopt_count());
CHECK_EQ(0, f->feedback_vector()->profiler_ticks());
}
......@@ -2308,7 +2308,7 @@ TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
CcTest::CollectAllGarbage();
CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
CHECK_EQ(0, f->shared()->opt_count());
CHECK_EQ(0, f->shared()->deopt_count());
CHECK_EQ(0, f->feedback_vector()->profiler_ticks());
}
......
......@@ -190,6 +190,7 @@ assertEquals(foo, array[2]);
(function literals_after_osr() {
var color = [0];
// Trigger OSR.
while (%GetOptimizationCount(literals_after_osr) == 0) {}
while ((%GetOptimizationStatus(literals_after_osr) &
V8OptimizationStatus.kTopmostFrameIsTurboFanned) === 0) {}
return [color[0]];
})();
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --allow-natives-syntax
// Flags: --noconcurrent-recompilation
if (%IsConcurrentRecompilationSupported()) {
print("Concurrent recompilation is turned on after all. Skipping this test.");
quit();
}
/**
* This class shows how to use %GetOptimizationCount() and
* %GetOptimizationStatus() to infer information about opts and deopts.
* Might be nice to put this into mjsunit.js, but that doesn't depend on
* the --allow-natives-syntax flag so far.
*/
function OptTracker() {
this.opt_counts_ = {};
}
/**
* Always call this at the beginning of your test, once for each function
* that you later want to track de/optimizations for. It is necessary because
* tests are sometimes executed several times in a row, and you want to
* disregard counts from previous runs.
*/
OptTracker.prototype.CheckpointOptCount = function(func) {
this.opt_counts_[func] = %GetOptimizationCount(func);
};
OptTracker.prototype.AssertOptCount = function(func, optcount) {
if (this.DisableAsserts_(func)) {
return;
}
assertEquals(optcount, this.GetOptCount_(func));
};
OptTracker.prototype.AssertDeoptCount = function(func, deopt_count) {
if (this.DisableAsserts_(func)) {
return;
}
assertEquals(deopt_count, this.GetDeoptCount_(func));
};
OptTracker.prototype.AssertDeoptHappened = function(func, expect_deopt) {
if (this.DisableAsserts_(func)) {
return;
}
if (expect_deopt) {
assertTrue(this.GetDeoptCount_(func) > 0);
} else {
assertEquals(0, this.GetDeoptCount_(func));
}
}
OptTracker.prototype.AssertIsOptimized = function(func, expect_optimized) {
if (this.DisableAsserts_(func)) {
return;
}
var opt_status = %GetOptimizationStatus(func);
assertTrue((opt_status & V8OptimizationStatus.kIsFunction) !== 0);
assertEquals(expect_optimized,
(opt_status & V8OptimizationStatus.kOptimized) !== 0);
}
/**
* @private
*/
OptTracker.prototype.GetOptCount_ = function(func) {
var raw_count = %GetOptimizationCount(func);
if (func in this.opt_counts_) {
var checkpointed_count = this.opt_counts_[func];
return raw_count - checkpointed_count;
}
return raw_count;
}
/**
* @private
*/
OptTracker.prototype.GetDeoptCount_ = function(func) {
var count = this.GetOptCount_(func);
var opt_status = %GetOptimizationStatus(func);
if ((opt_status & V8OptimizationStatus.kOptimized) !== 0) {
count -= 1;
}
return count;
}
/**
* @private
*/
OptTracker.prototype.DisableAsserts_ = function(func) {
var opt_status = %GetOptimizationStatus(func);
return (opt_status & V8OptimizationStatus.kAlwaysOptimize) !== 0 ||
(opt_status & V8OptimizationStatus.kNeverOptimize) !== 0;
}
// (End of class OptTracker.)
// Example function used by the test below.
function f(a) {
return a+1;
}
var tracker = new OptTracker();
tracker.CheckpointOptCount(f);
tracker.AssertOptCount(f, 0);
tracker.AssertIsOptimized(f, false);
tracker.AssertDeoptHappened(f, false);
tracker.AssertDeoptCount(f, 0);
f(1);
%OptimizeFunctionOnNextCall(f);
f(1);
tracker.AssertOptCount(f, 1);
tracker.AssertIsOptimized(f, true);
tracker.AssertDeoptHappened(f, false);
tracker.AssertDeoptCount(f, 0);
%DeoptimizeFunction(f);
tracker.AssertOptCount(f, 1);
tracker.AssertIsOptimized(f, false);
tracker.AssertDeoptHappened(f, true);
tracker.AssertDeoptCount(f, 1);
// Let's trigger optimization for another type.
for (var i = 0; i < 5; i++) f("a");
%OptimizeFunctionOnNextCall(f);
f("b");
tracker.AssertOptCount(f, 2);
tracker.AssertIsOptimized(f, true);
tracker.AssertDeoptHappened(f, true);
tracker.AssertDeoptCount(f, 1);
......@@ -15,7 +15,6 @@ function foo(i, deopt = false) {
}
}
assertEquals(0, %GetOptimizationCount(foo));
assertEquals(0, %GetDeoptCount(foo));
foo(10);
......@@ -24,11 +23,9 @@ foo(10);
foo(10);
assertOptimized(foo);
assertEquals(1, %GetOptimizationCount(foo));
assertEquals(0, %GetDeoptCount(foo));
foo(10, true);
assertUnoptimized(foo);
assertEquals(1, %GetOptimizationCount(foo));
assertEquals(1, %GetDeoptCount(foo));
......@@ -15,7 +15,6 @@ function foo(i, deopt = false) {
}
}
assertEquals(0, %GetOptimizationCount(foo));
assertEquals(0, %GetDeoptCount(foo));
foo(10);
......@@ -24,11 +23,9 @@ foo(10);
foo(10);
assertOptimized(foo);
assertEquals(1, %GetOptimizationCount(foo));
assertEquals(0, %GetDeoptCount(foo));
foo(10, true);
assertUnoptimized(foo);
assertEquals(1, %GetOptimizationCount(foo));
assertEquals(1, %GetDeoptCount(foo));
......@@ -16,7 +16,6 @@ function foo(i, deopt = false, deoptobj = null) {
}
}
assertEquals(0, %GetOptimizationCount(foo));
assertEquals(0, %GetDeoptCount(foo));
foo(10);
......@@ -25,12 +24,10 @@ foo(10);
foo(10);
assertOptimized(foo);
assertEquals(1, %GetOptimizationCount(foo));
assertEquals(0, %GetDeoptCount(foo));
foo(10, true, { bar: function(){} });
assertUnoptimized(foo);
assertEquals(1, %GetOptimizationCount(foo));
// Soft deopts don't count to the deopt count.
assertEquals(0, %GetDeoptCount(foo));
......@@ -6,7 +6,6 @@
function foo() {}
assertEquals(0, %GetOptimizationCount(foo));
assertEquals(0, %GetDeoptCount(foo));
foo();
......@@ -15,18 +14,15 @@ foo();
foo();
assertOptimized(foo);
assertEquals(1, %GetOptimizationCount(foo));
assertEquals(0, %GetDeoptCount(foo));
// Unlink the function.
%DeoptimizeFunction(foo);
assertUnoptimized(foo);
assertEquals(1, %GetOptimizationCount(foo));
assertEquals(1, %GetDeoptCount(foo));
foo();
assertUnoptimized(foo);
assertEquals(1, %GetOptimizationCount(foo));
assertEquals(1, %GetDeoptCount(foo));
......@@ -146,7 +146,12 @@ var V8OptimizationStatus = {
kMaybeDeopted: 1 << 3,
kOptimized: 1 << 4,
kTurboFanned: 1 << 5,
kInterpreted: 1 << 6
kInterpreted: 1 << 6,
kMarkedForOptimization: 1 << 7,
kMarkedForConcurrentOptimization: 1 << 8,
kOptimizingConcurrently: 1 << 9,
kIsExecuting: 1 << 10,
kTopmostFrameIsTurboFanned: 1 << 11,
};
// Returns true if --no-opt mode is on.
......
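
For checks made from outside the function under test, the new bits can be wrapped in a small helper; the helper below is hypothetical and not part of this change. Note that the OSR tests further down deliberately avoid such a wrapper inside the OSR'd loop, since the extra call would deopt the code for lack of call feedback.

    // Hypothetical helper; assumes mjsunit.js is loaded and d8 runs with
    // --allow-natives-syntax.
    function hasOptimizationStatus(fun, bit) {
      return (%GetOptimizationStatus(fun) & bit) !== 0;
    }
    // Example, from the caller's side:
    //   assertTrue(hasOptimizationStatus(foo, V8OptimizationStatus.kOptimized));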
......@@ -546,7 +546,6 @@
['deopt_fuzzer == True', {
# Skip tests that are not suitable for deoptimization fuzzing.
'assert-opt-and-deopt': [SKIP],
'never-optimize': [SKIP],
'regress/regress-2185-2': [SKIP],
'readonly': [SKIP],
......
......@@ -26,17 +26,26 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --use-osr --allow-natives-syntax --ignition-osr --opt
// Flags: --no-always-opt
// Can't OSR with always-opt.
assertFalse(isAlwaysOptimize());
function f() {
do {
do {
for (var i = 0; i < 10; i++) %OptimizeOsr();
// Note: this check can't be wrapped in a function, because
// calling that function causes a deopt from lack of call
// feedback.
var opt_status = %GetOptimizationStatus(f);
assertTrue(
(opt_status & V8OptimizationStatus.kTopmostFrameIsTurboFanned) !== 0);
} while (false);
} while (false);
}
f();
assertTrue(%GetOptimizationCount(f) > 0);
function g() {
for (var i = 0; i < 1; i++) { }
......@@ -56,6 +65,9 @@ function g() {
do {
do {
for (var i = 0; i < 10; i++) %OptimizeOsr();
var opt_status = %GetOptimizationStatus(g);
assertTrue((opt_status
& V8OptimizationStatus.kTopmostFrameIsTurboFanned) !== 0);
} while (false);
} while (false);
} while (false);
......@@ -67,4 +79,3 @@ function g() {
}
g();
assertTrue(%GetOptimizationCount(g) > 0);