Commit 8422e25b authored by yangguo's avatar yangguo Committed by Commit bot

[debugger] add precise mode for code coverage.

Collecting precise invocation counts needs to be explicitly
enabled. Once enabled, we disable optimization (optimized
code does not increment invocation count, and may inline
callees), and make sure feedback vectors interesting for
code coverage are not garbage-collected.

R=hpayer@chromium.org, jgruber@chromium.org
BUG=v8:5808

Review-Url: https://codereview.chromium.org/2686063002
Cr-Commit-Position: refs/heads/master@{#43082}
parent a4f4c740
......@@ -5,6 +5,8 @@
#include "src/debug/debug-coverage.h"
#include "src/base/hashmap.h"
#include "src/deoptimizer.h"
#include "src/isolate.h"
#include "src/objects-inl.h"
#include "src/objects.h"
......@@ -65,17 +67,33 @@ class ScriptDataBuilder {
std::vector<Coverage::ScriptData> Coverage::Collect(Isolate* isolate) {
SharedToCounterMap counter_map;
// Iterate the heap to find all feedback vectors and accumulate the
// invocation counts into the map for each shared function info.
HeapIterator heap_iterator(isolate->heap());
HeapObject* current_obj;
while ((current_obj = heap_iterator.next())) {
if (!current_obj->IsFeedbackVector()) continue;
FeedbackVector* vector = FeedbackVector::cast(current_obj);
SharedFunctionInfo* shared = vector->shared_function_info();
if (!shared->IsSubjectToDebugging()) continue;
uint32_t count = static_cast<uint32_t>(vector->invocation_count());
counter_map.Add(shared, count);
if (isolate->IsCodeCoverageEnabled()) {
// Feedback vectors are already listed to prevent losing them to GC.
Handle<ArrayList> list =
Handle<ArrayList>::cast(isolate->factory()->code_coverage_list());
for (int i = 0; i < list->Length(); i++) {
FeedbackVector* vector = FeedbackVector::cast(list->Get(i));
SharedFunctionInfo* shared = vector->shared_function_info();
DCHECK(shared->IsSubjectToDebugging());
uint32_t count = static_cast<uint32_t>(vector->invocation_count());
counter_map.Add(shared, count);
}
} else {
// Iterate the heap to find all feedback vectors and accumulate the
// invocation counts into the map for each shared function info.
HeapIterator heap_iterator(isolate->heap());
// Initializing the heap iterator might have triggered a GC, which
// invalidates entries in the counter_map.
DCHECK_EQ(0, counter_map.occupancy());
while (HeapObject* current_obj = heap_iterator.next()) {
if (!current_obj->IsFeedbackVector()) continue;
FeedbackVector* vector = FeedbackVector::cast(current_obj);
SharedFunctionInfo* shared = vector->shared_function_info();
if (!shared->IsSubjectToDebugging()) continue;
uint32_t count = static_cast<uint32_t>(vector->invocation_count());
counter_map.Add(shared, count);
}
}
// Make sure entries in the counter map are not invalidated by GC.
......@@ -140,5 +158,34 @@ std::vector<Coverage::ScriptData> Coverage::Collect(Isolate* isolate) {
return result;
}
// Enables precise code coverage: deoptimizes all code (optimized code does
// not maintain invocation counts), zeroes existing counts, and roots all
// live feedback vectors so GC cannot discard them while coverage is active.
void Coverage::EnablePrecise(Isolate* isolate) {
  HandleScope scope(isolate);
  // Remove all optimized functions. Optimized and inlined functions do not
  // increment the invocation count.
  Deoptimizer::DeoptimizeAll(isolate);
  // Collect existing feedback vectors.
  std::vector<Handle<FeedbackVector>> vectors;
  {
    // NOTE(review): iterator is scoped so it is destroyed before the
    // allocations below — presumably allocating during heap iteration is
    // not allowed; confirm.
    HeapIterator heap_iterator(isolate->heap());
    while (HeapObject* current_obj = heap_iterator.next()) {
      if (!current_obj->IsFeedbackVector()) continue;
      FeedbackVector* vector = FeedbackVector::cast(current_obj);
      SharedFunctionInfo* shared = vector->shared_function_info();
      // Only functions the debugger can observe are tracked.
      if (!shared->IsSubjectToDebugging()) continue;
      // Reset counts so coverage starts fresh from this point.
      vector->clear_invocation_count();
      vectors.emplace_back(vector, isolate);
    }
  }
  // Add collected feedback vectors to the root list lest we lose them to GC.
  Handle<ArrayList> list =
      ArrayList::New(isolate, static_cast<int>(vectors.size()));
  for (const auto& vector : vectors) list = ArrayList::Add(list, vector);
  isolate->SetCodeCoverageList(*list);
}
// Disables precise code coverage. Setting the code coverage list back to
// undefined makes IsCodeCoverageEnabled() return false, which allows the
// tracked feedback vectors to be garbage-collected again.
void Coverage::DisablePrecise(Isolate* isolate) {
  isolate->SetCodeCoverageList(isolate->heap()->undefined_value());
}
} // namespace internal
} // namespace v8
......@@ -8,7 +8,6 @@
#include <vector>
#include "src/allocation.h"
#include "src/base/macros.h"
namespace v8 {
namespace internal {
......@@ -31,6 +30,11 @@ class Coverage : public AllStatic {
};
static std::vector<ScriptData> Collect(Isolate* isolate);
// Enable precise code coverage. This disables optimization and makes sure
// invocation count is not affected by GC.
static void EnablePrecise(Isolate* isolate);
static void DisablePrecise(Isolate* isolate);
};
} // namespace internal
......
......@@ -96,6 +96,10 @@ int FeedbackVector::invocation_count() const {
return Smi::cast(get(kInvocationCountIndex))->value();
}
// Resets the invocation counter slot to zero (stored as a Smi).
void FeedbackVector::clear_invocation_count() {
  set(kInvocationCountIndex, Smi::kZero);
}
// Conversion from an integer index to either a slot or an ic slot.
// static
FeedbackSlot FeedbackVector::ToSlot(int index) {
......
......@@ -230,7 +230,10 @@ Handle<FeedbackVector> FeedbackVector::New(Isolate* isolate,
}
i += entry_size;
}
return Handle<FeedbackVector>::cast(array);
Handle<FeedbackVector> result = Handle<FeedbackVector>::cast(array);
if (isolate->IsCodeCoverageEnabled()) AddToCodeCoverageList(isolate, result);
return result;
}
// static
......@@ -239,9 +242,21 @@ Handle<FeedbackVector> FeedbackVector::Copy(Isolate* isolate,
Handle<FeedbackVector> result;
result = Handle<FeedbackVector>::cast(
isolate->factory()->CopyFixedArray(Handle<FixedArray>::cast(vector)));
if (isolate->IsCodeCoverageEnabled()) AddToCodeCoverageList(isolate, result);
return result;
}
// static
// Registers a freshly created/copied feedback vector on the isolate's code
// coverage root list so it survives GC while precise coverage is active.
void FeedbackVector::AddToCodeCoverageList(Isolate* isolate,
                                           Handle<FeedbackVector> vector) {
  DCHECK(isolate->IsCodeCoverageEnabled());
  // Vectors for functions the debugger cannot observe are not tracked.
  if (!vector->shared_function_info()->IsSubjectToDebugging()) return;
  Handle<ArrayList> list =
      Handle<ArrayList>::cast(isolate->factory()->code_coverage_list());
  // ArrayList::Add may allocate a new backing store; re-register the
  // (possibly different) list object as the root.
  list = ArrayList::Add(list, vector);
  isolate->SetCodeCoverageList(*list);
}
// This logic is copied from
// StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget.
static bool ClearLogic(Isolate* isolate) {
......
......@@ -281,6 +281,7 @@ class FeedbackVector : public FixedArray {
inline FeedbackMetadata* metadata() const;
inline SharedFunctionInfo* shared_function_info() const;
inline int invocation_count() const;
inline void clear_invocation_count();
// Conversion from a slot to an integer index to the underlying array.
static int GetIndex(FeedbackSlot slot) {
......@@ -352,6 +353,8 @@ class FeedbackVector : public FixedArray {
private:
void ClearSlotsImpl(SharedFunctionInfo* shared, bool force_clear);
static void AddToCodeCoverageList(Isolate* isolate,
Handle<FeedbackVector> vector);
DISALLOW_IMPLICIT_CONSTRUCTORS(FeedbackVector);
};
......
......@@ -2779,6 +2779,8 @@ void Heap::CreateInitialObjects() {
ArrayList::cast(*(factory->NewFixedArray(16, TENURED))));
weak_new_space_object_to_code_list()->SetLength(0);
set_code_coverage_list(undefined_value());
set_script_list(Smi::kZero);
Handle<SeededNumberDictionary> slow_element_dictionary =
......@@ -2901,6 +2903,7 @@ bool Heap::RootCanBeWrittenAfterInitialization(Heap::RootListIndex root_index) {
case kWeakObjectToCodeTableRootIndex:
case kWeakNewSpaceObjectToCodeListRootIndex:
case kRetainedMapsRootIndex:
case kCodeCoverageListRootIndex:
case kNoScriptSharedFunctionInfosRootIndex:
case kWeakStackTraceListRootIndex:
case kSerializedTemplatesRootIndex:
......
......@@ -208,6 +208,8 @@ using v8::MemoryPressureLevel;
/* slots refer to the code with the reference to the weak object. */ \
V(ArrayList, weak_new_space_object_to_code_list, \
WeakNewSpaceObjectToCodeList) \
/* List to hold onto feedback vectors that we need for code coverage */ \
V(Object, code_coverage_list, CodeCoverageList) \
V(Object, weak_stack_trace_list, WeakStackTraceList) \
V(Object, noscript_shared_function_infos, NoScriptSharedFunctionInfos) \
V(FixedArray, serialized_templates, SerializedTemplates) \
......
......@@ -2901,10 +2901,9 @@ Map* Isolate::get_initial_js_array_map(ElementsKind kind) {
return nullptr;
}
bool Isolate::use_crankshaft() const {
bool Isolate::use_crankshaft() {
return FLAG_opt && FLAG_crankshaft && !serializer_enabled_ &&
CpuFeatures::SupportsCrankshaft();
CpuFeatures::SupportsCrankshaft() && !IsCodeCoverageEnabled();
}
bool Isolate::NeedsSourcePositionsForProfiling() const {
......@@ -2913,6 +2912,15 @@ bool Isolate::NeedsSourcePositionsForProfiling() const {
debug_->is_active() || logger_->is_logging();
}
// Precise code coverage is enabled iff the code coverage list root holds an
// ArrayList (installed by Coverage::EnablePrecise); otherwise it holds
// undefined (see SetCodeCoverageList).
bool Isolate::IsCodeCoverageEnabled() {
  return heap()->code_coverage_list()->IsArrayList();
}
// Installs the code coverage root. Only two states are valid: undefined
// (coverage disabled) or an ArrayList of feedback vectors (coverage enabled).
void Isolate::SetCodeCoverageList(Object* value) {
  DCHECK(value->IsUndefined(this) || value->IsArrayList());
  heap()->set_code_coverage_list(value);
}
bool Isolate::IsArrayOrObjectPrototype(Object* object) {
Object* context = heap()->native_contexts_list();
while (!context->IsUndefined(this)) {
......
......@@ -960,12 +960,15 @@ class Isolate {
bool IsDead() { return has_fatal_error_; }
void SignalFatalError() { has_fatal_error_ = true; }
bool use_crankshaft() const;
bool use_crankshaft();
bool initialized_from_snapshot() { return initialized_from_snapshot_; }
bool NeedsSourcePositionsForProfiling() const;
bool IsCodeCoverageEnabled();
void SetCodeCoverageList(Object* value);
double time_millis_since_init() {
return heap_.MonotonicallyIncreasingTimeInMs() - time_millis_at_init_;
}
......
......@@ -10050,6 +10050,12 @@ Handle<ArrayList> ArrayList::Add(Handle<ArrayList> array, Handle<Object> obj1,
return array;
}
// Creates an empty ArrayList with capacity for |size| elements.
Handle<ArrayList> ArrayList::New(Isolate* isolate, int size) {
  // Reserve room for the requested capacity plus the length header field.
  Handle<FixedArray> backing =
      isolate->factory()->NewFixedArray(size + kFirstIndex);
  Handle<ArrayList> list = Handle<ArrayList>::cast(backing);
  list->SetLength(0);
  return list;
}
bool ArrayList::IsFull() {
int capacity = length();
......
......@@ -3029,6 +3029,7 @@ class ArrayList : public FixedArray {
AddMode mode = kNone);
static Handle<ArrayList> Add(Handle<ArrayList> array, Handle<Object> obj1,
Handle<Object> obj2, AddMode = kNone);
static Handle<ArrayList> New(Isolate* isolate, int size);
inline int Length();
inline void SetLength(int length);
inline Object* Get(int index);
......
......@@ -1935,5 +1935,16 @@ RUNTIME_FUNCTION(Runtime_DebugCollectCoverage) {
return *factory->NewJSArrayWithElements(scripts_array, FAST_ELEMENTS);
}
// Runtime entry for %DebugTogglePreciseCoverage(enable): turns precise code
// coverage collection on or off. Always returns undefined.
RUNTIME_FUNCTION(Runtime_DebugTogglePreciseCoverage) {
  // This frame itself creates no handles; EnablePrecise opens its own scope.
  SealHandleScope shs(isolate);
  CONVERT_BOOLEAN_ARG_CHECKED(enable, 0);
  if (enable) {
    Coverage::EnablePrecise(isolate);
  } else {
    Coverage::DisablePrecise(isolate);
  }
  return isolate->heap()->undefined_value();
}
} // namespace internal
} // namespace v8
......@@ -203,7 +203,8 @@ namespace internal {
F(DebugAsyncFunctionPromiseCreated, 1, 1) \
F(DebugIsActive, 0, 1) \
F(DebugBreakInOptimizedCode, 0, 1) \
F(DebugCollectCoverage, 0, 1)
F(DebugCollectCoverage, 0, 1) \
F(DebugTogglePreciseCoverage, 1, 1)
#define FOR_EACH_INTRINSIC_ERROR(F) F(ErrorToString, 1, 1)
......
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --allow-natives-syntax --no-always-opt
// Test precise code coverage.
// Finds the loaded script whose source text equals |source| and returns the
// coverage entries collected for it, or undefined if no matching script or
// no coverage data exists. Uses V8 natives syntax (%...), so it only runs
// with --allow-natives-syntax.
function GetCoverage(source) {
  var scripts = %DebugGetLoadedScripts();
  for (var script of scripts) {
    if (script.source == source) {
      var coverage = %DebugCollectCoverage();
      for (var data of coverage) {
        if (data.script_id == script.id) return data.entries;
      }
    }
  }
  return undefined;
}
// Renders coverage data inline into the source text: each covered range
// (from the previous entry's end to this entry's end_position) is emitted
// as "[chunk[count]]". The pieces are concatenated in entry order.
function ApplyCoverageToSource(source, coverage) {
  var pieces = [];
  var offset = 0;
  for (var entry of coverage) {
    var chunk = source.substring(offset, entry.end_position);
    pieces.push(`[${chunk}[${entry.count}]]`);
    offset = entry.end_position;
  }
  return pieces.join("");
}
// Evaluates |source|, collects its coverage, renders it with
// ApplyCoverageToSource, and compares against |expectation|. Passing
// undefined as the expectation asserts that no coverage data exists (e.g.
// the evaluated function has been garbage-collected).
function TestCoverage(name, source, expectation) {
  source = source.trim();
  eval(source);
  var coverage = GetCoverage(source);
  if (expectation === undefined) {
    assertEquals(undefined, coverage);
  } else {
    expectation = expectation.trim();
    var result = ApplyCoverageToSource(source, coverage);
    print(result);
    assertEquals(expectation, result, name + " failed");
  }
}
// Without precise coverage enabled, we lose coverage data to the GC:
// feedback vectors (and their invocation counts) are collectible.
TestCoverage(
"call an IIFE",
`
(function f() {})();
`,
undefined  // The IIFE has been garbage-collected.
);
TestCoverage(
"call locally allocated function",
`
for (var i = 0; i < 10; i++) {
  let f = () => 1;
  i += f();
}
`,
undefined
);
// This does not happen with precise coverage enabled: vectors are rooted
// on the isolate's code coverage list and counts survive GC.
%DebugTogglePreciseCoverage(true);
TestCoverage(
"call an IIFE",
`
(function f() {})();
`,
`
[(function f() {})();[1]]
`
);
TestCoverage(
"call locally allocated function",
`
for (var i = 0; i < 10; i++) {
  let f = () => 1;
  i += f();
}
`,
`
[for (var i = 0; i < 10; i++) {
  let f = [1]][() => 1[5]][;
  i += f();
}[1]]
`
);
// Turn precise coverage back off so later tests see default behavior.
%DebugTogglePreciseCoverage(false);
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment