Commit 5bc51bb4 authored by erikcorry, committed by Commit bot

Postpone counters triggered during GC, and use a HandleScope when calling back.

R=jkummerow@chromium.org,hpayer@chromium.org
BUG=

Review URL: https://codereview.chromium.org/1125383007

Cr-Commit-Position: refs/heads/master@{#28335}
parent 0b81f67b
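For context: use counters are how V8 reports feature usage to the embedder. Below is a minimal sketch of the embedder side that this change feeds into; SetUseCounterCallback() and UseCounterFeature are the existing public API, while the counter variable and setup comment are illustrative:

#include "include/v8.h"

// Embedder-side sketch: record the new kForcedGC feature added below.
static int forced_gc_count = 0;

static void OnUseCounter(v8::Isolate* isolate,
                         v8::Isolate::UseCounterFeature feature) {
  if (feature == v8::Isolate::kForcedGC) forced_gc_count++;
}

// At isolate setup time:
//   isolate->SetUseCounterCallback(OnUseCounter);

Before this patch, that callback could be invoked while the heap was still in GC, where calling back into V8 is unsafe. The diffs below defer such counts and replay them once the collection has finished.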
include/v8.h
@@ -5107,6 +5107,7 @@ class V8_EXPORT Isolate {
     kStoreBufferOverflow = 4,
     kSlotsBufferOverflow = 5,
     kObjectObserve = 6,
+    kForcedGC = 7,
     kUseCounterFeatureCount  // This enum value must be last.
   };
src/heap/heap.cc
@@ -395,6 +395,20 @@ void Heap::ReportStatisticsAfterGC() {
 #else
   if (FLAG_log_gc) new_space_.ReportStatistics();
 #endif  // DEBUG
+  for (int i = 0; i < static_cast<int>(v8::Isolate::kUseCounterFeatureCount);
+       ++i) {
+    int count = deferred_counters_[i];
+    deferred_counters_[i] = 0;
+    while (count > 0) {
+      count--;
+      isolate()->CountUsage(static_cast<v8::Isolate::UseCounterFeature>(i));
+    }
+  }
+}
+
+
+void Heap::IncrementDeferredCount(v8::Isolate::UseCounterFeature feature) {
+  deferred_counters_[feature]++;
 }
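The hunk above is a defer-and-drain pattern: IncrementDeferredCount() only bumps a per-feature count while the callback is unsafe, and ReportStatisticsAfterGC() later replays every deferred increment through CountUsage(). A standalone sketch of the same idea, with illustrative names (not V8's):

#include <array>
#include <cstdio>

enum Feature { kFeatureA, kFeatureB, kFeatureCount };

struct DeferredCounters {
  std::array<int, kFeatureCount> counts{};  // value-initialized to zero

  // While callbacks are unsafe (e.g. during GC): just record the event.
  void Increment(Feature f) { counts[f]++; }

  // Once it is safe again: replay each deferred increment exactly once.
  void Drain() {
    for (int i = 0; i < kFeatureCount; ++i) {
      int count = counts[i];
      counts[i] = 0;
      while (count-- > 0) {
        std::printf("feature %d used\n", i);  // stands in for the callback
      }
    }
  }
};

Resetting the stored count before replaying keeps the drain loop simple and avoids re-counting any increments that arrive mid-drain.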
@@ -925,6 +939,11 @@ bool Heap::CollectGarbage(GarbageCollector collector, const char* gc_reason,
     tracer()->Stop(collector);
   }
+
+  if (collector == MARK_COMPACTOR &&
+      (gc_callback_flags & kGCCallbackFlagForced) != 0) {
+    isolate()->CountUsage(v8::Isolate::kForcedGC);
+  }

   // Start incremental marking for the next cycle. The heap snapshot
   // generator needs incremental marking to stay off after it aborted.
   if (!mark_compact_collector()->abort_incremental_marking() &&
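Only full (MARK_COMPACTOR) collections carrying kGCCallbackFlagForced are counted as kForcedGC. The gc() extension installed under --expose-gc requests such a forced collection, which is what the new test at the bottom relies on. For an embedder, the nearest public entry point is sketched below; RequestGarbageCollectionForTesting() is existing API that likewise requires --expose-gc, though whether it flags the collection as forced is an assumption here:

// Embedder-side sketch: force a full collection.
// That this path sets kGCCallbackFlagForced is an assumption.
void ForceFullGC(v8::Isolate* isolate) {
  isolate->RequestGarbageCollectionForTesting(
      v8::Isolate::kFullGarbageCollection);
}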
@@ -5422,6 +5441,12 @@ bool Heap::SetUp() {
     }
   }
+
+  for (int i = 0; i < static_cast<int>(v8::Isolate::kUseCounterFeatureCount);
+       i++) {
+    deferred_counters_[i] = 0;
+  }
+
   LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
   LOG(isolate_, IntPtrTEvent("heap-available", Available()));
src/heap/heap.h
@@ -1311,6 +1311,8 @@ class Heap {
   // Returns minimal interval between two subsequent collections.
   double get_min_in_mutator() { return min_in_mutator_; }

+  void IncrementDeferredCount(v8::Isolate::UseCounterFeature feature);
+
   MarkCompactCollector* mark_compact_collector() {
     return &mark_compact_collector_;
   }
@@ -2047,6 +2049,8 @@ class Heap {
   // Total RegExp code ever generated
   double total_regexp_code_generated_;

+  int deferred_counters_[v8::Isolate::kUseCounterFeatureCount];
+
   GCTracer tracer_;

   // Creates and installs the full-sized number string cache.
src/isolate.cc
@@ -2669,8 +2669,15 @@ void Isolate::SetUseCounterCallback(v8::Isolate::UseCounterCallback callback) {
 void Isolate::CountUsage(v8::Isolate::UseCounterFeature feature) {
-  if (use_counter_callback_) {
-    use_counter_callback_(reinterpret_cast<v8::Isolate*>(this), feature);
+  // The counter callback may cause the embedder to call into V8, which is not
+  // generally possible during GC.
+  if (heap_.gc_state() == Heap::NOT_IN_GC) {
+    if (use_counter_callback_) {
+      HandleScope handle_scope(this);
+      use_counter_callback_(reinterpret_cast<v8::Isolate*>(this), feature);
+    }
+  } else {
+    heap_.IncrementDeferredCount(feature);
   }
 }
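Besides the deferral itself, the callback now runs inside a HandleScope: an embedder callback may re-enter the V8 API and create local handles, and creating a handle with no active scope is a fatal error. The test's mock callback exercises exactly this by calling GetCallingContext(). A sketch of such a callback, with the body illustrative:

// A callback that re-enters the API: GetCallingContext() returns a
// Local<Context>, which needs an enclosing HandleScope -- the scope opened
// in CountUsage() above provides one when none is active.
static void UseCounterWithHandles(v8::Isolate* isolate,
                                  v8::Isolate::UseCounterFeature feature) {
  v8::Local<v8::Context> context = isolate->GetCallingContext();
  if (!context.IsEmpty()) {
    // ... attribute the event using the context (illustrative).
  }
}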
test/cctest/test-heap.cc
@@ -3203,6 +3203,31 @@ TEST(ReleaseOverReservedPages) {
   CHECK_EQ(1, old_space->CountTotalPages());
 }
+
+
+static int forced_gc_counter = 0;
+
+void MockUseCounterCallback(v8::Isolate* isolate,
+                            v8::Isolate::UseCounterFeature feature) {
+  isolate->GetCallingContext();
+  if (feature == v8::Isolate::kForcedGC) {
+    forced_gc_counter++;
+  }
+}
+
+
+TEST(CountForcedGC) {
+  i::FLAG_expose_gc = true;
+  CcTest::InitializeVM();
+  Isolate* isolate = CcTest::i_isolate();
+  v8::HandleScope scope(CcTest::isolate());
+
+  isolate->SetUseCounterCallback(MockUseCounterCallback);
+
+  forced_gc_counter = 0;
+  const char* source = "gc();";
+  CompileRun(source);
+  CHECK_GT(forced_gc_counter, 0);
+}
+

 TEST(Regress2237) {
   i::FLAG_stress_compaction = false;