Commit 94d84cc9 authored by Ross McIlroy, committed by Commit Bot

[Heap] Don't age bytecode on forced GCs.

Forced GCs are performed during memory benchmarking to record memory usage,
and as such, should not age the bytecode and cause it to be prematurely flushed.

BUG=v8:8395

Change-Id: I75c16d8fec29963e49d70738c6a399fd826f6e8b
Reviewed-on: https://chromium-review.googlesource.com/c/1393298
Commit-Queue: Ross McIlroy <rmcilroy@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#58749}
parent 4a9f186b
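
For context: bytecode flushing (the feature tracked by v8:8395) works by aging. Each full GC that marks a live BytecodeArray calls MakeOlder() on it, and once the array has aged far enough without being executed its bytecode can be discarded and later lazily recompiled. The change below simply skips the aging step when the GC was forced, as memory benchmarks do. The following is a minimal standalone sketch of that policy, not V8 code; BytecodeStub, kMaxAge and the threshold value are illustrative only.

#include <cstdio>

// Illustrative stand-in for a compiled-code object that is aged by every
// marking GC and becomes eligible for flushing once it looks unused.
struct BytecodeStub {
  static constexpr int kMaxAge = 6;  // made-up threshold, not V8's value
  int age = 0;

  void MakeOlder() { if (age < kMaxAge) ++age; }
  bool IsOld() const { return age >= kMaxAge; }
  void NotifyExecuted() { age = 0; }  // running the function resets its age
};

// Marking step of a full GC: with this commit's logic, forced GCs skip aging.
void MarkCompact(BytecodeStub& code, bool is_forced_gc) {
  if (!is_forced_gc) code.MakeOlder();
  if (code.IsOld()) std::printf("bytecode would be flushed (age %d)\n", code.age);
}

int main() {
  BytecodeStub code;
  for (int i = 0; i < 10; ++i) MarkCompact(code, /*is_forced_gc=*/true);
  std::printf("age after 10 forced GCs: %d\n", code.age);  // stays 0
  for (int i = 0; i < 10; ++i) MarkCompact(code, /*is_forced_gc=*/false);
  return 0;
}
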
@@ -80,7 +80,8 @@ class ConcurrentMarkingVisitor final
       ConcurrentMarking::MarkingWorklist* shared,
       MemoryChunkDataMap* memory_chunk_data, WeakObjects* weak_objects,
       ConcurrentMarking::EmbedderTracingWorklist* embedder_objects, int task_id,
-      bool embedder_tracing_enabled, unsigned mark_compact_epoch)
+      bool embedder_tracing_enabled, unsigned mark_compact_epoch,
+      bool is_forced_gc)
       : shared_(shared, task_id),
         weak_objects_(weak_objects),
         embedder_objects_(embedder_objects, task_id),
@@ -88,7 +89,8 @@ class ConcurrentMarkingVisitor final
         memory_chunk_data_(memory_chunk_data),
         task_id_(task_id),
         embedder_tracing_enabled_(embedder_tracing_enabled),
-        mark_compact_epoch_(mark_compact_epoch) {}
+        mark_compact_epoch_(mark_compact_epoch),
+        is_forced_gc_(is_forced_gc) {}
 
   template <typename T>
   static V8_INLINE T Cast(HeapObject object) {
@@ -379,7 +381,9 @@ class ConcurrentMarkingVisitor final
     int size = BytecodeArray::BodyDescriptor::SizeOf(map, object);
     VisitMapPointer(object, object->map_slot());
     BytecodeArray::BodyDescriptor::IterateBody(map, object, size, this);
-    object->MakeOlder();
+    if (!is_forced_gc_) {
+      object->MakeOlder();
+    }
     return size;
   }
 
@@ -660,6 +664,7 @@ class ConcurrentMarkingVisitor final
   SlotSnapshot slot_snapshot_;
   bool embedder_tracing_enabled_;
   const unsigned mark_compact_epoch_;
+  bool is_forced_gc_;
 };
 
 // Strings can change maps due to conversion to thin string or external strings.
@@ -738,10 +743,10 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
       GCTracer::BackgroundScope::MC_BACKGROUND_MARKING);
   size_t kBytesUntilInterruptCheck = 64 * KB;
   int kObjectsUntilInterrupCheck = 1000;
-  ConcurrentMarkingVisitor visitor(shared_, &task_state->memory_chunk_data,
-                                   weak_objects_, embedder_objects_, task_id,
-                                   heap_->local_embedder_heap_tracer()->InUse(),
-                                   task_state->mark_compact_epoch);
+  ConcurrentMarkingVisitor visitor(
+      shared_, &task_state->memory_chunk_data, weak_objects_, embedder_objects_,
+      task_id, heap_->local_embedder_heap_tracer()->InUse(),
+      task_state->mark_compact_epoch, task_state->is_forced_gc);
   double time_ms;
   size_t marked_bytes = 0;
   if (FLAG_trace_concurrent_marking) {
@@ -871,6 +876,7 @@ void ConcurrentMarking::ScheduleTasks() {
     task_state_[i].preemption_request = false;
     task_state_[i].mark_compact_epoch =
         heap_->mark_compact_collector()->epoch();
+    task_state_[i].is_forced_gc = heap_->is_current_gc_forced();
     is_pending_[i] = true;
     ++pending_task_count_;
     auto task =
@@ -106,6 +106,7 @@ class ConcurrentMarking {
     MemoryChunkDataMap memory_chunk_data;
     size_t marked_bytes = 0;
     unsigned mark_compact_epoch;
+    bool is_forced_gc;
     char cache_line_padding[64];
   };
   class Task;
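
A side note on the struct the new bit lands in: TaskState is per worker task and ends in cache_line_padding[64] so that state written by one marking thread does not share a cache line with another task's state. Below is a minimal standalone illustration of that false-sharing precaution; the names are made up, not V8's.

#include <cstddef>

// Each worker owns one slot; the trailing padding keeps two workers'
// frequently written fields on different cache lines, so concurrent
// updates do not bounce a shared line between cores.
struct PerWorkerState {
  size_t marked_bytes = 0;
  unsigned epoch = 0;
  bool is_forced_gc = false;
  char cache_line_padding[64];  // same idea as TaskState above
};

static_assert(sizeof(PerWorkerState) > 64, "padded beyond one cache line");

PerWorkerState worker_states[8];  // one slot per concurrent marking task
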
@@ -138,6 +138,7 @@ Heap::Heap()
       old_generation_allocation_limit_(initial_old_generation_size_),
       global_pretenuring_feedback_(kInitialFeedbackCapacity),
       current_gc_callback_flags_(GCCallbackFlags::kNoGCCallbackFlags),
+      is_current_gc_forced_(false),
       external_string_table_(this) {
   // Ensure old_generation_size_ is a multiple of kPageSize.
   DCHECK_EQ(0, max_old_generation_size_ & (Page::kPageSize - 1));
@@ -1263,6 +1264,7 @@ bool Heap::CollectGarbage(AllocationSpace space,
                           const v8::GCCallbackFlags gc_callback_flags) {
   const char* collector_reason = nullptr;
   GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
+  is_current_gc_forced_ = gc_callback_flags & v8::kGCCallbackFlagForced;
 
   if (!CanExpandOldGeneration(new_space()->Capacity())) {
     InvokeNearHeapLimitCallback();
@@ -1327,6 +1329,11 @@ bool Heap::CollectGarbage(AllocationSpace space,
     }
   }
 
+  // Clear is_current_gc_forced now that the current GC is complete. Do this
+  // before GarbageCollectionEpilogue() since that could trigger another
+  // unforced GC.
+  is_current_gc_forced_ = false;
+
   GarbageCollectionEpilogue();
   if (collector == MARK_COMPACTOR && FLAG_track_detached_contexts) {
     isolate()->CheckDetachedContextsAfterGC();
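
The heap.cc changes above follow a latch-and-snapshot pattern: the forced bit is latched for the duration of one CollectGarbage call, each concurrent marking task receives its own copy via TaskState (so background threads never read a flag the main thread may clear under them), and the flag is reset before GarbageCollectionEpilogue(), which can itself start another, unforced GC. Below is a standalone sketch of that pattern with invented names (GcDriver, WorkerState); it is not V8 API.

#include <array>

// Snapshot handed to each background worker when it is scheduled.
struct WorkerState {
  bool is_forced_gc = false;
};

class GcDriver {
 public:
  void CollectGarbage(bool forced) {
    is_current_gc_forced_ = forced;   // latch for the duration of this GC
    ScheduleBackgroundTasks();        // workers see a stable snapshot
    // ... marking and sweeping would happen here ...
    is_current_gc_forced_ = false;    // clear before epilogue work that could
    Epilogue();                       // trigger another, unforced collection
  }

 private:
  void ScheduleBackgroundTasks() {
    for (WorkerState& state : workers_) state.is_forced_gc = is_current_gc_forced_;
  }
  void Epilogue() {}

  bool is_current_gc_forced_ = false;
  std::array<WorkerState, 4> workers_{};
};
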
@@ -1114,6 +1114,8 @@ class Heap {
   int gc_count() const { return gc_count_; }
 
+  bool is_current_gc_forced() const { return is_current_gc_forced_; }
+
   // Returns the size of objects residing in non-new spaces.
   // Excludes external memory held by those objects.
   size_t OldGenerationSizeOfObjects();
@@ -1954,6 +1956,8 @@ class Heap {
   // the embedder and V8's GC.
   GCCallbackFlags current_gc_callback_flags_;
 
+  bool is_current_gc_forced_;
+
   ExternalStringTable external_string_table_;
 
   base::Mutex relocation_mutex_;
@@ -55,7 +55,10 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                                                      BytecodeArray array) {
   int size = BytecodeArray::BodyDescriptor::SizeOf(map, array);
   BytecodeArray::BodyDescriptor::IterateBody(map, array, size, this);
-  array->MakeOlder();
+  if (!heap_->is_current_gc_forced()) {
+    array->MakeOlder();
+  }
   return size;
 }