Commit 94d84cc9 authored by Ross McIlroy, committed by Commit Bot

[Heap] Don't age bytecode on forced GCs.

Forced GCs are performed during memory benchmarking to record memory usage,
and as such, should not age the bytecode and cause it to be prematurely flushed.

BUG=v8:8395

Change-Id: I75c16d8fec29963e49d70738c6a399fd826f6e8b
Reviewed-on: https://chromium-review.googlesource.com/c/1393298
Commit-Queue: Ross McIlroy <rmcilroy@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#58749}
parent 4a9f186b
......@@ -80,7 +80,8 @@ class ConcurrentMarkingVisitor final
ConcurrentMarking::MarkingWorklist* shared,
MemoryChunkDataMap* memory_chunk_data, WeakObjects* weak_objects,
ConcurrentMarking::EmbedderTracingWorklist* embedder_objects, int task_id,
bool embedder_tracing_enabled, unsigned mark_compact_epoch)
bool embedder_tracing_enabled, unsigned mark_compact_epoch,
bool is_forced_gc)
: shared_(shared, task_id),
weak_objects_(weak_objects),
embedder_objects_(embedder_objects, task_id),
......@@ -88,7 +89,8 @@ class ConcurrentMarkingVisitor final
memory_chunk_data_(memory_chunk_data),
task_id_(task_id),
embedder_tracing_enabled_(embedder_tracing_enabled),
mark_compact_epoch_(mark_compact_epoch) {}
mark_compact_epoch_(mark_compact_epoch),
is_forced_gc_(is_forced_gc) {}
template <typename T>
static V8_INLINE T Cast(HeapObject object) {
......@@ -379,7 +381,9 @@ class ConcurrentMarkingVisitor final
int size = BytecodeArray::BodyDescriptor::SizeOf(map, object);
VisitMapPointer(object, object->map_slot());
BytecodeArray::BodyDescriptor::IterateBody(map, object, size, this);
object->MakeOlder();
if (!is_forced_gc_) {
object->MakeOlder();
}
return size;
}
......@@ -660,6 +664,7 @@ class ConcurrentMarkingVisitor final
SlotSnapshot slot_snapshot_;
bool embedder_tracing_enabled_;
const unsigned mark_compact_epoch_;
bool is_forced_gc_;
};
// Strings can change maps due to conversion to thin string or external strings.
......@@ -738,10 +743,10 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
GCTracer::BackgroundScope::MC_BACKGROUND_MARKING);
size_t kBytesUntilInterruptCheck = 64 * KB;
int kObjectsUntilInterrupCheck = 1000;
ConcurrentMarkingVisitor visitor(shared_, &task_state->memory_chunk_data,
weak_objects_, embedder_objects_, task_id,
heap_->local_embedder_heap_tracer()->InUse(),
task_state->mark_compact_epoch);
ConcurrentMarkingVisitor visitor(
shared_, &task_state->memory_chunk_data, weak_objects_, embedder_objects_,
task_id, heap_->local_embedder_heap_tracer()->InUse(),
task_state->mark_compact_epoch, task_state->is_forced_gc);
double time_ms;
size_t marked_bytes = 0;
if (FLAG_trace_concurrent_marking) {
......@@ -871,6 +876,7 @@ void ConcurrentMarking::ScheduleTasks() {
task_state_[i].preemption_request = false;
task_state_[i].mark_compact_epoch =
heap_->mark_compact_collector()->epoch();
task_state_[i].is_forced_gc = heap_->is_current_gc_forced();
is_pending_[i] = true;
++pending_task_count_;
auto task =
......
......@@ -106,6 +106,7 @@ class ConcurrentMarking {
MemoryChunkDataMap memory_chunk_data;
size_t marked_bytes = 0;
unsigned mark_compact_epoch;
bool is_forced_gc;
char cache_line_padding[64];
};
class Task;
......
......@@ -138,6 +138,7 @@ Heap::Heap()
old_generation_allocation_limit_(initial_old_generation_size_),
global_pretenuring_feedback_(kInitialFeedbackCapacity),
current_gc_callback_flags_(GCCallbackFlags::kNoGCCallbackFlags),
is_current_gc_forced_(false),
external_string_table_(this) {
// Ensure old_generation_size_ is a multiple of kPageSize.
DCHECK_EQ(0, max_old_generation_size_ & (Page::kPageSize - 1));
......@@ -1263,6 +1264,7 @@ bool Heap::CollectGarbage(AllocationSpace space,
const v8::GCCallbackFlags gc_callback_flags) {
const char* collector_reason = nullptr;
GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
is_current_gc_forced_ = gc_callback_flags & v8::kGCCallbackFlagForced;
if (!CanExpandOldGeneration(new_space()->Capacity())) {
InvokeNearHeapLimitCallback();
......@@ -1327,6 +1329,11 @@ bool Heap::CollectGarbage(AllocationSpace space,
}
}
// Clear is_current_gc_forced now that the current GC is complete. Do this
// before GarbageCollectionEpilogue() since that could trigger another
// unforced GC.
is_current_gc_forced_ = false;
GarbageCollectionEpilogue();
if (collector == MARK_COMPACTOR && FLAG_track_detached_contexts) {
isolate()->CheckDetachedContextsAfterGC();
......
......@@ -1114,6 +1114,8 @@ class Heap {
int gc_count() const { return gc_count_; }
bool is_current_gc_forced() const { return is_current_gc_forced_; }
// Returns the size of objects residing in non-new spaces.
// Excludes external memory held by those objects.
size_t OldGenerationSizeOfObjects();
......@@ -1954,6 +1956,8 @@ class Heap {
// the embedder and V8's GC.
GCCallbackFlags current_gc_callback_flags_;
bool is_current_gc_forced_;
ExternalStringTable external_string_table_;
base::Mutex relocation_mutex_;
......
......@@ -55,7 +55,10 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode,
BytecodeArray array) {
int size = BytecodeArray::BodyDescriptor::SizeOf(map, array);
BytecodeArray::BodyDescriptor::IterateBody(map, array, size, this);
array->MakeOlder();
if (!heap_->is_current_gc_forced()) {
array->MakeOlder();
}
return size;
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment