Commit cb93a308 authored by Yang Guo, committed by Commit Bot

Revert "[heap] Improve embedder tracing during incremental marking"

This reverts commit caed2cc0.

Reason for revert: Breaks layout tests, e.g.

https://test-results.appspot.com/data/layout_results/V8-Blink_Linux_64__dbg_/14924/webkit_layout_tests%20%28with%20patch%29/layout-test-results/results.html

crash log for renderer (pid <unknown>):
STDOUT: <empty>
STDERR: 
STDERR: 
STDERR: #
STDERR: # Fatal error in ../../v8/src/base/platform/elapsed-timer.h, line 24
STDERR: # Debug check failed: !IsStarted().
STDERR: #
STDERR: #
STDERR: #
STDERR: #FailureMessage Object: 0x7ffc46707640
STDERR: #0 0x565409263b6f base::debug::StackTrace::StackTrace()
STDERR: #1 0x56540a8a32fb gin::(anonymous namespace)::PrintStackTrace()
STDERR: #2 0x56540a8980d8 V8_Fatal()
STDERR: #3 0x56540a897e35 v8::base::(anonymous namespace)::DefaultDcheckHandler()
STDERR: #4 0x565407971f02 v8::base::ElapsedTimer::Start()
STDERR: #5 0x565407d08edf v8::internal::TimedHistogram::Start()
STDERR: #6 0x565407e500d5 v8::internal::IncrementalMarking::AdvanceIncrementalMarkingOnAllocation()
STDERR: #7 0x565407e4f977 v8::internal::IncrementalMarking::Observer::Step()
STDERR: #8 0x565407e48092 v8::internal::AllocationObserver::AllocationStep()
STDERR: #9 0x565407eb0751 v8::internal::SpaceWithLinearArea::InlineAllocationStep()
STDERR: #10 0x565407eb3e44 v8::internal::NewSpace::EnsureAllocation()
STDERR: #11 0x565407e258ff v8::internal::NewSpace::AllocateRaw()
STDERR: #12 0x565407e06b2d v8::internal::Heap::AllocateRaw()
STDERR: #13 0x565407e432ef v8::internal::Heap::AllocateRawWithLightRetry()
STDERR: #14 0x565407e433cf v8::internal::Heap::AllocateRawWithRetryOrFail()
STDERR: #15 0x565407e04d48 v8::internal::Factory::NewFixedArrayWithFiller()
STDERR: #16 0x565407fd6339 v8::internal::HashTable<>::New()
STDERR: #17 0x565407fd7be8 v8::internal::HashTable<>::EnsureCapacity()
STDERR: #18 0x565407fc7e95 v8::internal::Dictionary<>::Add()
STDERR: #19 0x565407fcf453 v8::internal::BaseNameDictionary<>::Add()
STDERR: #20 0x565407f89ee4 v8::internal::LookupIterator::ApplyTransitionToDataProperty()
STDERR: #21 0x5654080036e2 v8::internal::Object::AddDataProperty()
STDERR: #22 0x56540793061f v8::internal::(anonymous namespace)::DefineDataProperty()
STDERR: #23 0x56540792da59 v8::internal::(anonymous namespace)::InstantiateObject()
STDERR: #24 0x56540792b75a v8::internal::(anonymous namespace)::InstantiateFunction()
STDERR: #25 0x56540792b4db v8::internal::ApiNatives::InstantiateFunction()
STDERR: #26 0x5654079594bf v8::FunctionTemplate::GetFunction()
STDERR: #27 0x56540a7af74e blink::V8ObjectConstructor::CreateInterfaceObject()
STDERR: #28 0x56540a7afe01 blink::V8PerContextData::ConstructorForTypeSlowCase()
STDERR: #29 0x56540a7afdd6 blink::V8PerContextData::ConstructorForTypeSlowCase()
STDERR: #30 0x56540a7afdd6 blink::V8PerContextData::ConstructorForTypeSlowCase()
STDERR: #31 0x56540a7afcb4 blink::V8PerContextData::CreateWrapperFromCacheSlowCase()
STDERR: #32 0x56540a7aef73 blink::V8DOMWrapper::CreateWrapper()
STDERR: #33 0x56540a7abf6b blink::ScriptWrappable::Wrap()
STDERR: #34 0x56540a677199 blink::V8Document::documentElementAttributeGetterCallback()
STDERR: #35 0x565407a0aec3 v8::internal::FunctionCallbackArguments::Call()
STDERR: #36 0x565407a097be v8::internal::(anonymous namespace)::HandleApiCallHelper<>()
STDERR: #37 0x565407a0877b v8::internal::Builtins::InvokeApiFunction()
STDERR: #38 0x565407fe785a v8::internal::Object::GetPropertyWithAccessor()
STDERR: #39 0x565407fe697e v8::internal::Object::GetProperty()
STDERR: #40 0x565407ec8c71 v8::internal::LoadIC::Load()
STDERR: #41 0x565407ed6401 v8::internal::__RT_impl_Runtime_LoadIC_Miss()
STDERR: #42 0x5654087593f2 <unknown>
STDERR: [16162:16185:1122/143518.356897:WARNING:crash_handler_host_linux.cc(341)] Could not translate tid, attempt = 1 retry ...
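
One plausible reading of the stack trace: ElapsedTimer::Start() hits the !IsStarted() debug check because the gc_incremental_marking histogram timer is started again while an enclosing scope on the same counter is still active (the reverted diff below keeps the inner HistogramTimerScope while adding an outer one). A minimal stand-alone C++ sketch of that failure mode, using simplified stand-ins rather than V8's real ElapsedTimer / TimedHistogram / HistogramTimerScope:

// Simplified stand-ins for illustration only; not V8's actual classes.
#include <cassert>

class ElapsedTimer {
 public:
  void Start() {
    // Mirrors the failing check: a timer must not be started twice.
    assert(!IsStarted() && "Debug check failed: !IsStarted().");
    started_ = true;
  }
  void Stop() { started_ = false; }
  bool IsStarted() const { return started_; }

 private:
  bool started_ = false;
};

class TimerScope {
 public:
  explicit TimerScope(ElapsedTimer* timer) : timer_(timer) { timer_->Start(); }
  ~TimerScope() { timer_->Stop(); }

 private:
  ElapsedTimer* timer_;
};

int main() {
  ElapsedTimer gc_incremental_marking;
  TimerScope outer(&gc_incremental_marking);
  // Entering a second scope on the same timer before the first ends
  // trips the assert, analogous to the DCHECK in elapsed-timer.h.
  TimerScope inner(&gc_incremental_marking);
  return 0;
}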


Original change's description:
> [heap] Improve embedder tracing during incremental marking
> 
> Add a path into embedder tracing on allocation. This is safe as Blink
> is not allowed to call into V8 during object construction.
> 
> Bug: chromium:843903
> Change-Id: I5af053c3169f5a33778ebce5d7c5c43e4efb1aa4
> Reviewed-on: https://chromium-review.googlesource.com/c/1348749
> Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
> Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#57757}

TBR=ulan@chromium.org,mlippautz@chromium.org

Change-Id: Ide2c0b284b52bee17573adcc89f14be4e40dab91
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: chromium:843903
Reviewed-on: https://chromium-review.googlesource.com/c/1349189
Reviewed-by: Yang Guo <yangguo@chromium.org>
Commit-Queue: Yang Guo <yangguo@chromium.org>
Cr-Commit-Position: refs/heads/master@{#57759}
parent c1b527f8
@@ -14,7 +14,6 @@ void LocalEmbedderHeapTracer::TracePrologue() {
CHECK(cached_wrappers_to_trace_.empty());
num_v8_marking_worklist_was_empty_ = 0;
embedder_worklist_empty_ = false;
remote_tracer_->TracePrologue();
}
@@ -58,18 +58,13 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
void NotifyV8MarkingWorklistWasEmpty() {
num_v8_marking_worklist_was_empty_++;
}
bool ShouldFinalizeIncrementalMarking() {
static const size_t kMaxIncrementalFixpointRounds = 3;
return !FLAG_incremental_marking_wrappers || !InUse() ||
(IsRemoteTracingDone() && embedder_worklist_empty_) ||
IsRemoteTracingDone() ||
num_v8_marking_worklist_was_empty_ > kMaxIncrementalFixpointRounds;
}
void SetEmbedderWorklistEmpty(bool empty) {
embedder_worklist_empty_ = empty;
}
void SetEmbedderStackStateForNextFinalization(
EmbedderHeapTracer::EmbedderStackState stack_state);
@@ -83,11 +78,6 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
EmbedderHeapTracer::EmbedderStackState embedder_stack_state_ =
EmbedderHeapTracer::kUnknown;
// Indicates whether the embedder worklist was observed empty on the main
// thread. This is opportunistic as concurrent marking tasks may hold local
// segments of potential embedder fields to move to the main thread.
bool embedder_worklist_empty_ = false;
friend class EmbedderStackStateScope;
};
@@ -808,27 +808,30 @@ intptr_t IncrementalMarking::ProcessMarkingWorklist(
}
void IncrementalMarking::EmbedderStep(double duration_ms) {
constexpr size_t kObjectsToProcessBeforeInterrupt = 500;
constexpr int kObjectsToProcessBeforeInterrupt = 100;
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_EMBEDDER_TRACING);
double deadline = heap_->MonotonicallyIncreasingTimeInMs() + duration_ms;
do {
HeapObject* object;
size_t cnt = 0;
bool embedder_fields_empty = true;
while (marking_worklist()->embedder()->Pop(0, &object)) {
heap_->TracePossibleWrapper(JSObject::cast(object));
if (++cnt == kObjectsToProcessBeforeInterrupt) {
cnt = 0;
embedder_fields_empty = false;
const double deadline =
heap_->MonotonicallyIncreasingTimeInMs() + duration_ms;
HeapObject* object;
int cnt = 0;
while (marking_worklist()->embedder()->Pop(0, &object)) {
heap_->TracePossibleWrapper(JSObject::cast(object));
if (++cnt == kObjectsToProcessBeforeInterrupt) {
cnt = 0;
if (heap_->MonotonicallyIncreasingTimeInMs() > deadline) {
break;
}
}
heap_->local_embedder_heap_tracer()->SetEmbedderWorklistEmpty(
embedder_fields_empty);
heap_->local_embedder_heap_tracer()->RegisterWrappersWithRemoteTracer();
}
heap_->local_embedder_heap_tracer()->RegisterWrappersWithRemoteTracer();
if (!heap_->local_embedder_heap_tracer()
->ShouldFinalizeIncrementalMarking()) {
heap_->local_embedder_heap_tracer()->Trace(deadline);
} while (heap_->MonotonicallyIncreasingTimeInMs() < deadline);
}
}
void IncrementalMarking::Hurry() {
@@ -938,11 +941,6 @@ void IncrementalMarking::Epilogue() {
finalize_marking_completed_ = false;
}
bool IncrementalMarking::ShouldDoEmbedderStep() {
return state_ == MARKING && FLAG_incremental_marking_wrappers &&
heap_->local_embedder_heap_tracer()->InUse();
}
double IncrementalMarking::AdvanceIncrementalMarking(
double deadline_in_ms, CompletionAction completion_action,
StepOrigin step_origin) {
@@ -955,20 +953,23 @@ double IncrementalMarking::AdvanceIncrementalMarking(
0, heap_->local_embedder_heap_tracer()->NumberOfCachedWrappersToTrace());
double remaining_time_in_ms = 0.0;
intptr_t step_size_in_bytes = GCIdleTimeHandler::EstimateMarkingStepSize(
kStepSizeInMs,
heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
const bool incremental_wrapper_tracing =
state_ == MARKING && FLAG_incremental_marking_wrappers &&
heap_->local_embedder_heap_tracer()->InUse();
do {
if (ShouldDoEmbedderStep() && trace_wrappers_toggle_) {
if (incremental_wrapper_tracing && trace_wrappers_toggle_) {
EmbedderStep(kStepSizeInMs);
} else {
const intptr_t step_size_in_bytes =
GCIdleTimeHandler::EstimateMarkingStepSize(
kStepSizeInMs,
heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
Step(step_size_in_bytes, completion_action, step_origin);
}
trace_wrappers_toggle_ = !trace_wrappers_toggle_;
remaining_time_in_ms =
deadline_in_ms - heap()->MonotonicallyIncreasingTimeInMs();
} while (remaining_time_in_ms > kStepSizeInMs && !IsComplete() &&
} while (remaining_time_in_ms >= kStepSizeInMs && !IsComplete() &&
!marking_worklist()->IsEmpty());
return remaining_time_in_ms;
}
@@ -1020,58 +1021,49 @@ void IncrementalMarking::AdvanceIncrementalMarkingOnAllocation() {
return;
}
HistogramTimerScope incremental_marking_scope(
heap_->isolate()->counters()->gc_incremental_marking());
TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
if (ShouldDoEmbedderStep() && trace_wrappers_toggle_) {
EmbedderStep(kMaxStepSizeInMs);
} else {
size_t bytes_to_process =
StepSizeToKeepUpWithAllocations() + StepSizeToMakeProgress();
if (bytes_to_process >= IncrementalMarking::kMinStepSizeInBytes) {
HistogramTimerScope incremental_marking_scope(
heap_->isolate()->counters()->gc_incremental_marking());
TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
// The first step after Scavenge will see many allocated bytes.
// Cap the step size to distribute the marking work more uniformly.
size_t max_step_size = GCIdleTimeHandler::EstimateMarkingStepSize(
kMaxStepSizeInMs,
heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
bytes_to_process = Min(bytes_to_process, max_step_size);
size_t bytes_processed = 0;
if (FLAG_concurrent_marking) {
bytes_processed = Step(bytes_to_process, GC_VIA_STACK_GUARD,
StepOrigin::kV8, WorklistToProcess::kBailout);
bytes_to_process = (bytes_processed >= bytes_to_process)
? 0
: bytes_to_process - bytes_processed;
size_t current_bytes_marked_concurrently =
heap()->concurrent_marking()->TotalMarkedBytes();
// The concurrent_marking()->TotalMarkedBytes() is not monotonic for a
// short period of time when a concurrent marking task is finishing.
if (current_bytes_marked_concurrently > bytes_marked_concurrently_) {
bytes_marked_ahead_of_schedule_ +=
current_bytes_marked_concurrently - bytes_marked_concurrently_;
bytes_marked_concurrently_ = current_bytes_marked_concurrently;
}
}
if (bytes_marked_ahead_of_schedule_ >= bytes_to_process) {
// Steps performed in tasks and concurrently have put us ahead of
// schedule. We skip processing of marking dequeue here and thus shift
// marking time from inside V8 to standalone tasks.
bytes_marked_ahead_of_schedule_ -= bytes_to_process;
bytes_processed += bytes_to_process;
bytes_to_process = IncrementalMarking::kMinStepSizeInBytes;
size_t bytes_to_process =
StepSizeToKeepUpWithAllocations() + StepSizeToMakeProgress();
if (bytes_to_process >= IncrementalMarking::kMinStepSizeInBytes) {
HistogramTimerScope incremental_marking_scope(
heap_->isolate()->counters()->gc_incremental_marking());
TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
// The first step after Scavenge will see many allocated bytes.
// Cap the step size to distribute the marking work more uniformly.
size_t max_step_size = GCIdleTimeHandler::EstimateMarkingStepSize(
kMaxStepSizeInMs,
heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
bytes_to_process = Min(bytes_to_process, max_step_size);
size_t bytes_processed = 0;
if (FLAG_concurrent_marking) {
bytes_processed = Step(bytes_to_process, GC_VIA_STACK_GUARD,
StepOrigin::kV8, WorklistToProcess::kBailout);
bytes_to_process = (bytes_processed >= bytes_to_process)
? 0
: bytes_to_process - bytes_processed;
size_t current_bytes_marked_concurrently =
heap()->concurrent_marking()->TotalMarkedBytes();
// The concurrent_marking()->TotalMarkedBytes() is not monotonic for a
// short period of time when a concurrent marking task is finishing.
if (current_bytes_marked_concurrently > bytes_marked_concurrently_) {
bytes_marked_ahead_of_schedule_ +=
current_bytes_marked_concurrently - bytes_marked_concurrently_;
bytes_marked_concurrently_ = current_bytes_marked_concurrently;
}
bytes_processed += Step(bytes_to_process, GC_VIA_STACK_GUARD,
StepOrigin::kV8, WorklistToProcess::kAll);
bytes_allocated_ -= Min(bytes_allocated_, bytes_processed);
}
if (bytes_marked_ahead_of_schedule_ >= bytes_to_process) {
// Steps performed in tasks and concurrently have put us ahead of
// schedule. We skip processing of marking dequeue here and thus shift
// marking time from inside V8 to standalone tasks.
bytes_marked_ahead_of_schedule_ -= bytes_to_process;
bytes_processed += bytes_to_process;
bytes_to_process = IncrementalMarking::kMinStepSizeInBytes;
}
bytes_processed += Step(bytes_to_process, GC_VIA_STACK_GUARD,
StepOrigin::kV8, WorklistToProcess::kAll);
bytes_allocated_ -= Min(bytes_allocated_, bytes_processed);
}
trace_wrappers_toggle_ = !trace_wrappers_toggle_;
}
size_t IncrementalMarking::Step(size_t bytes_to_process,
@@ -177,8 +177,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
size_t Step(size_t bytes_to_process, CompletionAction action,
StepOrigin step_origin,
WorklistToProcess worklist_to_process = WorklistToProcess::kAll);
bool ShouldDoEmbedderStep();
void EmbedderStep(double duration);
inline void RestartIfNotMarking();