Commit c32a378f authored by Michael Lippautz, committed by Commit Bot

[heap] Reland improvements for embedder tracing

Add a path into embedder tracing on allocation. This is safe as Blink
is not allowed to call into V8 during object construction.

This is a reland of caed2cc0.

Also relands the cleanups of ce02d86b.

Bug: chromium:843903
Change-Id: Ic89792fe68337c540a1a93629aee2e92b8774ab2
Reviewed-on: https://chromium-review.googlesource.com/c/1350992
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#57847}
parent bf6b6fe6
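
The heart of the diff below is the new LocalEmbedderHeapTracer::ProcessingScope, which batches the wrapper (embedder-field) pairs found during marking and hands each batch to the remote EmbedderHeapTracer when the cache fills up or the scope ends. The following is a minimal, standalone sketch of that batching pattern only; RemoteTracer, kBatchSize, and the surrounding names are simplified stand-ins for illustration, not the real V8 classes.

#include <cstddef>
#include <utility>
#include <vector>

// Simplified stand-in for the embedder side; illustration only.
using WrapperInfo = std::pair<void*, void*>;

struct RemoteTracer {
  // Receives batches of wrapper pairs; a real EmbedderHeapTracer would
  // mark the embedder-side objects referenced by each pair.
  void RegisterV8References(std::vector<WrapperInfo> refs) { refs.clear(); }
};

class ProcessingScope {
 public:
  explicit ProcessingScope(RemoteTracer* remote) : remote_(remote) {
    cache_.reserve(kBatchSize);
  }
  // Whatever is still cached is handed over when the scope closes.
  ~ProcessingScope() {
    if (!cache_.empty()) remote_->RegisterV8References(std::move(cache_));
  }

  void Add(WrapperInfo info) {
    cache_.push_back(info);
    if (cache_.size() == cache_.capacity()) {
      // Flush a full batch and start a fresh one, mirroring
      // FlushWrapperCacheIfFull() in the diff below.
      remote_->RegisterV8References(std::move(cache_));
      cache_.clear();
      cache_.reserve(kBatchSize);
    }
  }

 private:
  static constexpr std::size_t kBatchSize = 1000;  // kWrapperCacheSize in the diff
  RemoteTracer* const remote_;
  std::vector<WrapperInfo> cache_;
};

Flushing both on capacity and on scope exit keeps the embedder updated in bounded chunks instead of holding an unbounded cache on the V8 side.
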
@@ -5,22 +5,31 @@
 #include "src/heap/embedder-tracing.h"
 
 #include "src/base/logging.h"
+#include "src/objects/embedder-data-slot.h"
+#include "src/objects/js-objects-inl.h"
 
 namespace v8 {
 namespace internal {
 
+void LocalEmbedderHeapTracer::SetRemoteTracer(EmbedderHeapTracer* tracer) {
+  if (remote_tracer_) remote_tracer_->isolate_ = nullptr;
+
+  remote_tracer_ = tracer;
+  if (remote_tracer_)
+    remote_tracer_->isolate_ = reinterpret_cast<v8::Isolate*>(isolate_);
+}
+
 void LocalEmbedderHeapTracer::TracePrologue() {
   if (!InUse()) return;
 
-  CHECK(cached_wrappers_to_trace_.empty());
   num_v8_marking_worklist_was_empty_ = 0;
+  embedder_worklist_empty_ = false;
   remote_tracer_->TracePrologue();
 }
 
 void LocalEmbedderHeapTracer::TraceEpilogue() {
   if (!InUse()) return;
 
-  CHECK(cached_wrappers_to_trace_.empty());
   remote_tracer_->TraceEpilogue();
 }
@@ -36,37 +45,58 @@ void LocalEmbedderHeapTracer::EnterFinalPause() {
 bool LocalEmbedderHeapTracer::Trace(double deadline) {
   if (!InUse()) return true;
 
-  DCHECK_EQ(0, NumberOfCachedWrappersToTrace());
   return remote_tracer_->AdvanceTracing(deadline);
 }
 
 bool LocalEmbedderHeapTracer::IsRemoteTracingDone() {
-  return (InUse()) ? cached_wrappers_to_trace_.empty() &&
-                         remote_tracer_->IsTracingDone()
-                   : true;
+  return !InUse() || remote_tracer_->IsTracingDone();
 }
 
-void LocalEmbedderHeapTracer::RegisterWrappersWithRemoteTracer() {
-  if (!InUse()) return;
-
-  if (cached_wrappers_to_trace_.empty()) {
-    return;
-  }
-
-  remote_tracer_->RegisterV8References(cached_wrappers_to_trace_);
-  cached_wrappers_to_trace_.clear();
-}
-
-bool LocalEmbedderHeapTracer::RequiresImmediateWrapperProcessing() {
-  const size_t kTooManyWrappers = 16000;
-  return cached_wrappers_to_trace_.size() > kTooManyWrappers;
-}
-
 void LocalEmbedderHeapTracer::SetEmbedderStackStateForNextFinalization(
     EmbedderHeapTracer::EmbedderStackState stack_state) {
   if (!InUse()) return;
 
   embedder_stack_state_ = stack_state;
 }
 
+LocalEmbedderHeapTracer::ProcessingScope::ProcessingScope(
+    LocalEmbedderHeapTracer* tracer)
+    : tracer_(tracer) {
+  wrapper_cache_.reserve(kWrapperCacheSize);
+}
+
+LocalEmbedderHeapTracer::ProcessingScope::~ProcessingScope() {
+  if (!wrapper_cache_.empty()) {
+    tracer_->remote_tracer()->RegisterV8References(std::move(wrapper_cache_));
+  }
+}
+
+void LocalEmbedderHeapTracer::ProcessingScope::TracePossibleWrapper(
+    JSObject* js_object) {
+  DCHECK(js_object->IsApiWrapper());
+  if (js_object->GetEmbedderFieldCount() < 2) return;
+
+  void* pointer0;
+  void* pointer1;
+  if (EmbedderDataSlot(js_object, 0).ToAlignedPointer(&pointer0) && pointer0 &&
+      EmbedderDataSlot(js_object, 1).ToAlignedPointer(&pointer1)) {
+    wrapper_cache_.push_back({pointer0, pointer1});
+  }
+  FlushWrapperCacheIfFull();
+}
+
+void LocalEmbedderHeapTracer::ProcessingScope::FlushWrapperCacheIfFull() {
+  if (wrapper_cache_.size() == wrapper_cache_.capacity()) {
+    tracer_->remote_tracer()->RegisterV8References(std::move(wrapper_cache_));
+    wrapper_cache_.clear();
+    wrapper_cache_.reserve(kWrapperCacheSize);
+  }
+}
+
+void LocalEmbedderHeapTracer::ProcessingScope::AddWrapperInfoForTesting(
+    WrapperInfo info) {
+  wrapper_cache_.push_back(info);
+  FlushWrapperCacheIfFull();
+}
+
 }  // namespace internal
......
@@ -13,70 +13,76 @@ namespace v8 {
 namespace internal {
 
 class Heap;
+class JSObject;
 
 class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
  public:
   typedef std::pair<void*, void*> WrapperInfo;
+  typedef std::vector<WrapperInfo> WrapperCache;
 
-  explicit LocalEmbedderHeapTracer(Isolate* isolate) : isolate_(isolate) {}
+  class V8_EXPORT_PRIVATE ProcessingScope {
+   public:
+    explicit ProcessingScope(LocalEmbedderHeapTracer* tracer);
+    ~ProcessingScope();
 
-  ~LocalEmbedderHeapTracer() {
-    if (remote_tracer_) remote_tracer_->isolate_ = nullptr;
-  }
+    void TracePossibleWrapper(JSObject* js_object);
 
-  EmbedderHeapTracer* remote_tracer() const { return remote_tracer_; }
+    void AddWrapperInfoForTesting(WrapperInfo info);
 
-  void SetRemoteTracer(EmbedderHeapTracer* tracer) {
-    if (remote_tracer_) remote_tracer_->isolate_ = nullptr;
+   private:
+    static constexpr size_t kWrapperCacheSize = 1000;
+
+    void FlushWrapperCacheIfFull();
+
+    LocalEmbedderHeapTracer* const tracer_;
+    WrapperCache wrapper_cache_;
+  };
 
-    remote_tracer_ = tracer;
-    if (remote_tracer_)
-      remote_tracer_->isolate_ = reinterpret_cast<v8::Isolate*>(isolate_);
-  }
+  explicit LocalEmbedderHeapTracer(Isolate* isolate) : isolate_(isolate) {}
+
+  ~LocalEmbedderHeapTracer() {
+    if (remote_tracer_) remote_tracer_->isolate_ = nullptr;
+  }
 
   bool InUse() const { return remote_tracer_ != nullptr; }
+  EmbedderHeapTracer* remote_tracer() const { return remote_tracer_; }
 
+  void SetRemoteTracer(EmbedderHeapTracer* tracer);
   void TracePrologue();
   void TraceEpilogue();
   void EnterFinalPause();
   bool Trace(double deadline);
   bool IsRemoteTracingDone();
 
-  size_t NumberOfCachedWrappersToTrace() {
-    return cached_wrappers_to_trace_.size();
-  }
-  void AddWrapperToTrace(WrapperInfo entry) {
-    cached_wrappers_to_trace_.push_back(entry);
-  }
-  void ClearCachedWrappersToTrace() { cached_wrappers_to_trace_.clear(); }
-  void RegisterWrappersWithRemoteTracer();
-
-  // In order to avoid running out of memory we force tracing wrappers if there
-  // are too many of them.
-  bool RequiresImmediateWrapperProcessing();
-
   void NotifyV8MarkingWorklistWasEmpty() {
     num_v8_marking_worklist_was_empty_++;
   }
   bool ShouldFinalizeIncrementalMarking() {
     static const size_t kMaxIncrementalFixpointRounds = 3;
     return !FLAG_incremental_marking_wrappers || !InUse() ||
-           IsRemoteTracingDone() ||
+           (IsRemoteTracingDone() && embedder_worklist_empty_) ||
           num_v8_marking_worklist_was_empty_ > kMaxIncrementalFixpointRounds;
   }
 
   void SetEmbedderStackStateForNextFinalization(
       EmbedderHeapTracer::EmbedderStackState stack_state);
 
- private:
-  typedef std::vector<WrapperInfo> WrapperCache;
+  void SetEmbedderWorklistEmpty(bool is_empty) {
+    embedder_worklist_empty_ = is_empty;
+  }
 
+ private:
   Isolate* const isolate_;
-  WrapperCache cached_wrappers_to_trace_;
   EmbedderHeapTracer* remote_tracer_ = nullptr;
 
   size_t num_v8_marking_worklist_was_empty_ = 0;
   EmbedderHeapTracer::EmbedderStackState embedder_stack_state_ =
       EmbedderHeapTracer::kUnknown;
+  // Indicates whether the embedder worklist was observed empty on the main
+  // thread. This is opportunistic as concurrent marking tasks may hold local
+  // segments of potential embedder fields to move to the main thread.
+  bool embedder_worklist_empty_ = false;
 
   friend class EmbedderStackStateScope;
 };
......
@@ -2964,9 +2964,6 @@ void Heap::RegisterDeserializedObjectsForBlackAllocation(
       }
     }
   }
-
-  // We potentially deserialized wrappers which require registering with the
-  // embedder as the marker will not find them.
-  local_embedder_heap_tracer()->RegisterWrappersWithRemoteTracer();
 
   // Large object space doesn't use reservations, so it needs custom handling.
   for (HeapObject* object : large_objects) {
@@ -4546,18 +4543,6 @@ EmbedderHeapTracer* Heap::GetEmbedderHeapTracer() const {
   return local_embedder_heap_tracer()->remote_tracer();
 }
 
-void Heap::TracePossibleWrapper(JSObject* js_object) {
-  DCHECK(js_object->IsApiWrapper());
-  if (js_object->GetEmbedderFieldCount() < 2) return;
-  void* pointer0;
-  void* pointer1;
-  if (EmbedderDataSlot(js_object, 0).ToAlignedPointer(&pointer0) && pointer0 &&
-      EmbedderDataSlot(js_object, 1).ToAlignedPointer(&pointer1)) {
-    local_embedder_heap_tracer()->AddWrapperToTrace(
-        std::pair<void*, void*>(pointer0, pointer1));
-  }
-}
-
 void Heap::RegisterExternallyReferencedObject(Address* location) {
   // The embedder is not aware of whether numbers are materialized as heap
   // objects are just passed around as Smis.
......
@@ -893,7 +893,6 @@ class Heap {
   void SetEmbedderHeapTracer(EmbedderHeapTracer* tracer);
   EmbedderHeapTracer* GetEmbedderHeapTracer() const;
 
-  void TracePossibleWrapper(JSObject* js_object);
   void RegisterExternallyReferencedObject(Address* location);
   void SetEmbedderStackStateForNextFinalizaton(
       EmbedderHeapTracer::EmbedderStackState stack_state);
......
@@ -8,6 +8,7 @@
 #include "src/compilation-cache.h"
 #include "src/conversions.h"
 #include "src/heap/concurrent-marking.h"
+#include "src/heap/embedder-tracing.h"
 #include "src/heap/gc-idle-time-handler.h"
 #include "src/heap/gc-tracer.h"
 #include "src/heap/heap-inl.h"
@@ -815,39 +816,35 @@ intptr_t IncrementalMarking::ProcessMarkingWorklist(
     int size = VisitObject(obj->map(), obj);
     bytes_processed += size - unscanned_bytes_of_large_object_;
   }
-
-  // Report all found wrappers to the embedder. This is necessary as the
-  // embedder could potentially invalidate wrappers as soon as V8 is done
-  // with its incremental marking processing. Any cached wrappers could
-  // result in broken pointers at this point.
-  heap_->local_embedder_heap_tracer()->RegisterWrappersWithRemoteTracer();
-
   return bytes_processed;
 }
 
 void IncrementalMarking::EmbedderStep(double duration_ms) {
-  constexpr int kObjectsToProcessBeforeInterrupt = 100;
+  constexpr size_t kObjectsToProcessBeforeInterrupt = 500;
 
   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_EMBEDDER_TRACING);
 
-  const double deadline =
-      heap_->MonotonicallyIncreasingTimeInMs() + duration_ms;
-
-  HeapObject* object;
-  int cnt = 0;
-  while (marking_worklist()->embedder()->Pop(0, &object)) {
-    heap_->TracePossibleWrapper(JSObject::cast(object));
-    if (++cnt == kObjectsToProcessBeforeInterrupt) {
-      cnt = 0;
-      if (heap_->MonotonicallyIncreasingTimeInMs() > deadline) {
-        break;
-      }
-    }
-  }
-
-  heap_->local_embedder_heap_tracer()->RegisterWrappersWithRemoteTracer();
-  if (!heap_->local_embedder_heap_tracer()
-           ->ShouldFinalizeIncrementalMarking()) {
-    heap_->local_embedder_heap_tracer()->Trace(deadline);
-  }
+  double deadline = heap_->MonotonicallyIncreasingTimeInMs() + duration_ms;
+  bool empty_worklist;
+  do {
+    {
+      LocalEmbedderHeapTracer::ProcessingScope scope(
+          heap_->local_embedder_heap_tracer());
+      HeapObject* object;
+      size_t cnt = 0;
+      empty_worklist = true;
+      while (marking_worklist()->embedder()->Pop(0, &object)) {
+        scope.TracePossibleWrapper(JSObject::cast(object));
+        if (++cnt == kObjectsToProcessBeforeInterrupt) {
+          cnt = 0;
+          empty_worklist = false;
+          break;
+        }
+      }
+    }
+    heap_->local_embedder_heap_tracer()->Trace(deadline);
+  } while (!empty_worklist &&
+           (heap_->MonotonicallyIncreasingTimeInMs() < deadline));
+  heap_->local_embedder_heap_tracer()->SetEmbedderWorklistEmpty(empty_worklist);
 }
 
 void IncrementalMarking::Hurry() {
@@ -957,6 +954,11 @@ void IncrementalMarking::Epilogue() {
   finalize_marking_completed_ = false;
 }
 
+bool IncrementalMarking::ShouldDoEmbedderStep() {
+  return state_ == MARKING && FLAG_incremental_marking_wrappers &&
+         heap_->local_embedder_heap_tracer()->InUse();
+}
+
 double IncrementalMarking::AdvanceIncrementalMarking(
     double deadline_in_ms, CompletionAction completion_action,
     StepOrigin step_origin) {
@@ -965,27 +967,22 @@ double IncrementalMarking::AdvanceIncrementalMarking(
   TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
   TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
   DCHECK(!IsStopped());
-  DCHECK_EQ(
-      0, heap_->local_embedder_heap_tracer()->NumberOfCachedWrappersToTrace());
 
   double remaining_time_in_ms = 0.0;
-  intptr_t step_size_in_bytes = GCIdleTimeHandler::EstimateMarkingStepSize(
-      kStepSizeInMs,
-      heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
-
-  const bool incremental_wrapper_tracing =
-      state_ == MARKING && FLAG_incremental_marking_wrappers &&
-      heap_->local_embedder_heap_tracer()->InUse();
   do {
-    if (incremental_wrapper_tracing && trace_wrappers_toggle_) {
+    if (ShouldDoEmbedderStep() && trace_wrappers_toggle_) {
       EmbedderStep(kStepSizeInMs);
     } else {
+      const intptr_t step_size_in_bytes =
+          GCIdleTimeHandler::EstimateMarkingStepSize(
+              kStepSizeInMs,
+              heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
       Step(step_size_in_bytes, completion_action, step_origin);
     }
     trace_wrappers_toggle_ = !trace_wrappers_toggle_;
     remaining_time_in_ms =
         deadline_in_ms - heap()->MonotonicallyIncreasingTimeInMs();
-  } while (remaining_time_in_ms >= kStepSizeInMs && !IsComplete() &&
+  } while (remaining_time_in_ms > kStepSizeInMs && !IsComplete() &&
           !marking_worklist()->IsEmpty());
   return remaining_time_in_ms;
 }
@@ -1037,49 +1034,54 @@ void IncrementalMarking::AdvanceIncrementalMarkingOnAllocation() {
     return;
   }
 
-  size_t bytes_to_process =
-      StepSizeToKeepUpWithAllocations() + StepSizeToMakeProgress();
-
-  if (bytes_to_process >= IncrementalMarking::kMinStepSizeInBytes) {
-    HistogramTimerScope incremental_marking_scope(
-        heap_->isolate()->counters()->gc_incremental_marking());
-    TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
-    TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
-    // The first step after Scavenge will see many allocated bytes.
-    // Cap the step size to distribute the marking work more uniformly.
-    size_t max_step_size = GCIdleTimeHandler::EstimateMarkingStepSize(
-        kMaxStepSizeInMs,
-        heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
-    bytes_to_process = Min(bytes_to_process, max_step_size);
-    size_t bytes_processed = 0;
-    if (FLAG_concurrent_marking) {
-      bytes_processed = Step(bytes_to_process, GC_VIA_STACK_GUARD,
-                             StepOrigin::kV8, WorklistToProcess::kBailout);
-      bytes_to_process = (bytes_processed >= bytes_to_process)
-                             ? 0
-                             : bytes_to_process - bytes_processed;
-      size_t current_bytes_marked_concurrently =
-          heap()->concurrent_marking()->TotalMarkedBytes();
-      // The concurrent_marking()->TotalMarkedBytes() is not monothonic for a
-      // short period of time when a concurrent marking task is finishing.
-      if (current_bytes_marked_concurrently > bytes_marked_concurrently_) {
-        bytes_marked_ahead_of_schedule_ +=
-            current_bytes_marked_concurrently - bytes_marked_concurrently_;
-        bytes_marked_concurrently_ = current_bytes_marked_concurrently;
-      }
-    }
-    if (bytes_marked_ahead_of_schedule_ >= bytes_to_process) {
-      // Steps performed in tasks and concurrently have put us ahead of
-      // schedule. We skip processing of marking dequeue here and thus shift
-      // marking time from inside V8 to standalone tasks.
-      bytes_marked_ahead_of_schedule_ -= bytes_to_process;
-      bytes_processed += bytes_to_process;
-      bytes_to_process = IncrementalMarking::kMinStepSizeInBytes;
-    }
-    bytes_processed += Step(bytes_to_process, GC_VIA_STACK_GUARD,
-                            StepOrigin::kV8, WorklistToProcess::kAll);
-    bytes_allocated_ -= Min(bytes_allocated_, bytes_processed);
-  }
+  HistogramTimerScope incremental_marking_scope(
+      heap_->isolate()->counters()->gc_incremental_marking());
+  TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
+  TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
+
+  if (ShouldDoEmbedderStep() && trace_wrappers_toggle_) {
+    EmbedderStep(kMaxStepSizeInMs);
+  } else {
+    size_t bytes_to_process =
+        StepSizeToKeepUpWithAllocations() + StepSizeToMakeProgress();
+    if (bytes_to_process >= IncrementalMarking::kMinStepSizeInBytes) {
+      // The first step after Scavenge will see many allocated bytes.
+      // Cap the step size to distribute the marking work more uniformly.
+      size_t max_step_size = GCIdleTimeHandler::EstimateMarkingStepSize(
+          kMaxStepSizeInMs,
+          heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
+      bytes_to_process = Min(bytes_to_process, max_step_size);
+      size_t bytes_processed = 0;
+      if (FLAG_concurrent_marking) {
+        bytes_processed = Step(bytes_to_process, GC_VIA_STACK_GUARD,
+                               StepOrigin::kV8, WorklistToProcess::kBailout);
+        bytes_to_process = (bytes_processed >= bytes_to_process)
+                               ? 0
+                               : bytes_to_process - bytes_processed;
+        size_t current_bytes_marked_concurrently =
+            heap()->concurrent_marking()->TotalMarkedBytes();
+        // The concurrent_marking()->TotalMarkedBytes() is not monothonic for a
+        // short period of time when a concurrent marking task is finishing.
+        if (current_bytes_marked_concurrently > bytes_marked_concurrently_) {
+          bytes_marked_ahead_of_schedule_ +=
+              current_bytes_marked_concurrently - bytes_marked_concurrently_;
+          bytes_marked_concurrently_ = current_bytes_marked_concurrently;
+        }
+      }
+      if (bytes_marked_ahead_of_schedule_ >= bytes_to_process) {
+        // Steps performed in tasks and concurrently have put us ahead of
+        // schedule. We skip processing of marking dequeue here and thus shift
+        // marking time from inside V8 to standalone tasks.
+        bytes_marked_ahead_of_schedule_ -= bytes_to_process;
+        bytes_processed += bytes_to_process;
+        bytes_to_process = IncrementalMarking::kMinStepSizeInBytes;
+      }
+      bytes_processed += Step(bytes_to_process, GC_VIA_STACK_GUARD,
                              StepOrigin::kV8, WorklistToProcess::kAll);
+      bytes_allocated_ -= Min(bytes_allocated_, bytes_processed);
+    }
+  }
+  trace_wrappers_toggle_ = !trace_wrappers_toggle_;
 }
 
 size_t IncrementalMarking::Step(size_t bytes_to_process,
......
@@ -177,6 +177,8 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
   size_t Step(size_t bytes_to_process, CompletionAction action,
               StepOrigin step_origin,
               WorklistToProcess worklist_to_process = WorklistToProcess::kAll);
+
+  bool ShouldDoEmbedderStep();
   void EmbedderStep(double duration);
 
   inline void RestartIfNotMarking();
......
@@ -74,7 +74,8 @@ V8_INLINE int
 MarkingVisitor<fixed_array_mode, retaining_path_mode,
                MarkingState>::VisitEmbedderTracingSubclass(Map map, T* object) {
   if (heap_->local_embedder_heap_tracer()->InUse()) {
-    heap_->TracePossibleWrapper(object);
+    marking_worklist()->embedder()->Push(MarkCompactCollectorBase::kMainThread,
+                                         object);
   }
   int size = T::BodyDescriptor::SizeOf(map, object);
   T::BodyDescriptor::IterateBody(map, object, size, this);
......
@@ -1487,6 +1487,7 @@ void MarkCompactCollector::ProcessEphemeronsUntilFixpoint() {
     work_to_do = work_to_do || !marking_worklist()->IsEmpty() ||
                  heap()->concurrent_marking()->ephemeron_marked() ||
+                 !marking_worklist()->IsEmbedderEmpty() ||
                  !heap()->local_embedder_heap_tracer()->IsRemoteTracingDone();
     ++iterations;
   }
@@ -1614,11 +1615,14 @@ void MarkCompactCollector::ProcessEphemeronsLinear() {
 void MarkCompactCollector::PerformWrapperTracing() {
   if (heap_->local_embedder_heap_tracer()->InUse()) {
     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_EMBEDDER_TRACING);
-    HeapObject* object;
-    while (marking_worklist()->embedder()->Pop(kMainThread, &object)) {
-      heap_->TracePossibleWrapper(JSObject::cast(object));
+    {
+      LocalEmbedderHeapTracer::ProcessingScope scope(
+          heap_->local_embedder_heap_tracer());
+      HeapObject* object;
+      while (marking_worklist()->embedder()->Pop(kMainThread, &object)) {
+        scope.TracePossibleWrapper(JSObject::cast(object));
+      }
     }
-    heap_->local_embedder_heap_tracer()->RegisterWrappersWithRemoteTracer();
     heap_->local_embedder_heap_tracer()->Trace(
         std::numeric_limits<double>::infinity());
   }
@@ -1779,7 +1783,8 @@ void MarkCompactCollector::MarkLiveObjects() {
       // once.
       PerformWrapperTracing();
       ProcessMarkingWorklist();
-    } while (!heap_->local_embedder_heap_tracer()->IsRemoteTracingDone());
+    } while (!heap_->local_embedder_heap_tracer()->IsRemoteTracingDone() ||
+             !marking_worklist()->IsEmbedderEmpty());
     DCHECK(marking_worklist()->IsEmbedderEmpty());
     DCHECK(marking_worklist()->IsEmpty());
   }
......
@@ -235,6 +235,8 @@ enum class RememberedSetUpdatingMode { ALL, OLD_TO_NEW_ONLY };
 // Base class for minor and full MC collectors.
 class MarkCompactCollectorBase {
  public:
+  static const int kMainThread = 0;
+
   virtual ~MarkCompactCollectorBase() = default;
 
   virtual void SetUp() = 0;
@@ -245,7 +247,6 @@ class MarkCompactCollectorBase {
   inline Isolate* isolate();
 
  protected:
-  static const int kMainThread = 0;
   explicit MarkCompactCollectorBase(Heap* heap)
       : heap_(heap), old_to_new_slots_(0) {}
......
@@ -280,10 +280,6 @@ void ScavengerCollector::CollectGarbage() {
   // Update how much has survived scavenge.
   heap_->IncrementYoungSurvivorsCounter(heap_->SurvivedNewSpaceObjectSize());
-
-  // Scavenger may find new wrappers by iterating objects promoted onto a black
-  // page.
-  heap_->local_embedder_heap_tracer()->RegisterWrappersWithRemoteTracer();
 }
 
 void ScavengerCollector::HandleSurvivingNewLargeObjects() {
......
@@ -83,6 +83,14 @@ TEST(LocalEmbedderHeapTracer, EnterFinalPauseForwards) {
   local_tracer.EnterFinalPause();
 }
 
+TEST(LocalEmbedderHeapTracer, IsRemoteTracingDoneForwards) {
+  StrictMock<MockEmbedderHeapTracer> remote_tracer;
+  LocalEmbedderHeapTracer local_tracer(nullptr);
+  local_tracer.SetRemoteTracer(&remote_tracer);
+  EXPECT_CALL(remote_tracer, IsTracingDone());
+  local_tracer.IsRemoteTracingDone();
+}
+
 TEST(LocalEmbedderHeapTracer, EnterFinalPauseDefaultStackStateUnkown) {
   StrictMock<MockEmbedderHeapTracer> remote_tracer;
   LocalEmbedderHeapTracer local_tracer(nullptr);
@@ -152,52 +160,19 @@ TEST(LocalEmbedderHeapTracer, IsRemoteTracingDoneIncludesRemote) {
   local_tracer.IsRemoteTracingDone();
 }
 
-TEST(LocalEmbedderHeapTracer, NumberOfCachedWrappersToTraceExcludesRemote) {
-  LocalEmbedderHeapTracer local_tracer(nullptr);
-  StrictMock<MockEmbedderHeapTracer> remote_tracer;
-  local_tracer.SetRemoteTracer(&remote_tracer);
-  local_tracer.NumberOfCachedWrappersToTrace();
-}
-
-TEST(LocalEmbedderHeapTracer, RegisterWrappersWithRemoteTracer) {
+TEST(LocalEmbedderHeapTracer, RegisterV8ReferencesWithRemoteTracer) {
   StrictMock<MockEmbedderHeapTracer> remote_tracer;
   LocalEmbedderHeapTracer local_tracer(nullptr);
   local_tracer.SetRemoteTracer(&remote_tracer);
-  local_tracer.AddWrapperToTrace(CreateWrapperInfo());
-  EXPECT_EQ(1u, local_tracer.NumberOfCachedWrappersToTrace());
-  EXPECT_CALL(remote_tracer, RegisterV8References(_));
-  local_tracer.RegisterWrappersWithRemoteTracer();
-  EXPECT_EQ(0u, local_tracer.NumberOfCachedWrappersToTrace());
+  {
+    LocalEmbedderHeapTracer::ProcessingScope scope(&local_tracer);
+    scope.AddWrapperInfoForTesting(CreateWrapperInfo());
+    EXPECT_CALL(remote_tracer, RegisterV8References(_));
+  }
   EXPECT_CALL(remote_tracer, IsTracingDone()).WillOnce(Return(false));
   EXPECT_FALSE(local_tracer.IsRemoteTracingDone());
 }
 
-TEST(LocalEmbedderHeapTracer, TraceFinishes) {
-  StrictMock<MockEmbedderHeapTracer> remote_tracer;
-  LocalEmbedderHeapTracer local_tracer(nullptr);
-  local_tracer.SetRemoteTracer(&remote_tracer);
-  local_tracer.AddWrapperToTrace(CreateWrapperInfo());
-  EXPECT_EQ(1u, local_tracer.NumberOfCachedWrappersToTrace());
-  EXPECT_CALL(remote_tracer, RegisterV8References(_));
-  local_tracer.RegisterWrappersWithRemoteTracer();
-  EXPECT_CALL(remote_tracer, AdvanceTracing(_)).WillOnce(Return(true));
-  EXPECT_TRUE(local_tracer.Trace(std::numeric_limits<double>::infinity()));
-  EXPECT_EQ(0u, local_tracer.NumberOfCachedWrappersToTrace());
-}
-
-TEST(LocalEmbedderHeapTracer, TraceDoesNotFinish) {
-  StrictMock<MockEmbedderHeapTracer> remote_tracer;
-  LocalEmbedderHeapTracer local_tracer(nullptr);
-  local_tracer.SetRemoteTracer(&remote_tracer);
-  local_tracer.AddWrapperToTrace(CreateWrapperInfo());
-  EXPECT_EQ(1u, local_tracer.NumberOfCachedWrappersToTrace());
-  EXPECT_CALL(remote_tracer, RegisterV8References(_));
-  local_tracer.RegisterWrappersWithRemoteTracer();
-  EXPECT_CALL(remote_tracer, AdvanceTracing(_)).WillOnce(Return(false));
-  EXPECT_FALSE(local_tracer.Trace(1.0));
-  EXPECT_EQ(0u, local_tracer.NumberOfCachedWrappersToTrace());
-}
-
 TEST_F(LocalEmbedderHeapTracerWithIsolate, SetRemoteTracerSetsIsolate) {
   StrictMock<MockEmbedderHeapTracer> remote_tracer;
   LocalEmbedderHeapTracer local_tracer(isolate());
......