// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/embedder-tracing.h"

#include "src/base/logging.h"
#include "src/heap/gc-tracer.h"
#include "src/objects/embedder-data-slot.h"
#include "src/objects/js-objects-inl.h"

namespace v8 {
namespace internal {

// Installs |tracer| as the remote embedder tracer. Any previously registered
// tracer is detached from this isolate first; passing nullptr simply clears
// the registration.
void LocalEmbedderHeapTracer::SetRemoteTracer(EmbedderHeapTracer* tracer) {
  EmbedderHeapTracer* const previous = remote_tracer_;
  if (previous != nullptr) previous->isolate_ = nullptr;

  remote_tracer_ = tracer;
  if (tracer != nullptr) {
    tracer->isolate_ = reinterpret_cast<v8::Isolate*>(isolate_);
  }
}

// Notifies the remote tracer that a tracing cycle is starting and resets the
// bookkeeping used to track an empty embedder/V8 marking worklist.
void LocalEmbedderHeapTracer::TracePrologue(
    EmbedderHeapTracer::TraceFlags flags) {
  if (!InUse()) return;

  embedder_worklist_empty_ = false;
  num_v8_marking_worklist_was_empty_ = 0;
  remote_tracer_->TracePrologue(flags);
}

void LocalEmbedderHeapTracer::TraceEpilogue() {
  if (!InUse()) return;

35 36
  EmbedderHeapTracer::TraceSummary summary;
  remote_tracer_->TraceEpilogue(&summary);
37
  remote_stats_.used_size = summary.allocated_size;
38 39 40 41 42 43 44 45
  // Force a check next time increased memory is reported. This allows for
  // setting limits close to actual heap sizes.
  remote_stats_.allocated_size_limit_for_check = 0;
  constexpr double kMinReportingTimeMs = 0.5;
  if (summary.time > kMinReportingTimeMs) {
    isolate_->heap()->tracer()->RecordEmbedderSpeed(summary.allocated_size,
                                                    summary.time);
  }
46 47 48 49 50
}

void LocalEmbedderHeapTracer::EnterFinalPause() {
  if (!InUse()) return;

51 52 53 54
  remote_tracer_->EnterFinalPause(embedder_stack_state_);
  // Resetting to state unknown as there may be follow up garbage collections
  // triggered from callbacks that have a different stack state.
  embedder_stack_state_ = EmbedderHeapTracer::kUnknown;
55 56
}

bool LocalEmbedderHeapTracer::Trace(double deadline) {
  if (!InUse()) return true;
59

60
  return remote_tracer_->AdvanceTracing(deadline);
61 62
}

bool LocalEmbedderHeapTracer::IsRemoteTracingDone() {
64
  return !InUse() || remote_tracer_->IsTracingDone();
65 66
}

// Records the stack state to hand to the remote tracer on the next final
// pause. Ignored when no remote tracer is attached.
void LocalEmbedderHeapTracer::SetEmbedderStackStateForNextFinalization(
    EmbedderHeapTracer::EmbedderStackState stack_state) {
  if (InUse()) embedder_stack_state_ = stack_state;
}

// Scope that batches wrapper infos before handing them to the remote tracer.
LocalEmbedderHeapTracer::ProcessingScope::ProcessingScope(
    LocalEmbedderHeapTracer* tracer)
    : tracer_(tracer) {
  // Pre-size the cache so batching does not reallocate up front.
  wrapper_cache_.reserve(kWrapperCacheSize);
}

// Flushes any wrappers that were collected but not yet handed over to the
// remote tracer.
LocalEmbedderHeapTracer::ProcessingScope::~ProcessingScope() {
  if (wrapper_cache_.empty()) return;
  tracer_->remote_tracer()->RegisterV8References(std::move(wrapper_cache_));
}

void LocalEmbedderHeapTracer::ProcessingScope::TracePossibleWrapper(
87
    JSObject js_object) {
88 89
  DCHECK(js_object.IsApiWrapper());
  if (js_object.GetEmbedderFieldCount() < 2) return;
90

91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111
  void* pointer0;
  void* pointer1;
  if (EmbedderDataSlot(js_object, 0).ToAlignedPointer(&pointer0) && pointer0 &&
      EmbedderDataSlot(js_object, 1).ToAlignedPointer(&pointer1)) {
    wrapper_cache_.push_back({pointer0, pointer1});
  }
  FlushWrapperCacheIfFull();
}

// Hands the batched wrappers to the remote tracer once the cache reaches its
// capacity, then re-establishes the reserved capacity for further batching.
void LocalEmbedderHeapTracer::ProcessingScope::FlushWrapperCacheIfFull() {
  if (wrapper_cache_.size() < wrapper_cache_.capacity()) return;
  tracer_->remote_tracer()->RegisterV8References(std::move(wrapper_cache_));
  wrapper_cache_.clear();
  wrapper_cache_.reserve(kWrapperCacheSize);
}

// Test-only hook: enqueues |info| directly, flushing when the cache fills up.
void LocalEmbedderHeapTracer::ProcessingScope::AddWrapperInfoForTesting(
    WrapperInfo info) {
  wrapper_cache_.push_back(info);
  FlushWrapperCacheIfFull();
}

void LocalEmbedderHeapTracer::StartIncrementalMarkingIfNeeded() {
115
  if (!FLAG_global_gc_scheduling || !FLAG_incremental_marking) return;
116

117 118 119 120
  Heap* heap = isolate_->heap();
  heap->StartIncrementalMarkingIfAllocationLimitIsReached(
      heap->GCFlagsForIncrementalMarking(),
      kGCCallbackScheduleIdleGarbageCollection);
121 122 123 124
  if (heap->AllocationLimitOvershotByLargeMargin()) {
    heap->FinalizeIncrementalMarkingAtomically(
        i::GarbageCollectionReason::kExternalFinalize);
  }
125 126
}

}  // namespace internal
}  // namespace v8