Commit 7eded300 authored by Sathya Gunasekaran and committed by Commit Bot

Revert "[heap] Attempt to incorporate backing store counters into heap sizing...

Revert "[heap] Attempt to incorporate backing store counters into heap sizing and GC trigger strategy."

This reverts commit ba735dde.

Reason for revert: https://ci.chromium.org/p/v8/builders/luci.v8.ci/V8%20Linux64%20TSAN/21991

Original change's description:
> [heap] Attempt to incorporate backing store counters into heap sizing and GC trigger strategy.
> 
> Bug: chromium:845409
> Cq-Include-Trybots: luci.chromium.try:linux_chromium_rel_ng
> Change-Id: Ic62a4339110e3dd2a6b1961a246e2bee0c07c03b
> Reviewed-on: https://chromium-review.googlesource.com/1160162
> Commit-Queue: Rodrigo Bruno <rfbpb@google.com>
> Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
> Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#55128}

TBR=ulan@chromium.org,mlippautz@chromium.org,rfbpb@google.com

Change-Id: Iaf65227c65c11effa11662ac7d7bd7736f4d7846
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: chromium:845409
Cq-Include-Trybots: luci.chromium.try:linux_chromium_rel_ng
Reviewed-on: https://chromium-review.googlesource.com/1174858
Reviewed-by: Sathya Gunasekaran <gsathya@chromium.org>
Commit-Queue: Sathya Gunasekaran <gsathya@chromium.org>
Cr-Commit-Position: refs/heads/master@{#55129}
parent ba735dde
......@@ -9486,7 +9486,7 @@ class Internals {
kExternalMemoryLimitOffset + kApiInt64Size;
static const int kIsolateRootsOffset = kExternalMemoryLimitOffset +
kApiInt64Size + kApiInt64Size +
kApiInt64Size + kApiPointerSize;
kApiPointerSize + kApiPointerSize;
static const int kUndefinedValueRootIndex = 4;
static const int kTheHoleValueRootIndex = 5;
static const int kNullValueRootIndex = 6;
......
......@@ -8693,8 +8693,7 @@ void Isolate::GetHeapStatistics(HeapStatistics* heap_statistics) {
heap_statistics->malloced_memory_ =
isolate->allocator()->GetCurrentMemoryUsage() +
isolate->wasm_engine()->allocator()->GetCurrentMemoryUsage();
heap_statistics->external_memory_ = isolate->heap()->external_memory() +
isolate->heap()->backing_story_bytes();
heap_statistics->external_memory_ = isolate->heap()->external_memory();
heap_statistics->peak_malloced_memory_ =
isolate->allocator()->GetMaxMemoryUsage() +
isolate->wasm_engine()->allocator()->GetMaxMemoryUsage();
......
......@@ -47,6 +47,8 @@ class ArrayBufferCollector::FreeingTask final : public CancelableTask {
};
void ArrayBufferCollector::FreeAllocationsOnBackgroundThread() {
// TODO(wez): Remove backing-store from external memory accounting.
heap_->account_external_memory_concurrently_freed();
if (!heap_->IsTearingDown() && FLAG_concurrent_array_buffer_freeing) {
V8::GetCurrentPlatform()->CallOnWorkerThread(
base::make_unique<FreeingTask>(heap_));
......
......@@ -30,6 +30,12 @@ void ArrayBufferTracker::RegisterNew(Heap* heap, JSArrayBuffer* buffer) {
DCHECK_NOT_NULL(tracker);
tracker->Add(buffer, length);
}
// TODO(wez): Remove backing-store from external memory accounting.
// We may go over the limit of externally allocated memory here. We call the
// api function to trigger a GC in this case.
reinterpret_cast<v8::Isolate*>(heap->isolate())
->AdjustAmountOfExternalAllocatedMemory(length);
}
void ArrayBufferTracker::Unregister(Heap* heap, JSArrayBuffer* buffer) {
......@@ -43,6 +49,9 @@ void ArrayBufferTracker::Unregister(Heap* heap, JSArrayBuffer* buffer) {
DCHECK_NOT_NULL(tracker);
tracker->Remove(buffer, length);
}
// TODO(wez): Remove backing-store from external memory accounting.
heap->update_external_memory(-static_cast<intptr_t>(length));
}
Space* LocalArrayBufferTracker::space() { return page_->owner(); }
......@@ -67,6 +76,10 @@ void LocalArrayBufferTracker::Free(Callback should_free) {
if (freed_memory > 0) {
page_->DecrementExternalBackingStoreBytes(
ExternalBackingStoreType::kArrayBuffer, freed_memory);
// TODO(wez): Remove backing-store from external memory accounting.
page_->heap()->update_external_memory_concurrently_freed(
static_cast<intptr_t>(freed_memory));
}
}
......@@ -86,6 +99,7 @@ void ArrayBufferTracker::FreeDead(Page* page, MarkingState* marking_state) {
void LocalArrayBufferTracker::Add(JSArrayBuffer* buffer, size_t length) {
page_->IncrementExternalBackingStoreBytes(
ExternalBackingStoreType::kArrayBuffer, length);
auto ret = array_buffers_.insert(
{buffer,
{buffer->backing_store(), length, buffer->backing_store(),
......@@ -99,6 +113,7 @@ void LocalArrayBufferTracker::Add(JSArrayBuffer* buffer, size_t length) {
void LocalArrayBufferTracker::Remove(JSArrayBuffer* buffer, size_t length) {
page_->DecrementExternalBackingStoreBytes(
ExternalBackingStoreType::kArrayBuffer, length);
TrackingData::iterator it = array_buffers_.find(buffer);
// Check that we indeed find a key to remove.
DCHECK(it != array_buffers_.end());
......
......@@ -54,6 +54,7 @@ void LocalArrayBufferTracker::Process(Callback callback) {
tracker->Add(new_buffer, size);
}
moved_memory += it->second.length;
} else if (result == kRemoveEntry) {
freed_memory += it->second.length;
// We pass backing_store() and stored length to the collector for freeing
......@@ -66,6 +67,11 @@ void LocalArrayBufferTracker::Process(Callback callback) {
UNREACHABLE();
}
}
if (moved_memory || freed_memory) {
// TODO(wez): Remove backing-store from external memory accounting.
page_->heap()->update_external_memory_concurrently_freed(
static_cast<intptr_t>(freed_memory));
}
array_buffers_.swap(kept_array_buffers);
......
......@@ -66,10 +66,35 @@ double MemoryController::GrowingFactor(double gc_speed, double mutator_speed,
return factor;
}
double MemoryController::MaxGrowingFactor(size_t curr_max_size) {
const double min_small_factor = 1.3;
const double max_small_factor = 2.0;
const double high_factor = 4.0;
size_t max_size_in_mb = curr_max_size / MB;
max_size_in_mb = Max(max_size_in_mb, kMinSize);
// If we are on a device with lots of memory, we allow a high heap
// growing factor.
if (max_size_in_mb >= kMaxSize) {
return high_factor;
}
DCHECK_GE(max_size_in_mb, kMinSize);
DCHECK_LT(max_size_in_mb, kMaxSize);
// On smaller devices we linearly scale the factor: (X-A)/(B-A)*(D-C)+C
double factor = (max_size_in_mb - kMinSize) *
(max_small_factor - min_small_factor) /
(kMaxSize - kMinSize) +
min_small_factor;
return factor;
}
size_t MemoryController::CalculateAllocationLimit(
size_t curr_size, size_t max_size, double max_factor, double gc_speed,
double mutator_speed, size_t new_space_capacity,
Heap::HeapGrowingMode growing_mode) {
size_t curr_size, size_t max_size, double gc_speed, double mutator_speed,
size_t new_space_capacity, Heap::HeapGrowingMode growing_mode) {
double max_factor = MaxGrowingFactor(max_size);
double factor = GrowingFactor(gc_speed, mutator_speed, max_factor);
if (FLAG_trace_gc_verbose) {
......@@ -100,7 +125,7 @@ size_t MemoryController::CalculateAllocationLimit(
MinimumAllocationLimitGrowingStep(growing_mode));
limit += new_space_capacity;
uint64_t halfway_to_the_max =
(static_cast<uint64_t>(curr_size) + static_cast<uint64_t>(max_size)) / 2;
(static_cast<uint64_t>(curr_size) + max_size) / 2;
size_t result = static_cast<size_t>(Min(limit, halfway_to_the_max));
if (FLAG_trace_gc_verbose) {
......@@ -122,30 +147,5 @@ size_t MemoryController::MinimumAllocationLimitGrowingStep(
: kRegularAllocationLimitGrowingStep);
}
double HeapController::MaxGrowingFactor(size_t curr_max_size) {
const double min_small_factor = 1.3;
const double max_small_factor = 2.0;
const double high_factor = 4.0;
size_t max_size_in_mb = curr_max_size / MB;
max_size_in_mb = Max(max_size_in_mb, kMinSize);
// If we are on a device with lots of memory, we allow a high heap
// growing factor.
if (max_size_in_mb >= kMaxSize) {
return high_factor;
}
DCHECK_GE(max_size_in_mb, kMinSize);
DCHECK_LT(max_size_in_mb, kMaxSize);
// On smaller devices we linearly scale the factor: (X-A)/(B-A)*(D-C)+C
double factor = (max_size_in_mb - kMinSize) *
(max_small_factor - min_small_factor) /
(kMaxSize - kMinSize) +
min_small_factor;
return factor;
}
} // namespace internal
} // namespace v8
......@@ -18,18 +18,20 @@ class V8_EXPORT_PRIVATE MemoryController {
MemoryController(Heap* heap, double min_growing_factor,
double max_growing_factor,
double conservative_growing_factor,
double target_mutator_utilization)
double target_mutator_utilization, size_t min_size,
size_t max_size)
: heap_(heap),
kMinGrowingFactor(min_growing_factor),
kMaxGrowingFactor(max_growing_factor),
kConservativeGrowingFactor(conservative_growing_factor),
kTargetMutatorUtilization(target_mutator_utilization) {}
kTargetMutatorUtilization(target_mutator_utilization),
kMinSize(min_size),
kMaxSize(max_size) {}
virtual ~MemoryController() {}
// Computes the allocation limit to trigger the next garbage collection.
size_t CalculateAllocationLimit(size_t curr_size, size_t max_size,
double max_factor, double gc_speed,
double mutator_speed,
double gc_speed, double mutator_speed,
size_t new_space_capacity,
Heap::HeapGrowingMode growing_mode);
......@@ -39,6 +41,7 @@ class V8_EXPORT_PRIVATE MemoryController {
protected:
double GrowingFactor(double gc_speed, double mutator_speed,
double max_factor);
double MaxGrowingFactor(size_t curr_max_size);
virtual const char* ControllerName() = 0;
Heap* const heap_;
......@@ -47,6 +50,9 @@ class V8_EXPORT_PRIVATE MemoryController {
const double kMaxGrowingFactor;
const double kConservativeGrowingFactor;
const double kTargetMutatorUtilization;
// Sizes are in MB.
const size_t kMinSize;
const size_t kMaxSize;
FRIEND_TEST(HeapControllerTest, HeapGrowingFactor);
FRIEND_TEST(HeapControllerTest, MaxHeapGrowingFactor);
......@@ -57,26 +63,17 @@ class V8_EXPORT_PRIVATE MemoryController {
class HeapController : public MemoryController {
public:
explicit HeapController(Heap* heap)
: MemoryController(heap, 1.1, 4.0, 1.3, 0.97) {}
V8_EXPORT_PRIVATE double MaxGrowingFactor(size_t curr_max_size);
: MemoryController(heap, 1.1, 4.0, 1.3, 0.97, kMinHeapSize,
kMaxHeapSize) {}
// Sizes are in MB.
static const size_t kMinSize = 128 * Heap::kPointerMultiplier;
static const size_t kMaxSize = 1024 * Heap::kPointerMultiplier;
static const size_t kMinHeapSize = 128 * Heap::kPointerMultiplier;
static const size_t kMaxHeapSize = 1024 * Heap::kPointerMultiplier;
protected:
const char* ControllerName() { return "HeapController"; }
};
class GlobalMemoryController : public MemoryController {
public:
explicit GlobalMemoryController(Heap* heap)
: MemoryController(heap, 1.1, 4.0, 1.3, 0.97) {}
protected:
const char* ControllerName() { return "GlobalMemoryController"; }
};
} // namespace internal
} // namespace v8
......
......@@ -138,7 +138,7 @@ Heap::Heap()
: external_memory_(0),
external_memory_limit_(kExternalAllocationSoftLimit),
external_memory_at_last_mark_compact_(0),
backing_store_bytes_(0),
external_memory_concurrently_freed_(0),
isolate_(nullptr),
code_range_size_(0),
// semispace_size_ should be a power of 2 and old_generation_size_ should
......@@ -146,9 +146,6 @@ Heap::Heap()
max_semi_space_size_(8 * (kPointerSize / 4) * MB),
initial_semispace_size_(kMinSemiSpaceSizeInKB * KB),
max_old_generation_size_(700ul * (kPointerSize / 4) * MB),
max_global_memory_size_(
Min(static_cast<uint64_t>(std::numeric_limits<size_t>::max()),
static_cast<uint64_t>(16) * GB)),
initial_max_old_generation_size_(max_old_generation_size_),
initial_old_generation_size_(max_old_generation_size_ /
kInitalOldGenerationLimitFactor),
......@@ -187,7 +184,6 @@ Heap::Heap()
mmap_region_base_(0),
remembered_unmapped_pages_index_(0),
old_generation_allocation_limit_(initial_old_generation_size_),
global_memory_allocation_limit_(initial_old_generation_size_),
inline_allocation_disabled_(false),
tracer_(nullptr),
promoted_objects_size_(0),
......@@ -263,8 +259,8 @@ size_t Heap::ComputeMaxOldGenerationSize(uint64_t physical_memory) {
size_t computed_size = static_cast<size_t>(physical_memory / i::MB /
old_space_physical_memory_factor *
kPointerMultiplier);
return Max(Min(computed_size, HeapController::kMaxSize),
HeapController::kMinSize);
return Max(Min(computed_size, HeapController::kMaxHeapSize),
HeapController::kMinHeapSize);
}
size_t Heap::Capacity() {
......@@ -481,8 +477,6 @@ void Heap::PrintShortHeapStatistics() {
CommittedMemoryOfHeapAndUnmapper() / KB);
PrintIsolate(isolate_, "External memory reported: %6" PRId64 " KB\n",
external_memory_ / KB);
PrintIsolate(isolate_, "Backing store memory reported: %6" PRId64 " KB\n",
backing_store_bytes_ / KB);
PrintIsolate(isolate_, "External memory global %zu KB\n",
external_memory_callback_() / KB);
PrintIsolate(isolate_, "Total time spent in GC : %.1f ms\n",
......@@ -1469,11 +1463,8 @@ void Heap::StartIncrementalMarkingIfAllocationLimitIsReached(
if (reached_limit == IncrementalMarkingLimit::kSoftLimit) {
incremental_marking()->incremental_marking_job()->ScheduleTask(this);
} else if (reached_limit == IncrementalMarkingLimit::kHardLimit) {
StartIncrementalMarking(
gc_flags,
OldGenerationSpaceAvailable() <= new_space_->Capacity()
? GarbageCollectionReason::kAllocationLimit
: GarbageCollectionReason::kGlobalAllocationLimit,
StartIncrementalMarking(gc_flags,
GarbageCollectionReason::kAllocationLimit,
gc_callback_flags);
}
}
......@@ -1755,7 +1746,7 @@ bool Heap::PerformGarbageCollection(
}
UpdateSurvivalStatistics(static_cast<int>(start_new_space_size));
ConfigureInitialAllocationLimits();
ConfigureInitialOldGenerationSize();
if (collector != MARK_COMPACTOR) {
// Objects that died in the new space might have been accounted
......@@ -1788,32 +1779,27 @@ bool Heap::PerformGarbageCollection(
double gc_speed = tracer()->CombinedMarkCompactSpeedInBytesPerMillisecond();
double mutator_speed =
tracer()->CurrentOldGenerationAllocationThroughputInBytesPerMillisecond();
double max_factor =
heap_controller()->MaxGrowingFactor(max_old_generation_size_);
size_t old_gen_size = OldGenerationSizeOfObjects();
size_t new_old_limit = heap_controller()->CalculateAllocationLimit(
old_gen_size, max_old_generation_size_, max_factor, gc_speed,
mutator_speed, new_space()->Capacity(), CurrentHeapGrowingMode());
size_t new_global_limit = global_controller()->CalculateAllocationLimit(
backing_store_bytes_ + old_gen_size, max_global_memory_size_, max_factor,
gc_speed, mutator_speed, new_space()->Capacity(),
CurrentHeapGrowingMode());
if (collector == MARK_COMPACTOR) {
// Register the amount of external allocated memory.
external_memory_at_last_mark_compact_ = external_memory_;
external_memory_limit_ = external_memory_ + kExternalAllocationSoftLimit;
set_allocation_limits(new_old_limit, new_global_limit);
size_t new_limit = heap_controller()->CalculateAllocationLimit(
old_gen_size, max_old_generation_size_, gc_speed, mutator_speed,
new_space()->Capacity(), CurrentHeapGrowingMode());
old_generation_allocation_limit_ = new_limit;
CheckIneffectiveMarkCompact(
old_gen_size, tracer()->AverageMarkCompactMutatorUtilization());
} else if (HasLowYoungGenerationAllocationRate() &&
old_generation_size_configured_) {
set_allocation_limits(
Min(old_generation_allocation_limit_, new_old_limit),
Min(global_memory_allocation_limit_, new_global_limit));
size_t new_limit = heap_controller()->CalculateAllocationLimit(
old_gen_size, max_old_generation_size_, gc_speed, mutator_speed,
new_space()->Capacity(), CurrentHeapGrowingMode());
if (new_limit < old_generation_allocation_limit_) {
old_generation_allocation_limit_ = new_limit;
}
}
{
......@@ -2639,20 +2625,14 @@ void Heap::UnregisterArrayBuffer(JSArrayBuffer* buffer) {
ArrayBufferTracker::Unregister(this, buffer);
}
size_t Heap::ConfigureInitialControllerSize(MemoryController* controller,
size_t curr_limit) {
return Max(
controller->MinimumAllocationLimitGrowingStep(CurrentHeapGrowingMode()),
static_cast<size_t>(static_cast<double>(curr_limit) *
(tracer()->AverageSurvivalRatio() / 100)));
}
void Heap::ConfigureInitialAllocationLimits() {
void Heap::ConfigureInitialOldGenerationSize() {
if (!old_generation_size_configured_ && tracer()->SurvivalEventsRecorded()) {
size_t new_old_limit = ConfigureInitialControllerSize(
heap_controller_, old_generation_allocation_limit_);
size_t new_global_limit = ConfigureInitialControllerSize(
global_controller_, global_memory_allocation_limit_);
set_allocation_limits(new_old_limit, new_global_limit);
old_generation_allocation_limit_ =
Max(heap_controller()->MinimumAllocationLimitGrowingStep(
CurrentHeapGrowingMode()),
static_cast<size_t>(
static_cast<double>(old_generation_allocation_limit_) *
(tracer()->AverageSurvivalRatio() / 100)));
}
}
......@@ -3602,8 +3582,8 @@ void Heap::CollectGarbageOnMemoryPressure() {
double end = MonotonicallyIncreasingTimeInMs();
// Estimate how much memory we can free.
int64_t potential_garbage = (CommittedMemory() - SizeOfObjects()) +
external_memory_ + backing_store_bytes_;
int64_t potential_garbage =
(CommittedMemory() - SizeOfObjects()) + external_memory_;
// If we can potentially free large amount of memory, then start GC right
// away instead of waiting for memory reducer.
if (potential_garbage >= kGarbageThresholdInBytes &&
......@@ -3762,8 +3742,6 @@ const char* Heap::GarbageCollectionReasonToString(
return "testing";
case GarbageCollectionReason::kExternalFinalize:
return "external finalize";
case GarbageCollectionReason::kGlobalAllocationLimit:
return "global allocation limit";
case GarbageCollectionReason::kUnknown:
return "unknown";
}
......@@ -4560,10 +4538,8 @@ Heap::IncrementalMarkingLimit Heap::IncrementalMarkingLimitReached() {
}
size_t old_generation_space_available = OldGenerationSpaceAvailable();
size_t global_available = GlobalMemorySpaceAvailable();
if (old_generation_space_available > new_space_->Capacity() &&
global_available > 0) {
if (old_generation_space_available > new_space_->Capacity()) {
return IncrementalMarkingLimit::kNoLimit;
}
if (ShouldOptimizeForMemoryUsage()) {
......@@ -4729,7 +4705,6 @@ void Heap::SetUp() {
store_buffer_ = new StoreBuffer(this);
heap_controller_ = new HeapController(this);
global_controller_ = new GlobalMemoryController(this);
mark_compact_collector_ = new MarkCompactCollector(this);
incremental_marking_ =
......@@ -4973,11 +4948,6 @@ void Heap::TearDown() {
heap_controller_ = nullptr;
}
if (global_controller_ != nullptr) {
delete global_controller_;
global_controller_ = nullptr;
}
if (mark_compact_collector_ != nullptr) {
mark_compact_collector_->TearDown();
delete mark_compact_collector_;
......
......@@ -173,7 +173,6 @@ class GCIdleTimeHandler;
class GCIdleTimeHeapState;
class GCTracer;
class HeapController;
class GlobalMemoryController;
class HeapObjectAllocationTracker;
class HeapObjectsFilter;
class HeapStats;
......@@ -181,7 +180,6 @@ class HistogramTimer;
class Isolate;
class LocalEmbedderHeapTracer;
class MemoryAllocator;
class MemoryController;
class MemoryReducer;
class MinorMarkCompactCollector;
class ObjectIterator;
......@@ -237,8 +235,7 @@ enum class GarbageCollectionReason {
kSamplingProfiler = 19,
kSnapshotCreator = 20,
kTesting = 21,
kExternalFinalize = 22,
kGlobalAllocationLimit = 23
kExternalFinalize = 22
// If you add new items here, then update the incremental_marking_reason,
// mark_compact_reason, and scavenge_reason counters in counters.h.
// Also update src/tools/metrics/histograms/histograms.xml in chromium.
......@@ -674,10 +671,15 @@ class Heap {
int64_t external_memory_hard_limit() { return MaxOldGenerationSize() / 2; }
int64_t external_memory() { return external_memory_; }
int64_t backing_story_bytes() const { return backing_store_bytes_; }
void update_external_memory(int64_t delta) { external_memory_ += delta; }
void update_backing_store_bytes(int64_t amount) {
backing_store_bytes_ += amount;
void update_external_memory_concurrently_freed(intptr_t freed) {
external_memory_concurrently_freed_ += freed;
}
void account_external_memory_concurrently_freed() {
external_memory_ -= external_memory_concurrently_freed_;
external_memory_concurrently_freed_ = 0;
}
void ProcessMovedExternalString(Page* old_page, Page* new_page,
......@@ -1689,9 +1691,7 @@ class Heap {
// Flush the number to string cache.
void FlushNumberStringCache();
size_t ConfigureInitialControllerSize(MemoryController* controller,
size_t curr_limit);
void ConfigureInitialAllocationLimits();
void ConfigureInitialOldGenerationSize();
bool HasLowYoungGenerationAllocationRate();
bool HasLowOldGenerationAllocationRate();
......@@ -1806,13 +1806,6 @@ class Heap {
OldGenerationObjectsAndPromotedExternalMemorySize());
}
inline size_t GlobalMemorySpaceAvailable() {
size_t global_used = OldGenerationSizeOfObjects() + backing_store_bytes_;
return global_used > global_memory_allocation_limit_
? 0
: global_memory_allocation_limit_ - global_used;
}
// We allow incremental marking to overshoot the allocation limit for
// performance reasons. If the overshoot is too large then we are more
// eager to finalize incremental marking.
......@@ -1847,7 +1840,6 @@ class Heap {
// ===========================================================================
HeapController* heap_controller() { return heap_controller_; }
GlobalMemoryController* global_controller() { return global_controller_; }
MemoryReducer* memory_reducer() { return memory_reducer_; }
// For some webpages RAIL mode does not switch from PERFORMANCE_LOAD.
......@@ -1858,12 +1850,6 @@ class Heap {
bool ShouldOptimizeForLoadTime();
void set_allocation_limits(size_t old_generation_allocation_limit,
size_t global_memory_allocation_limit) {
old_generation_allocation_limit_ = old_generation_allocation_limit;
global_memory_allocation_limit_ = global_memory_allocation_limit;
}
size_t old_generation_allocation_limit() const {
return old_generation_allocation_limit_;
}
......@@ -1978,8 +1964,8 @@ class Heap {
// Caches the amount of external memory registered at the last MC.
int64_t external_memory_at_last_mark_compact_;
// Backing store bytes (array buffers and external strings).
std::atomic<int64_t> backing_store_bytes_;
// The amount of memory that has been freed concurrently.
std::atomic<intptr_t> external_memory_concurrently_freed_;
// This can be calculated directly from a pointer to the heap; however, it is
// more expedient to get at the isolate directly from within Heap methods.
......@@ -2012,7 +1998,6 @@ class Heap {
size_t max_semi_space_size_;
size_t initial_semispace_size_;
size_t max_old_generation_size_;
size_t max_global_memory_size_;
size_t initial_max_old_generation_size_;
size_t initial_old_generation_size_;
bool old_generation_size_configured_;
......@@ -2113,10 +2098,6 @@ class Heap {
// generation and on every allocation in large object space.
size_t old_generation_allocation_limit_;
// The limit when to trigger memory pressure. This limit accounts for JS
// memory and external memory (array buffers and external strings).
size_t global_memory_allocation_limit_;
// Indicates that inline bump-pointer allocation has been globally disabled
// for all spaces. This is used to disable allocations in generated code.
bool inline_allocation_disabled_;
......@@ -2170,7 +2151,6 @@ class Heap {
StoreBuffer* store_buffer_;
HeapController* heap_controller_;
GlobalMemoryController* global_controller_;
IncrementalMarking* incremental_marking_;
ConcurrentMarking* concurrent_marking_;
......
......@@ -803,6 +803,7 @@ void MarkCompactCollector::Prepare() {
space = spaces.next()) {
space->PrepareForMarkCompact();
}
heap()->account_external_memory_concurrently_freed();
#ifdef VERIFY_HEAP
if (!was_marked_incrementally_ && FLAG_verify_heap) {
......@@ -3848,6 +3849,8 @@ void MinorMarkCompactCollector::CollectGarbage() {
RememberedSet<OLD_TO_NEW>::PreFreeEmptyBuckets(chunk);
}
});
heap()->account_external_memory_concurrently_freed();
}
void MinorMarkCompactCollector::MakeIterable(
......
......@@ -1003,13 +1003,11 @@ class Space : public Malloced {
void IncrementExternalBackingStoreBytes(ExternalBackingStoreType type,
size_t amount) {
external_backing_store_bytes_[type] += amount;
heap()->update_backing_store_bytes(static_cast<int64_t>(amount));
}
void DecrementExternalBackingStoreBytes(ExternalBackingStoreType type,
size_t amount) {
DCHECK_GE(external_backing_store_bytes_[type], amount);
external_backing_store_bytes_[type] -= amount;
heap()->update_backing_store_bytes(-static_cast<int64_t>(amount));
}
V8_EXPORT_PRIVATE void* GetRandomMmapAddr();
......
......@@ -51,14 +51,15 @@ TEST_F(HeapControllerTest, HeapGrowingFactor) {
TEST_F(HeapControllerTest, MaxHeapGrowingFactor) {
HeapController heap_controller(i_isolate()->heap());
CheckEqualRounded(
1.3, heap_controller.MaxGrowingFactor(HeapController::kMinSize * MB));
1.3, heap_controller.MaxGrowingFactor(heap_controller.kMinSize * MB));
CheckEqualRounded(1.600, heap_controller.MaxGrowingFactor(
HeapController::kMaxSize / 2 * MB));
heap_controller.kMaxSize / 2 * MB));
CheckEqualRounded(
1.999, heap_controller.MaxGrowingFactor(
(HeapController::kMaxSize - Heap::kPointerMultiplier) * MB));
CheckEqualRounded(
4.0, heap_controller.MaxGrowingFactor(HeapController::kMaxSize * MB));
(heap_controller.kMaxSize - Heap::kPointerMultiplier) * MB));
CheckEqualRounded(4.0,
heap_controller.MaxGrowingFactor(
static_cast<size_t>(heap_controller.kMaxSize) * MB));
}
TEST_F(HeapControllerTest, OldGenerationAllocationLimit) {
......@@ -74,43 +75,39 @@ TEST_F(HeapControllerTest, OldGenerationAllocationLimit) {
double factor =
heap_controller.GrowingFactor(gc_speed, mutator_speed, max_factor);
EXPECT_EQ(
static_cast<size_t>(old_gen_size * factor + new_space_capacity),
EXPECT_EQ(static_cast<size_t>(old_gen_size * factor + new_space_capacity),
heap->heap_controller()->CalculateAllocationLimit(
old_gen_size, max_old_generation_size, max_factor, gc_speed,
mutator_speed, new_space_capacity, Heap::HeapGrowingMode::kDefault));
old_gen_size, max_old_generation_size, gc_speed, mutator_speed,
new_space_capacity, Heap::HeapGrowingMode::kDefault));
factor = Min(factor, heap_controller.kConservativeGrowingFactor);
EXPECT_EQ(
static_cast<size_t>(old_gen_size * factor + new_space_capacity),
EXPECT_EQ(static_cast<size_t>(old_gen_size * factor + new_space_capacity),
heap->heap_controller()->CalculateAllocationLimit(
old_gen_size, max_old_generation_size, max_factor, gc_speed,
mutator_speed, new_space_capacity, Heap::HeapGrowingMode::kSlow));
old_gen_size, max_old_generation_size, gc_speed, mutator_speed,
new_space_capacity, Heap::HeapGrowingMode::kSlow));
factor = Min(factor, heap_controller.kConservativeGrowingFactor);
EXPECT_EQ(static_cast<size_t>(old_gen_size * factor + new_space_capacity),
heap->heap_controller()->CalculateAllocationLimit(
old_gen_size, max_old_generation_size, max_factor, gc_speed,
mutator_speed, new_space_capacity,
Heap::HeapGrowingMode::kConservative));
old_gen_size, max_old_generation_size, gc_speed, mutator_speed,
new_space_capacity, Heap::HeapGrowingMode::kConservative));
factor = heap_controller.kMinGrowingFactor;
EXPECT_EQ(
static_cast<size_t>(old_gen_size * factor + new_space_capacity),
EXPECT_EQ(static_cast<size_t>(old_gen_size * factor + new_space_capacity),
heap->heap_controller()->CalculateAllocationLimit(
old_gen_size, max_old_generation_size, max_factor, gc_speed,
mutator_speed, new_space_capacity, Heap::HeapGrowingMode::kMinimal));
old_gen_size, max_old_generation_size, gc_speed, mutator_speed,
new_space_capacity, Heap::HeapGrowingMode::kMinimal));
}
TEST_F(HeapControllerTest, MaxOldGenerationSize) {
HeapController heap_controller(i_isolate()->heap());
uint64_t configurations[][2] = {
{0, HeapController::kMinSize},
{512, HeapController::kMinSize},
{0, heap_controller.kMinSize},
{512, heap_controller.kMinSize},
{1 * GB, 256 * Heap::kPointerMultiplier},
{2 * static_cast<uint64_t>(GB), 512 * Heap::kPointerMultiplier},
{4 * static_cast<uint64_t>(GB), HeapController::kMaxSize},
{8 * static_cast<uint64_t>(GB), HeapController::kMaxSize}};
{4 * static_cast<uint64_t>(GB), heap_controller.kMaxSize},
{8 * static_cast<uint64_t>(GB), heap_controller.kMaxSize}};
for (auto configuration : configurations) {
ASSERT_EQ(configuration[1],
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment