Commit f2823886 authored by Ulan Degenbaev, committed by Commit Bot

[heap] Clean up Heap::ConfigureHeap

This re-arranges the implementation of the function to make it more
consistent. The only functional change is the replacement of RoundUp with
RoundDown, which makes more sense for the limits (illustrated in the sketch
after the commit metadata below).

Bug: v8:9306
Change-Id: Id1d4bc6cc414e3618c3878de8cb87a9ed59711f5
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1643432
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#61997}
parent 3fcbfdaa
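The RoundUp → RoundDown change is easy to see with stand-in helpers. The
sketch below is an editor's illustration, not part of the CL: kPageSize and
the two helper templates are simplified stand-ins for V8's Page::kPageSize
and its base RoundUp/RoundDown utilities. It shows that rounding a configured
maximum *up* can hand out more memory than was asked for, while rounding
*down* never exceeds the request:

#include <cstddef>
#include <cstdio>

constexpr size_t kPageSize = 256 * 1024;  // illustrative page size (256 KB)

// Stand-ins for V8's alignment helpers; alignment must be a power of two.
template <size_t alignment>
constexpr size_t RoundDown(size_t value) {
  return value & ~(alignment - 1);
}

template <size_t alignment>
constexpr size_t RoundUp(size_t value) {
  return RoundDown<alignment>(value + alignment - 1);
}

int main() {
  size_t requested_limit = 1 * 1024 * 1024 + 1;  // just over 1 MB
  // RoundUp grants 1.25 MB here -- more than the configured maximum.
  printf("up:   %zu\n", RoundUp<kPageSize>(requested_limit));
  // RoundDown grants exactly 1 MB -- never more than what was requested.
  printf("down: %zu\n", RoundDown<kPageSize>(requested_limit));
}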
@@ -987,22 +987,6 @@ void ResourceConstraints::ConfigureDefaults(uint64_t physical_memory,
   }
 }
 
-void SetResourceConstraints(i::Isolate* isolate,
-                            const ResourceConstraints& constraints) {
-  size_t semi_space_size = constraints.max_semi_space_size_in_kb();
-  size_t old_space_size = constraints.max_old_space_size();
-  size_t code_range_size = constraints.code_range_size();
-  if (semi_space_size != 0 || old_space_size != 0 || code_range_size != 0) {
-    isolate->heap()->ConfigureHeap(semi_space_size, old_space_size,
-                                   code_range_size);
-  }
-  if (constraints.stack_limit() != nullptr) {
-    uintptr_t limit = reinterpret_cast<uintptr_t>(constraints.stack_limit());
-    isolate->stack_guard()->SetStackLimit(limit);
-  }
-}
-
 i::Address* V8::GlobalizeReference(i::Isolate* isolate, i::Address* obj) {
   LOG_API(isolate, Persistent, New);
   i::Handle<i::Object> result = isolate->global_handles()->Create(*obj);
@@ -7820,7 +7804,12 @@ void Isolate::Initialize(Isolate* isolate,
   i_isolate->set_api_external_references(params.external_references);
   i_isolate->set_allow_atomics_wait(params.allow_atomics_wait);
-  SetResourceConstraints(i_isolate, params.constraints);
+  i_isolate->heap()->ConfigureHeap(params.constraints);
+  if (params.constraints.stack_limit() != nullptr) {
+    uintptr_t limit =
+        reinterpret_cast<uintptr_t>(params.constraints.stack_limit());
+    i_isolate->stack_guard()->SetStackLimit(limit);
+  }
   // TODO(jochen): Once we got rid of Isolate::Current(), we can remove this.
   Isolate::Scope isolate_scope(isolate);
   if (!i::Snapshot::Initialize(i_isolate)) {
@@ -4279,85 +4279,82 @@ void Heap::IterateBuiltins(RootVisitor* v) {
 // TODO(1236194): Since the heap size is configurable on the command line
 // and through the API, we should gracefully handle the case that the heap
 // size is not big enough to fit all the initial objects.
-void Heap::ConfigureHeap(size_t max_semi_space_size_in_kb,
-                         size_t max_old_generation_size_in_mb,
-                         size_t code_range_size_in_mb) {
-  // Overwrite default configuration.
-  if (max_semi_space_size_in_kb != 0) {
-    max_semi_space_size_ =
-        RoundUp<Page::kPageSize>(max_semi_space_size_in_kb * KB);
-  }
-  if (max_old_generation_size_in_mb != 0) {
-    max_old_generation_size_ = max_old_generation_size_in_mb * MB;
-  }
-
-  // If max space size flags are specified overwrite the configuration.
-  if (FLAG_max_semi_space_size > 0) {
-    max_semi_space_size_ = static_cast<size_t>(FLAG_max_semi_space_size) * MB;
-  }
-  if (FLAG_max_old_space_size > 0) {
-    max_old_generation_size_ =
-        static_cast<size_t>(FLAG_max_old_space_size) * MB;
-  }
-
-  if (Page::kPageSize > MB) {
-    max_semi_space_size_ = RoundUp<Page::kPageSize>(max_semi_space_size_);
-    max_old_generation_size_ =
-        RoundUp<Page::kPageSize>(max_old_generation_size_);
-  }
-
-  if (FLAG_stress_compaction) {
-    // This will cause more frequent GCs when stressing.
-    max_semi_space_size_ = MB;
-  }
-
-  // The new space size must be a power of two to support single-bit testing
-  // for containment.
-  max_semi_space_size_ = static_cast<size_t>(base::bits::RoundUpToPowerOfTwo64(
-      static_cast<uint64_t>(max_semi_space_size_)));
-
-  if (max_semi_space_size_ == kMaxSemiSpaceSize) {
-    // Start with at least 1*MB semi-space on machines with a lot of memory.
-    initial_semispace_size_ =
-        Max(initial_semispace_size_, static_cast<size_t>(1 * MB));
-  }
-
-  if (FLAG_min_semi_space_size > 0) {
-    size_t initial_semispace_size =
-        static_cast<size_t>(FLAG_min_semi_space_size) * MB;
-    if (initial_semispace_size > max_semi_space_size_) {
-      initial_semispace_size_ = max_semi_space_size_;
-      if (FLAG_trace_gc) {
-        PrintIsolate(isolate_,
-                     "Min semi-space size cannot be more than the maximum "
-                     "semi-space size of %zu MB\n",
-                     max_semi_space_size_ / MB);
-      }
-    } else {
-      initial_semispace_size_ =
-          RoundUp<Page::kPageSize>(initial_semispace_size);
-    }
-  }
-
-  initial_semispace_size_ = Min(initial_semispace_size_, max_semi_space_size_);
+void Heap::ConfigureHeap(const v8::ResourceConstraints& constraints) {
+  // Initialize max_semi_space_size_.
+  {
+    if (constraints.max_semi_space_size_in_kb() > 0) {
+      max_semi_space_size_ = constraints.max_semi_space_size_in_kb() * KB;
+    }
+    if (FLAG_max_semi_space_size > 0) {
+      max_semi_space_size_ = static_cast<size_t>(FLAG_max_semi_space_size) * MB;
+    }
+    if (FLAG_stress_compaction) {
+      // This will cause more frequent GCs when stressing.
+      max_semi_space_size_ = MB;
+    }
+    // The new space size must be a power of two to support single-bit testing
+    // for containment.
+    // TODO(ulan): Rounding to a power of 2 is not longer needed. Remove it.
+    max_semi_space_size_ =
+        static_cast<size_t>(base::bits::RoundUpToPowerOfTwo64(
+            static_cast<uint64_t>(max_semi_space_size_)));
+    max_semi_space_size_ = Max(max_semi_space_size_, kMinSemiSpaceSize);
+    max_semi_space_size_ = RoundDown<Page::kPageSize>(max_semi_space_size_);
+  }
+
+  // Initialize max_old_generation_size_.
+  {
+    if (constraints.max_old_space_size() > 0) {
+      max_old_generation_size_ = constraints.max_old_space_size() * MB;
+    }
+    if (FLAG_max_old_space_size > 0) {
+      max_old_generation_size_ =
+          static_cast<size_t>(FLAG_max_old_space_size) * MB;
+    }
+    int paged_space_count =
+        LAST_GROWABLE_PAGED_SPACE - FIRST_GROWABLE_PAGED_SPACE + 1;
+    max_old_generation_size_ =
+        Max(max_old_generation_size_,
+            static_cast<size_t>(paged_space_count * Page::kPageSize));
+    max_old_generation_size_ =
+        RoundDown<Page::kPageSize>(max_old_generation_size_);
+  }
+
+  // Initialize initial_semispace_size_.
+  {
+    if (max_semi_space_size_ == kMaxSemiSpaceSize) {
+      // Start with at least 1*MB semi-space on machines with a lot of memory.
+      initial_semispace_size_ =
+          Max(initial_semispace_size_, static_cast<size_t>(1 * MB));
+    }
+    if (FLAG_min_semi_space_size > 0) {
+      initial_semispace_size_ =
+          static_cast<size_t>(FLAG_min_semi_space_size) * MB;
+    }
+    initial_semispace_size_ =
+        Min(initial_semispace_size_, max_semi_space_size_);
+    initial_semispace_size_ =
+        RoundDown<Page::kPageSize>(initial_semispace_size_);
+  }
+
+  // Initialize initial_old_space_size_.
+  {
+    initial_old_generation_size_ = kMaxInitialOldGenerationSize;
+    if (FLAG_initial_old_space_size > 0) {
+      initial_old_generation_size_ =
+          static_cast<size_t>(FLAG_initial_old_space_size) * MB;
+      old_generation_size_configured_ = true;
+    }
+    initial_old_generation_size_ =
+        Min(initial_old_generation_size_, max_old_generation_size_);
+    initial_old_generation_size_ =
+        RoundDown<Page::kPageSize>(initial_old_generation_size_);
+  }
 
   if (FLAG_semi_space_growth_factor < 2) {
     FLAG_semi_space_growth_factor = 2;
   }
 
-  // The old generation is paged and needs at least one page for each space.
-  int paged_space_count =
-      LAST_GROWABLE_PAGED_SPACE - FIRST_GROWABLE_PAGED_SPACE + 1;
-  initial_max_old_generation_size_ = max_old_generation_size_ =
-      Max(static_cast<size_t>(paged_space_count * Page::kPageSize),
-          max_old_generation_size_);
-
-  if (FLAG_initial_old_space_size > 0) {
-    initial_old_generation_size_ = FLAG_initial_old_space_size * MB;
-  } else {
-    initial_old_generation_size_ =
-        Min(max_old_generation_size_, kMaxInitialOldGenerationSize);
-  }
   old_generation_allocation_limit_ = initial_old_generation_size_;
 
   // We rely on being able to allocate new arrays in paged spaces.
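An aside on the "single-bit testing for containment" comment kept in the hunk
above: when a space's size is a power of two and its base is size-aligned,
membership of an address can be tested by masking off the low bits and
comparing the result against the base, which is how older V8 tested new-space
containment. A minimal sketch of the idea (editor's illustration; the names
are not V8's):

#include <cstdint>

// Returns true iff addr lies inside [base, base + size), given that size is
// a power of two and base is size-aligned: masking the low bits of any
// address inside the region recovers exactly base.
bool InRegion(uintptr_t addr, uintptr_t base, uintptr_t size) {
  return (addr & ~(size - 1)) == base;
}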
@@ -4366,12 +4363,11 @@ void Heap::ConfigureHeap(size_t max_semi_space_size_in_kb,
           FixedArray::SizeFor(JSArray::kInitialMaxFastElementArray) +
           AllocationMemento::kSize));
 
-  code_range_size_ = code_range_size_in_mb * MB;
+  code_range_size_ = constraints.code_range_size() * MB;
 
   configured_ = true;
 }
 
 void Heap::AddToRingBuffer(const char* string) {
   size_t first_part =
       Min(strlen(string), kTraceRingBufferSize - ring_buffer_end_);
@@ -4395,7 +4391,10 @@ void Heap::GetFromRingBuffer(char* buffer) {
   memcpy(buffer + copied, trace_ring_buffer_, ring_buffer_end_);
 }
 
-void Heap::ConfigureHeapDefault() { ConfigureHeap(0, 0, 0); }
+void Heap::ConfigureHeapDefault() {
+  v8::ResourceConstraints constraints;
+  ConfigureHeap(constraints);
+}
 
 void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
   *stats->start_marker = HeapStats::kStartMarker;
@@ -623,13 +623,7 @@ class Heap {
   // Initialization. ===========================================================
   // ===========================================================================
 
-  // Configure heap sizes
-  // max_semi_space_size_in_kb: maximum semi-space size in KB
-  // max_old_generation_size_in_mb: maximum old generation size in MB
-  // code_range_size_in_mb: code range size in MB
-  void ConfigureHeap(size_t max_semi_space_size_in_kb,
-                     size_t max_old_generation_size_in_mb,
-                     size_t code_range_size_in_mb);
+  void ConfigureHeap(const v8::ResourceConstraints& constraints);
   void ConfigureHeapDefault();
 
   // Prepares the heap, setting up for deserialization.
@@ -264,7 +264,9 @@ int main(int argc, char** argv) {
             ? i::kMaxPCRelativeCodeRangeInMB
             : std::min(i::kMaximalCodeRangeSize / i::MB,
                        i::kMaxPCRelativeCodeRangeInMB);
-    i_isolate->heap()->ConfigureHeap(0, 0, code_range_size);
+    v8::ResourceConstraints constraints;
+    constraints.set_code_range_size(code_range_size);
+    i_isolate->heap()->ConfigureHeap(constraints);
     // The isolate contains data from builtin compilation that needs
     // to be written out if builtins are embedded.
     i_isolate->RegisterEmbeddedFileWriter(&embedded_writer);
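With SetResourceConstraints gone, embedders reach the same configuration
through the public API: Isolate::Initialize now forwards
CreateParams::constraints straight to Heap::ConfigureHeap (see the api.cc hunk
above). A rough embedder-side sketch, an editor's illustration rather than
part of this CL, assuming the v8.h API of this era and that the platform has
already been initialized:

#include "include/v8.h"

v8::Isolate* NewIsolateWithCodeRange(size_t code_range_size_in_mb) {
  v8::Isolate::CreateParams params;
  // code_range_size is specified in MB in this version of the API.
  params.constraints.set_code_range_size(code_range_size_in_mb);
  // CreateParams requires an ArrayBuffer allocator; the caller owns it and
  // must keep it alive for the isolate's lifetime.
  params.array_buffer_allocator =
      v8::ArrayBuffer::Allocator::NewDefaultAllocator();
  return v8::Isolate::New(params);
}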