Commit 5ad8474a authored by Clemens Backes, committed by Commit Bot

[wasm] Remove always-on --wasm-far-jump-table flag

This shipped in v7.9, which has been stable for six weeks. We no longer
test the previous configuration and do not plan to move back, hence
remove the flag and clean up the code.

R=ahaas@chromium.org

Bug: v8:10155
Change-Id: I6b981f4be686473a911f041952cb684749d9fe7e
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2030732
Commit-Queue: Clemens Backes <clemensb@chromium.org>
Reviewed-by: Andreas Haas <ahaas@chromium.org>
Cr-Commit-Position: refs/heads/master@{#66063}
parent d8bb229d
...@@ -781,9 +781,6 @@ DEFINE_BOOL(wasm_code_gc, true, "enable garbage collection of wasm code") ...@@ -781,9 +781,6 @@ DEFINE_BOOL(wasm_code_gc, true, "enable garbage collection of wasm code")
DEFINE_BOOL(trace_wasm_code_gc, false, "trace garbage collection of wasm code") DEFINE_BOOL(trace_wasm_code_gc, false, "trace garbage collection of wasm code")
DEFINE_BOOL(stress_wasm_code_gc, false, DEFINE_BOOL(stress_wasm_code_gc, false,
"stress test garbage collection of wasm code") "stress test garbage collection of wasm code")
DEFINE_BOOL(wasm_far_jump_table, true,
"use multiple separate code spaces that might require far jumps "
"between them")
DEFINE_INT(wasm_max_initial_code_space_reservation, 0, DEFINE_INT(wasm_max_initial_code_space_reservation, 0,
"maximum size of the initial wasm code space reservation (in MB)") "maximum size of the initial wasm code space reservation (in MB)")
......
...@@ -20,9 +20,8 @@ namespace wasm { ...@@ -20,9 +20,8 @@ namespace wasm {
// other purposes: // other purposes:
// - the far stub table contains one entry per wasm runtime stub (see // - the far stub table contains one entry per wasm runtime stub (see
// {WasmCode::RuntimeStubId}, which jumps to the corresponding embedded // {WasmCode::RuntimeStubId}, which jumps to the corresponding embedded
// builtin, plus (if {FLAG_wasm_far_jump_table} is enabled and not the full // builtin, plus (if not the full address space can be reached via the jump
// address space can be reached via the jump table) one entry per wasm // table) one entry per wasm function.
// function.
// - the lazy compile table contains one entry per wasm function which jumps to // - the lazy compile table contains one entry per wasm function which jumps to
// the common {WasmCompileLazy} builtin and passes the function index that was // the common {WasmCompileLazy} builtin and passes the function index that was
// invoked. // invoked.
......
...@@ -440,13 +440,11 @@ void WasmCodeAllocator::OptionalLock::Lock(WasmCodeAllocator* allocator) { ...@@ -440,13 +440,11 @@ void WasmCodeAllocator::OptionalLock::Lock(WasmCodeAllocator* allocator) {
WasmCodeAllocator::WasmCodeAllocator(WasmCodeManager* code_manager, WasmCodeAllocator::WasmCodeAllocator(WasmCodeManager* code_manager,
VirtualMemory code_space, VirtualMemory code_space,
bool can_request_more,
std::shared_ptr<Counters> async_counters) std::shared_ptr<Counters> async_counters)
: code_manager_(code_manager), : code_manager_(code_manager),
free_code_space_(code_space.region()), free_code_space_(code_space.region()),
can_request_more_memory_(can_request_more),
async_counters_(std::move(async_counters)) { async_counters_(std::move(async_counters)) {
owned_code_space_.reserve(can_request_more ? 4 : 1); owned_code_space_.reserve(4);
owned_code_space_.emplace_back(std::move(code_space)); owned_code_space_.emplace_back(std::move(code_space));
async_counters_->wasm_module_num_code_spaces()->AddSample(1); async_counters_->wasm_module_num_code_spaces()->AddSample(1);
} }
...@@ -502,8 +500,7 @@ base::SmallVector<base::AddressRegion, 1> SplitRangeByReservationsIfNeeded( ...@@ -502,8 +500,7 @@ base::SmallVector<base::AddressRegion, 1> SplitRangeByReservationsIfNeeded(
} }
int NumWasmFunctionsInFarJumpTable(uint32_t num_declared_functions) { int NumWasmFunctionsInFarJumpTable(uint32_t num_declared_functions) {
return NativeModule::kNeedsFarJumpsBetweenCodeSpaces && return NativeModule::kNeedsFarJumpsBetweenCodeSpaces
FLAG_wasm_far_jump_table
? static_cast<int>(num_declared_functions) ? static_cast<int>(num_declared_functions)
: 0; : 0;
} }
...@@ -571,12 +568,8 @@ Vector<byte> WasmCodeAllocator::AllocateForCodeInRegion( ...@@ -571,12 +568,8 @@ Vector<byte> WasmCodeAllocator::AllocateForCodeInRegion(
base::AddressRegion code_space = base::AddressRegion code_space =
free_code_space_.AllocateInRegion(size, region); free_code_space_.AllocateInRegion(size, region);
if (code_space.is_empty()) { if (code_space.is_empty()) {
const bool in_specific_region = if (region.size() < std::numeric_limits<size_t>::max()) {
region.size() < std::numeric_limits<size_t>::max(); V8::FatalProcessOutOfMemory(nullptr, "wasm code reservation in region");
if (!can_request_more_memory_ || in_specific_region) {
auto error = in_specific_region ? "wasm code reservation in region"
: "wasm code reservation";
V8::FatalProcessOutOfMemory(nullptr, error);
UNREACHABLE(); UNREACHABLE();
} }
...@@ -649,14 +642,11 @@ bool WasmCodeAllocator::SetExecutable(bool executable) { ...@@ -649,14 +642,11 @@ bool WasmCodeAllocator::SetExecutable(bool executable) {
executable ? PageAllocator::kReadExecute : PageAllocator::kReadWrite; executable ? PageAllocator::kReadExecute : PageAllocator::kReadWrite;
#if V8_OS_WIN #if V8_OS_WIN
// On windows, we need to switch permissions per separate virtual memory // On windows, we need to switch permissions per separate virtual memory
// reservation. This is really just a problem when the NativeModule is // reservation.
// growable (meaning can_request_more_memory_). That's 32-bit in production,
// or unittests.
// For now, in that case, we commit at reserved memory granularity. // For now, in that case, we commit at reserved memory granularity.
// Technically, that may be a waste, because we may reserve more than we // Technically, that may be a waste, because we may reserve more than we
// use. On 32-bit though, the scarce resource is the address space - // use. On 32-bit though, the scarce resource is the address space -
// committed or not. // committed or not.
if (can_request_more_memory_) {
for (auto& vmem : owned_code_space_) { for (auto& vmem : owned_code_space_) {
if (!SetPermissions(page_allocator, vmem.address(), vmem.size(), if (!SetPermissions(page_allocator, vmem.address(), vmem.size(),
permission)) { permission)) {
...@@ -665,10 +655,7 @@ bool WasmCodeAllocator::SetExecutable(bool executable) { ...@@ -665,10 +655,7 @@ bool WasmCodeAllocator::SetExecutable(bool executable) {
TRACE_HEAP("Set %p:%p to executable:%d\n", vmem.address(), vmem.end(), TRACE_HEAP("Set %p:%p to executable:%d\n", vmem.address(), vmem.end(),
executable); executable);
} }
is_executable_ = executable; #else // V8_OS_WIN
return true;
}
#endif
size_t commit_page_size = page_allocator->CommitPageSize(); size_t commit_page_size = page_allocator->CommitPageSize();
for (auto& region : allocated_code_space_.regions()) { for (auto& region : allocated_code_space_.regions()) {
// allocated_code_space_ is fine-grained, so we need to // allocated_code_space_ is fine-grained, so we need to
...@@ -681,6 +668,7 @@ bool WasmCodeAllocator::SetExecutable(bool executable) { ...@@ -681,6 +668,7 @@ bool WasmCodeAllocator::SetExecutable(bool executable) {
TRACE_HEAP("Set 0x%" PRIxPTR ":0x%" PRIxPTR " to executable:%d\n", TRACE_HEAP("Set 0x%" PRIxPTR ":0x%" PRIxPTR " to executable:%d\n",
region.begin(), region.end(), executable); region.begin(), region.end(), executable);
} }
#endif // V8_OS_WIN
} }
is_executable_ = executable; is_executable_ = executable;
return true; return true;
...@@ -730,12 +718,12 @@ size_t WasmCodeAllocator::GetNumCodeSpaces() const { ...@@ -730,12 +718,12 @@ size_t WasmCodeAllocator::GetNumCodeSpaces() const {
} }
NativeModule::NativeModule(WasmEngine* engine, const WasmFeatures& enabled, NativeModule::NativeModule(WasmEngine* engine, const WasmFeatures& enabled,
bool can_request_more, VirtualMemory code_space, VirtualMemory code_space,
std::shared_ptr<const WasmModule> module, std::shared_ptr<const WasmModule> module,
std::shared_ptr<Counters> async_counters, std::shared_ptr<Counters> async_counters,
std::shared_ptr<NativeModule>* shared_this) std::shared_ptr<NativeModule>* shared_this)
: code_allocator_(engine->code_manager(), std::move(code_space), : code_allocator_(engine->code_manager(), std::move(code_space),
can_request_more, async_counters), async_counters),
enabled_features_(enabled), enabled_features_(enabled),
module_(std::move(module)), module_(std::move(module)),
import_wrapper_cache_(std::unique_ptr<WasmImportWrapperCache>( import_wrapper_cache_(std::unique_ptr<WasmImportWrapperCache>(
...@@ -844,8 +832,8 @@ WasmCode* NativeModule::AddCodeForTesting(Handle<Code> code) { ...@@ -844,8 +832,8 @@ WasmCode* NativeModule::AddCodeForTesting(Handle<Code> code) {
// Apply the relocation delta by iterating over the RelocInfo. // Apply the relocation delta by iterating over the RelocInfo.
intptr_t delta = reinterpret_cast<Address>(dst_code_bytes.begin()) - intptr_t delta = reinterpret_cast<Address>(dst_code_bytes.begin()) -
code->InstructionStart(); code->InstructionStart();
int mode_mask = RelocInfo::kApplyMask | int mode_mask =
RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL); RelocInfo::kApplyMask | RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL);
auto jump_tables_ref = auto jump_tables_ref =
FindJumpTablesForCode(reinterpret_cast<Address>(dst_code_bytes.begin())); FindJumpTablesForCode(reinterpret_cast<Address>(dst_code_bytes.begin()));
Address dst_code_addr = reinterpret_cast<Address>(dst_code_bytes.begin()); Address dst_code_addr = reinterpret_cast<Address>(dst_code_bytes.begin());
...@@ -1657,8 +1645,7 @@ size_t WasmCodeManager::EstimateNativeModuleMetaDataSize( ...@@ -1657,8 +1645,7 @@ size_t WasmCodeManager::EstimateNativeModuleMetaDataSize(
std::shared_ptr<NativeModule> WasmCodeManager::NewNativeModule( std::shared_ptr<NativeModule> WasmCodeManager::NewNativeModule(
WasmEngine* engine, Isolate* isolate, const WasmFeatures& enabled, WasmEngine* engine, Isolate* isolate, const WasmFeatures& enabled,
size_t code_size_estimate, bool can_request_more, size_t code_size_estimate, std::shared_ptr<const WasmModule> module) {
std::shared_ptr<const WasmModule> module) {
DCHECK_EQ(this, isolate->wasm_engine()->code_manager()); DCHECK_EQ(this, isolate->wasm_engine()->code_manager());
if (total_committed_code_space_.load() > if (total_committed_code_space_.load() >
critical_committed_code_space_.load()) { critical_committed_code_space_.load()) {
...@@ -1672,9 +1659,7 @@ std::shared_ptr<NativeModule> WasmCodeManager::NewNativeModule( ...@@ -1672,9 +1659,7 @@ std::shared_ptr<NativeModule> WasmCodeManager::NewNativeModule(
// If we cannot add code space later, reserve enough address space up front. // If we cannot add code space later, reserve enough address space up front.
size_t code_vmem_size = size_t code_vmem_size =
can_request_more ? ReservationSize(code_size_estimate, ReservationSize(code_size_estimate, module->num_declared_functions, 0);
module->num_declared_functions, 0)
: kMaxWasmCodeSpaceSize;
// The '--wasm-max-code-space-reservation' testing flag can be used to reduce // The '--wasm-max-code-space-reservation' testing flag can be used to reduce
// the maximum size of the initial code space reservation (in MB). // the maximum size of the initial code space reservation (in MB).
...@@ -1705,8 +1690,8 @@ std::shared_ptr<NativeModule> WasmCodeManager::NewNativeModule( ...@@ -1705,8 +1690,8 @@ std::shared_ptr<NativeModule> WasmCodeManager::NewNativeModule(
size_t size = code_space.size(); size_t size = code_space.size();
Address end = code_space.end(); Address end = code_space.end();
std::shared_ptr<NativeModule> ret; std::shared_ptr<NativeModule> ret;
new NativeModule(engine, enabled, can_request_more, std::move(code_space), new NativeModule(engine, enabled, std::move(code_space), std::move(module),
std::move(module), isolate->async_counters(), &ret); isolate->async_counters(), &ret);
// The constructor initialized the shared_ptr. // The constructor initialized the shared_ptr.
DCHECK_NOT_NULL(ret); DCHECK_NOT_NULL(ret);
TRACE_HEAP("New NativeModule %p: Mem: %" PRIuPTR ",+%zu\n", ret.get(), start, TRACE_HEAP("New NativeModule %p: Mem: %" PRIuPTR ",+%zu\n", ret.get(), start,
......
...@@ -327,7 +327,6 @@ class WasmCodeAllocator { ...@@ -327,7 +327,6 @@ class WasmCodeAllocator {
}; };
WasmCodeAllocator(WasmCodeManager*, VirtualMemory code_space, WasmCodeAllocator(WasmCodeManager*, VirtualMemory code_space,
bool can_request_more,
std::shared_ptr<Counters> async_counters); std::shared_ptr<Counters> async_counters);
~WasmCodeAllocator(); ~WasmCodeAllocator();
...@@ -391,10 +390,6 @@ class WasmCodeAllocator { ...@@ -391,10 +390,6 @@ class WasmCodeAllocator {
bool is_executable_ = false; bool is_executable_ = false;
// TODO(clemensb): Remove this field once multiple code spaces are supported
// everywhere.
const bool can_request_more_memory_;
std::shared_ptr<Counters> async_counters_; std::shared_ptr<Counters> async_counters_;
}; };
...@@ -584,7 +579,7 @@ class V8_EXPORT_PRIVATE NativeModule final { ...@@ -584,7 +579,7 @@ class V8_EXPORT_PRIVATE NativeModule final {
// Private constructor, called via {WasmCodeManager::NewNativeModule()}. // Private constructor, called via {WasmCodeManager::NewNativeModule()}.
NativeModule(WasmEngine* engine, const WasmFeatures& enabled_features, NativeModule(WasmEngine* engine, const WasmFeatures& enabled_features,
bool can_request_more, VirtualMemory code_space, VirtualMemory code_space,
std::shared_ptr<const WasmModule> module, std::shared_ptr<const WasmModule> module,
std::shared_ptr<Counters> async_counters, std::shared_ptr<Counters> async_counters,
std::shared_ptr<NativeModule>* shared_this); std::shared_ptr<NativeModule>* shared_this);
...@@ -757,7 +752,7 @@ class V8_EXPORT_PRIVATE WasmCodeManager final { ...@@ -757,7 +752,7 @@ class V8_EXPORT_PRIVATE WasmCodeManager final {
std::shared_ptr<NativeModule> NewNativeModule( std::shared_ptr<NativeModule> NewNativeModule(
WasmEngine* engine, Isolate* isolate, WasmEngine* engine, Isolate* isolate,
const WasmFeatures& enabled_features, size_t code_size_estimate, const WasmFeatures& enabled_features, size_t code_size_estimate,
bool can_request_more, std::shared_ptr<const WasmModule> module); std::shared_ptr<const WasmModule> module);
V8_WARN_UNUSED_RESULT VirtualMemory TryAllocate(size_t size, V8_WARN_UNUSED_RESULT VirtualMemory TryAllocate(size_t size,
void* hint = nullptr); void* hint = nullptr);
......
...@@ -740,13 +740,8 @@ void WasmEngine::LogOutstandingCodesForIsolate(Isolate* isolate) { ...@@ -740,13 +740,8 @@ void WasmEngine::LogOutstandingCodesForIsolate(Isolate* isolate) {
std::shared_ptr<NativeModule> WasmEngine::NewNativeModule( std::shared_ptr<NativeModule> WasmEngine::NewNativeModule(
Isolate* isolate, const WasmFeatures& enabled, Isolate* isolate, const WasmFeatures& enabled,
std::shared_ptr<const WasmModule> module, size_t code_size_estimate) { std::shared_ptr<const WasmModule> module, size_t code_size_estimate) {
// TODO(clemensb): Remove --wasm-far-jump-table and {can_request_more}. std::shared_ptr<NativeModule> native_module = code_manager_.NewNativeModule(
bool can_request_more = this, isolate, enabled, code_size_estimate, std::move(module));
!wasm::NativeModule::kNeedsFarJumpsBetweenCodeSpaces ||
FLAG_wasm_far_jump_table;
std::shared_ptr<NativeModule> native_module =
code_manager_.NewNativeModule(this, isolate, enabled, code_size_estimate,
can_request_more, std::move(module));
base::MutexGuard lock(&mutex_); base::MutexGuard lock(&mutex_);
auto pair = native_modules_.insert(std::make_pair( auto pair = native_modules_.insert(std::make_pair(
native_module.get(), std::make_unique<NativeModuleInfo>())); native_module.get(), std::make_unique<NativeModuleInfo>()));
......
...@@ -3,7 +3,7 @@ ...@@ -3,7 +3,7 @@
// found in the LICENSE file. // found in the LICENSE file.
// No reason to stress-opt this; save some time. // No reason to stress-opt this; save some time.
// Flags: --wasm-far-jump-table --no-stress-opt // Flags: --no-stress-opt
load('test/mjsunit/wasm/wasm-module-builder.js'); load('test/mjsunit/wasm/wasm-module-builder.js');
......
...@@ -3,7 +3,7 @@ ...@@ -3,7 +3,7 @@
// found in the LICENSE file. // found in the LICENSE file.
// Flags: --allow-natives-syntax --randomize-all-allocations // Flags: --allow-natives-syntax --randomize-all-allocations
// Flags: --wasm-far-jump-table --wasm-max-initial-code-space-reservation=1 // Flags: --wasm-max-initial-code-space-reservation=1
load('test/mjsunit/wasm/wasm-module-builder.js'); load('test/mjsunit/wasm/wasm-module-builder.js');
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment