Commit 3d215946 authored by Clemens Hammacher, committed by Commit Bot

[wasm] Allocate one far jump table per code space

This moves the code to allocate the far jump table from
{SetRuntimeStubs} to {AddCodeSpace} to allocate one such table per code
space.
Also, the {runtime_stub_table_} and {runtime_stub_entries_} fields do
not make sense any more now and are replaced by calls to
{GetNearRuntimeStubEntry} and {GetRuntimeStubId}.

R=mstarzinger@chromium.org

Bug: v8:9477
Change-Id: Ie1f5c9d4eb282270337a684c34f097d8077fdfbb
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1795348
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#63700}
parent 5b5a3608
......@@ -290,6 +290,7 @@ class LockGuard final {
};
using MutexGuard = LockGuard<Mutex>;
using RecursiveMutexGuard = LockGuard<RecursiveMutex>;
enum MutexSharedType : bool { kShared = true, kExclusive = false };
......
......@@ -85,6 +85,12 @@ class V8_EXPORT_PRIVATE JumpTableAssembler : public MacroAssembler {
return slot_index * kFarJumpTableSlotSize;
}
// Translate a far jump table offset to the index into the table.
static uint32_t FarJumpSlotOffsetToIndex(uint32_t offset) {
  // Only offsets that lie exactly on a far-jump-slot boundary are valid here.
  DCHECK_EQ(0, offset % kFarJumpTableSlotSize);
  const uint32_t slot_index = offset / kFarJumpTableSlotSize;
  return slot_index;
}
// Determine the size of a far jump table containing the given number of
// slots.
static constexpr uint32_t SizeForNumberOfFarJumpSlots(
......
......@@ -1370,7 +1370,6 @@ std::shared_ptr<NativeModule> CompileToNativeModule(
auto native_module = isolate->wasm_engine()->NewNativeModule(
isolate, enabled, std::move(module));
native_module->SetWireBytes(std::move(wire_bytes_copy));
native_module->SetRuntimeStubs(isolate);
CompileNativeModule(isolate, thrower, wasm_module, native_module.get());
if (thrower->error()) return {};
......@@ -1509,7 +1508,6 @@ void AsyncCompileJob::CreateNativeModule(
native_module_ = isolate_->wasm_engine()->NewNativeModule(
isolate_, enabled_features_, std::move(module));
native_module_->SetWireBytes({std::move(bytes_copy_), wire_bytes_.length()});
native_module_->SetRuntimeStubs(isolate_);
if (stream_) stream_->NotifyNativeModuleCreated(native_module_);
}
......
This diff is collapsed.
......@@ -318,7 +318,10 @@ class WasmCodeAllocator {
// The engine-wide wasm code manager.
WasmCodeManager* const code_manager_;
mutable base::Mutex mutex_;
// TODO(clemensh): Try to make this non-recursive again. It's recursive
// currently because {AllocateForCodeInRegion} might create a new code space,
// which recursively calls {AllocateForCodeInRegion} for the jump table.
mutable base::RecursiveMutex mutex_;
//////////////////////////////////////////////////////////////////////////////
// Protected by {mutex_}:
......@@ -392,11 +395,6 @@ class V8_EXPORT_PRIVATE NativeModule final {
// table with trampolines accordingly.
void UseLazyStub(uint32_t func_index);
// Initializes all runtime stubs by setting up entry addresses in the runtime
// stub table. It must be called exactly once per native module before adding
// other WasmCode so that runtime stub ids can be resolved during relocation.
void SetRuntimeStubs(Isolate* isolate);
// Creates a snapshot of the current state of the code table. This is useful
// to get a consistent view of the table (e.g. used by the serializer).
std::vector<WasmCode*> SnapshotCodeTable() const;
......@@ -407,13 +405,6 @@ class V8_EXPORT_PRIVATE NativeModule final {
void SetWasmSourceMap(std::unique_ptr<WasmModuleSourceMap> source_map);
WasmModuleSourceMap* GetWasmSourceMap() const;
// Look up the entry address registered for the given runtime stub id.
Address runtime_stub_entry(WasmCode::RuntimeStubId index) const {
  DCHECK_LT(index, WasmCode::kRuntimeStubCount);
  const Address stub_address = runtime_stub_entries_[index];
  // A null entry would mean the stub entries were never initialized.
  DCHECK_NE(kNullAddress, stub_address);
  return stub_address;
}
Address jump_table_start() const {
return main_jump_table_ ? main_jump_table_->instruction_start()
: kNullAddress;
......@@ -429,6 +420,12 @@ class V8_EXPORT_PRIVATE NativeModule final {
// the first jump table).
Address GetCallTargetForFunction(uint32_t func_index) const;
// Get a runtime stub entry (which is a far jump table slot) within near-call
// distance to {near_to}. Fails if {near_to} is not part of any code space of
// this module.
Address GetNearRuntimeStubEntry(WasmCode::RuntimeStubId index,
Address near_to) const;
// Reverse lookup from a given call target (i.e. a jump table slot as the
// above {GetCallTargetForFunction} returns) to a function index.
uint32_t GetFunctionIndexFromJumpTableSlot(Address slot_address) const;
......@@ -479,7 +476,11 @@ class V8_EXPORT_PRIVATE NativeModule final {
const WasmFeatures& enabled_features() const { return enabled_features_; }
const char* GetRuntimeStubName(Address runtime_stub_entry) const;
// Returns the runtime stub id that corresponds to the given address (which
// must be a far jump table slot). Returns {kRuntimeStubCount} on failure.
WasmCode::RuntimeStubId GetRuntimeStubId(Address runtime_stub_target) const;
const char* GetRuntimeStubName(Address runtime_stub_target) const;
// Sample the current code size of this module to the given counters.
enum CodeSamplingTime : int8_t { kAfterBaseline, kAfterTopTier, kSampling };
......@@ -508,6 +509,7 @@ class V8_EXPORT_PRIVATE NativeModule final {
// Bookkeeping for one reserved code space of this module.
struct CodeSpaceData {
  // Address range covered by this code space.
  base::AddressRegion region;
  // Jump table for function calls within this code space.
  WasmCode* jump_table;
  // Far jump table for this code space — presumably holds the runtime stub
  // entries reachable via near calls from code in {region}; verify against
  // {AddCodeSpace}.
  WasmCode* far_jump_table;
};
// Private constructor, called via {WasmCodeManager::NewNativeModule()}.
......@@ -577,12 +579,6 @@ class V8_EXPORT_PRIVATE NativeModule final {
// {WireBytesStorage}, held by background compile tasks.
std::shared_ptr<OwnedVector<const uint8_t>> wire_bytes_;
// Contains entry points for runtime stub calls via {WASM_STUB_CALL}.
Address runtime_stub_entries_[WasmCode::kRuntimeStubCount] = {kNullAddress};
// Jump table used for runtime stubs (i.e. trampolines to embedded builtins).
WasmCode* runtime_stub_table_ = nullptr;
// Jump table used by external calls (from JS). Wasm calls use one of the jump
// tables stored in {code_space_data_}.
WasmCode* main_jump_table_ = nullptr;
......
......@@ -289,9 +289,6 @@ class V8_EXPORT_PRIVATE NativeModuleSerializer {
Vector<WasmCode* const> code_table_;
bool write_called_;
// Reverse lookup tables for embedded addresses.
std::map<Address, uint32_t> wasm_stub_targets_lookup_;
DISALLOW_COPY_AND_ASSIGN(NativeModuleSerializer);
};
......@@ -301,11 +298,6 @@ NativeModuleSerializer::NativeModuleSerializer(
DCHECK_NOT_NULL(native_module_);
// TODO(mtrofin): persist the export wrappers. Ideally, we'd only persist
// the unique ones, i.e. the cache.
for (uint32_t i = 0; i < WasmCode::kRuntimeStubCount; ++i) {
Address addr = native_module_->runtime_stub_entry(
static_cast<WasmCode::RuntimeStubId>(i));
wasm_stub_targets_lookup_.insert(std::make_pair(addr, i));
}
}
size_t NativeModuleSerializer::MeasureCode(const WasmCode* code) const {
......@@ -400,10 +392,9 @@ void NativeModuleSerializer::WriteCode(const WasmCode* code, Writer* writer) {
SetWasmCalleeTag(iter.rinfo(), tag);
} break;
case RelocInfo::WASM_STUB_CALL: {
Address orig_target = orig_iter.rinfo()->wasm_stub_call_address();
auto stub_iter = wasm_stub_targets_lookup_.find(orig_target);
DCHECK(stub_iter != wasm_stub_targets_lookup_.end());
uint32_t tag = stub_iter->second;
Address target = orig_iter.rinfo()->wasm_stub_call_address();
uint32_t tag = native_module_->GetRuntimeStubId(target);
DCHECK_GT(WasmCode::kRuntimeStubCount, tag);
SetWasmCalleeTag(iter.rinfo(), tag);
} break;
case RelocInfo::EXTERNAL_REFERENCE: {
......@@ -564,8 +555,9 @@ bool NativeModuleDeserializer::ReadCode(uint32_t fn_index, Reader* reader) {
case RelocInfo::WASM_STUB_CALL: {
uint32_t tag = GetWasmCalleeTag(iter.rinfo());
DCHECK_LT(tag, WasmCode::kRuntimeStubCount);
Address target = native_module_->runtime_stub_entry(
static_cast<WasmCode::RuntimeStubId>(tag));
Address target = native_module_->GetNearRuntimeStubEntry(
static_cast<WasmCode::RuntimeStubId>(tag),
code->instruction_start());
iter.rinfo()->set_wasm_stub_call_address(target, SKIP_ICACHE_FLUSH);
break;
}
......@@ -628,7 +620,6 @@ MaybeHandle<WasmModuleObject> DeserializeNativeModule(
auto shared_native_module = isolate->wasm_engine()->NewNativeModule(
isolate, enabled_features, std::move(decode_result.value()));
shared_native_module->SetWireBytes(OwnedVector<uint8_t>::Of(wire_bytes_vec));
shared_native_module->SetRuntimeStubs(isolate);
Handle<FixedArray> export_wrappers;
CompileJsToWasmWrappers(isolate, shared_native_module->module(),
......
......@@ -22,10 +22,8 @@ std::shared_ptr<NativeModule> NewModule(Isolate* isolate) {
std::shared_ptr<WasmModule> module(new WasmModule);
bool can_request_more = false;
size_t size = 16384;
auto native_module = isolate->wasm_engine()->NewNativeModule(
return isolate->wasm_engine()->NewNativeModule(
isolate, kAllWasmFeatures, size, can_request_more, std::move(module));
native_module->SetRuntimeStubs(isolate);
return native_module;
}
TEST(CacheHit) {
......
......@@ -321,7 +321,6 @@ Handle<WasmInstanceObject> TestingModuleBuilder::InitInstanceObject() {
auto native_module = isolate_->wasm_engine()->NewNativeModule(
isolate_, enabled_features_, test_module_);
native_module->SetWireBytes(OwnedVector<const uint8_t>());
native_module->SetRuntimeStubs(isolate_);
Handle<WasmModuleObject> module_object =
WasmModuleObject::New(isolate_, std::move(native_module), script);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment