Commit 0872da37 authored by Michael Starzinger, committed by Commit Bot

[wasm] Make {CloneCodeHelper} a private helper class.

R=clemensh@chromium.org

Change-Id: I510c8e771ab1c84d094205e247054d8057d2dc85
Reviewed-on: https://chromium-review.googlesource.com/997845
Reviewed-by: Clemens Hammacher <clemensh@chromium.org>
Commit-Queue: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#52402}
parent 6823c0a4
......@@ -63,39 +63,6 @@ void GenerateJumpTrampoline(MacroAssembler* masm, Address target) {
const bool kModuleCanAllocateMoreMemory = true;
#endif
// Patches a freshly cloned {new_code} so it is valid at its new location:
// relative relocation entries are shifted by the distance between the two
// copies, and code-target call sites are redirected through {reverse_lookup}
// (address in the original module -> address in the cloned module).
// Flushes the instruction cache over the new code when {flush_icache} is set.
void PatchTrampolineAndStubCalls(
const WasmCode* original_code, const WasmCode* new_code,
const std::unordered_map<Address, Address, AddressHasher>& reverse_lookup,
WasmCode::FlushICache flush_icache) {
// Relocate everything in kApplyMask using this delta, and patch all code
// targets to call the new trampolines and stubs.
intptr_t delta =
new_code->instructions().start() - original_code->instructions().start();
int mask = RelocInfo::kApplyMask | RelocInfo::kCodeTargetMask;
// Walk the reloc entries of the original and the clone in lockstep; the
// loop advances both iterators together, which assumes the clone carries
// identical reloc info to the original.
RelocIterator orig_it(original_code->instructions(),
original_code->reloc_info(),
original_code->constant_pool(), mask);
for (RelocIterator it(new_code->instructions(), new_code->reloc_info(),
new_code->constant_pool(), mask);
!it.done(); it.next(), orig_it.next()) {
if (RelocInfo::IsCodeTarget(it.rinfo()->rmode())) {
// Read the call target from the *original* code, not the clone.
Address target = orig_it.rinfo()->target_address();
#if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_S390X
// On these architectures every target must have a counterpart in the
// new module; map it via the reverse lookup table.
auto found = reverse_lookup.find(target);
DCHECK(found != reverse_lookup.end());
target = found->second;
#endif
it.rinfo()->set_target_address(target, SKIP_WRITE_BARRIER);
} else {
// Non-code-target entries (pc-relative data etc.) just move by delta.
it.rinfo()->apply(delta);
}
}
if (flush_icache) {
Assembler::FlushICache(new_code->instructions().start(),
new_code->instructions().size());
}
}
void RelocateCode(WasmCode* code, const WasmCode* orig,
WasmCode::FlushICache flush_icache) {
intptr_t delta = code->instructions().start() - orig->instructions().start();
......@@ -345,6 +312,30 @@ WasmCode::~WasmCode() {
}
}
// Helper class to selectively clone and patch code from a
// {source_native_module} into a {cloning_native_module}.
class NativeModule::CloneCodeHelper {
public:
explicit CloneCodeHelper(NativeModule* source_native_module,
NativeModule* cloning_native_module);
// Marks the code object at {code_index} to be copied by a later
// {CloneAndPatchCode} call.
void SelectForCloning(int32_t code_index);
// Clones the selected code into {cloning_native_module_}; optionally also
// patches calls between the cloned stubs themselves.
void CloneAndPatchCode(bool patch_stub_to_stub_calls);
// Fixes up {new_code} (a clone of {original_code}): applies relative
// relocations and redirects code targets using {reverse_lookup_}.
void PatchTrampolineAndStubCalls(const WasmCode* original_code,
const WasmCode* new_code,
WasmCode::FlushICache flush_icache);
private:
void PatchStubToStubCalls();
NativeModule* source_native_module_;
NativeModule* cloning_native_module_;
// Indices of the code objects selected for cloning.
std::vector<uint32_t> selection_;
// Maps trampoline/stub addresses in the source module to the corresponding
// addresses in the cloning module.
std::unordered_map<Address, Address, AddressHasher> reverse_lookup_;
};
NativeModule::CloneCodeHelper::CloneCodeHelper(
NativeModule* source_native_module, NativeModule* cloning_native_module)
: source_native_module_(source_native_module),
......@@ -388,14 +379,14 @@ void NativeModule::CloneCodeHelper::CloneAndPatchCode(
if (!original_code->IsAnonymous()) {
WasmCode* new_code = cloning_native_module_->CloneCode(
original_code, WasmCode::kNoFlushICache);
PatchTrampolineAndStubCalls(original_code, new_code, reverse_lookup_,
PatchTrampolineAndStubCalls(original_code, new_code,
WasmCode::kFlushICache);
break;
}
if (anonymous_lazy_builtin == nullptr) {
WasmCode* new_code = cloning_native_module_->CloneCode(
original_code, WasmCode::kNoFlushICache);
PatchTrampolineAndStubCalls(original_code, new_code, reverse_lookup_,
PatchTrampolineAndStubCalls(original_code, new_code,
WasmCode::kFlushICache);
anonymous_lazy_builtin = new_code;
}
......@@ -404,7 +395,7 @@ void NativeModule::CloneCodeHelper::CloneAndPatchCode(
case WasmCode::kFunction: {
WasmCode* new_code = cloning_native_module_->CloneCode(
original_code, WasmCode::kNoFlushICache);
PatchTrampolineAndStubCalls(original_code, new_code, reverse_lookup_,
PatchTrampolineAndStubCalls(original_code, new_code,
WasmCode::kFlushICache);
} break;
default:
......@@ -417,8 +408,39 @@ void NativeModule::CloneCodeHelper::PatchStubToStubCalls() {
for (auto& pair : cloning_native_module_->stubs_) {
WasmCode* new_stub = pair.second;
WasmCode* old_stub = source_native_module_->stubs_.find(pair.first)->second;
PatchTrampolineAndStubCalls(old_stub, new_stub, reverse_lookup_,
WasmCode::kFlushICache);
PatchTrampolineAndStubCalls(old_stub, new_stub, WasmCode::kFlushICache);
}
}
// Fixes up {new_code}, a clone of {original_code}, for its new location:
// shifts relative relocations by the inter-copy distance and redirects code
// targets through the member {reverse_lookup_} table (original-module address
// -> cloning-module address). Flushes the icache when {flush_icache} is set.
void NativeModule::CloneCodeHelper::PatchTrampolineAndStubCalls(
const WasmCode* original_code, const WasmCode* new_code,
WasmCode::FlushICache flush_icache) {
// Relocate everything in kApplyMask using this delta, and patch all code
// targets to call the new trampolines and stubs.
intptr_t delta =
new_code->instructions().start() - original_code->instructions().start();
int mask = RelocInfo::kApplyMask | RelocInfo::kCodeTargetMask;
// Iterate original and clone in lockstep; this assumes both carry
// identical reloc info.
RelocIterator orig_it(original_code->instructions(),
original_code->reloc_info(),
original_code->constant_pool(), mask);
for (RelocIterator it(new_code->instructions(), new_code->reloc_info(),
new_code->constant_pool(), mask);
!it.done(); it.next(), orig_it.next()) {
if (RelocInfo::IsCodeTarget(it.rinfo()->rmode())) {
// The authoritative target is read from the original code.
Address target = orig_it.rinfo()->target_address();
#if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_S390X
// Map the old target to its counterpart in the new module; it must
// already have been registered in {reverse_lookup_}.
auto found = reverse_lookup_.find(target);
DCHECK(found != reverse_lookup_.end());
target = found->second;
#endif
it.rinfo()->set_target_address(target, SKIP_WRITE_BARRIER);
} else {
// Everything else in the mask is simply displaced by delta.
it.rinfo()->apply(delta);
}
}
if (flush_icache) {
Assembler::FlushICache(new_code->instructions().start(),
new_code->instructions().size());
}
}
......
......@@ -26,16 +26,10 @@ class WasmCompiledModule;
namespace wasm {
using GlobalHandleAddress = Address;
class NativeModule;
class WasmCodeManager;
struct WasmModule;
// Hash functor allowing raw code addresses to key unordered containers.
struct AddressHasher {
  size_t operator()(const Address& address) const {
    const intptr_t as_int = reinterpret_cast<intptr_t>(address);
    return std::hash<intptr_t>{}(as_int);
  }
};
// Sorted, disjoint and non-overlapping memory ranges. A range is of the
// form [start, end). So there's no [start, end), [end, other_end),
// because that should have been reduced to [start, other_end).
......@@ -203,8 +197,6 @@ class V8_EXPORT_PRIVATE WasmCode final {
// Return a textual description of the kind.
const char* GetWasmCodeKindAsString(WasmCode::Kind);
class WasmCodeManager;
// Note that we currently need to add code on the main thread, because we may
// trigger a GC if we believe there's a chance the GC would clear up native
// modules. The code is ready for concurrency otherwise, we just need to be
......@@ -212,26 +204,6 @@ class WasmCodeManager;
// WasmCodeManager::Commit.
class V8_EXPORT_PRIVATE NativeModule final {
public:
// Helper class to selectively clone and patch code from a
// {source_native_module} into a {cloning_native_module}.
class CloneCodeHelper {
public:
explicit CloneCodeHelper(NativeModule* source_native_module,
NativeModule* cloning_native_module);
// Marks the code object at {code_index} to be copied by a later
// {CloneAndPatchCode} call.
void SelectForCloning(int32_t code_index);
// Clones the selected code into {cloning_native_module_}; optionally also
// patches calls between the cloned stubs themselves.
void CloneAndPatchCode(bool patch_stub_to_stub_calls);
private:
void PatchStubToStubCalls();
NativeModule* source_native_module_;
NativeModule* cloning_native_module_;
// Indices of the code objects selected for cloning.
std::vector<uint32_t> selection_;
// Maps addresses in the source module to the corresponding addresses in
// the cloning module.
std::unordered_map<Address, Address, AddressHasher> reverse_lookup_;
};
std::unique_ptr<NativeModule> Clone();
WasmCode* AddCode(const CodeDesc& desc, uint32_t frame_count, uint32_t index,
......@@ -302,6 +274,13 @@ class V8_EXPORT_PRIVATE NativeModule final {
friend class NativeModuleDeserializer;
friend class NativeModuleModificationScope;
class CloneCodeHelper;
// Hash functor so Address values can be used as unordered_map keys.
struct AddressHasher {
  size_t operator()(const Address& address) const {
    const intptr_t as_int = reinterpret_cast<intptr_t>(address);
    return std::hash<intptr_t>{}(as_int);
  }
};
static base::AtomicNumber<size_t> next_id_;
NativeModule(uint32_t num_functions, uint32_t num_imports,
bool can_request_more, VirtualMemory* vmem,
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment