Commit 226b8c86 authored by Manos Koukoutos, committed by V8 LUCI CQ

[wasm] Refactoring ahead of element segment changes

See related CL for context.

Changes:
- In InitExprInterface, add the ability to evaluate function references
  as index only. Remove the global buffers and use the ones passed with
  the instance object instead.
- In WasmElemSegment, add a field indicating if elements should be
  parsed as expressions or indices. Change module-decoder.cc to reflect
  this change.
- In module-instantiate, change the signatures of LoadElemSegment,
  LoadElemSegmentImpl, and EvaluateInitExpr. Move the latter out of
  InstanceBuilder.

Change-Id: I1df54393b2005fba49380654bdd40429bd4869dd
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3364081
Reviewed-by: Andreas Haas <ahaas@chromium.org>
Commit-Queue: Manos Koukoutos <manoskouk@chromium.org>
Cr-Commit-Position: refs/heads/main@{#78470}
parent 76d47dec
......@@ -52,14 +52,20 @@ void InitExprInterface::RefNull(FullDecoder* decoder, ValueType type,
void InitExprInterface::RefFunc(FullDecoder* decoder, uint32_t function_index,
Value* result) {
if (isolate_ != nullptr) {
auto internal = WasmInstanceObject::GetOrCreateWasmInternalFunction(
isolate_, instance_, function_index);
result->runtime_value = WasmValue(
internal, ValueType::Ref(module_->functions[function_index].sig_index,
kNonNullable));
} else {
if (isolate_ == nullptr) {
outer_module_->functions[function_index].declared = true;
return;
}
ValueType type = ValueType::Ref(module_->functions[function_index].sig_index,
kNonNullable);
if (function_strictness_ == kStrictFunctions) {
Handle<WasmInternalFunction> internal =
WasmInstanceObject::GetOrCreateWasmInternalFunction(isolate_, instance_,
function_index);
result->runtime_value = WasmValue(internal, type);
} else {
result->runtime_value =
WasmValue(handle(Smi::FromInt(function_index), isolate_), type);
}
}
......@@ -67,11 +73,18 @@ void InitExprInterface::GlobalGet(FullDecoder* decoder, Value* result,
const GlobalIndexImmediate<validate>& imm) {
if (isolate_ == nullptr) return;
const WasmGlobal& global = module_->globals[imm.index];
DCHECK(!global.mutability);
result->runtime_value =
global.type.is_numeric()
? WasmValue(GetRawUntaggedGlobalPtr(global), global.type)
: WasmValue(handle(tagged_globals_->get(global.offset), isolate_),
global.type);
? WasmValue(
reinterpret_cast<byte*>(
instance_->untagged_globals_buffer().backing_store()) +
global.offset,
global.type)
: WasmValue(
handle(instance_->tagged_globals_buffer().get(global.offset),
isolate_),
global.type);
}
void InitExprInterface::StructNewWithRtt(
......@@ -175,11 +188,6 @@ void InitExprInterface::DoReturn(FullDecoder* decoder,
if (isolate_ != nullptr) result_ = decoder->stack_value(1)->runtime_value;
}
// Returns a raw pointer into the module's untagged (numeric) globals buffer
// at this global's byte offset. Deleted by this CL: callers now compute the
// pointer inline from instance_->untagged_globals_buffer() instead of the
// cached untagged_globals_ handle (see the GlobalGet hunk above).
byte* InitExprInterface::GetRawUntaggedGlobalPtr(const WasmGlobal& global) {
return reinterpret_cast<byte*>(untagged_globals_->backing_store()) +
global.offset;
}
} // namespace wasm
} // namespace internal
} // namespace v8
......@@ -44,16 +44,20 @@ class InitExprInterface {
using FullDecoder =
WasmFullDecoder<validate, InitExprInterface, decoding_mode>;
// Controls how function references are computed. {kLazyFunctions} means that
// only the index of the function will be computed, as a Smi.
// {kStrictFunctions} means we fully compute the function reference through
// {WasmInstanceObject::GetOrCreateWasmInternalFunction}.
enum FunctionStrictness { kLazyFunctions, kStrictFunctions };
InitExprInterface(const WasmModule* module, Isolate* isolate,
Handle<WasmInstanceObject> instance,
Handle<FixedArray> tagged_globals,
Handle<JSArrayBuffer> untagged_globals)
FunctionStrictness function_strictness)
: module_(module),
outer_module_(nullptr),
isolate_(isolate),
instance_(instance),
tagged_globals_(tagged_globals),
untagged_globals_(untagged_globals) {
function_strictness_(function_strictness) {
DCHECK_NOT_NULL(isolate);
}
......@@ -81,16 +85,13 @@ class InitExprInterface {
bool end_found() { return end_found_; }
private:
byte* GetRawUntaggedGlobalPtr(const WasmGlobal& global);
bool end_found_ = false;
WasmValue result_;
const WasmModule* module_;
WasmModule* outer_module_;
Isolate* isolate_;
Handle<WasmInstanceObject> instance_;
Handle<FixedArray> tagged_globals_;
Handle<JSArrayBuffer> untagged_globals_;
FunctionStrictness function_strictness_;
};
} // namespace wasm
......
......@@ -990,9 +990,7 @@ class ModuleDecoderImpl : public Decoder {
consume_count("element count", FLAG_wasm_max_table_size);
for (uint32_t i = 0; i < element_count; ++i) {
bool expressions_as_elements;
WasmElemSegment segment =
consume_element_segment_header(&expressions_as_elements);
WasmElemSegment segment = consume_element_segment_header();
if (failed()) return;
DCHECK_NE(segment.type, kWasmBottom);
......@@ -1001,7 +999,7 @@ class ModuleDecoderImpl : public Decoder {
for (uint32_t j = 0; j < num_elem; j++) {
WasmElemSegment::Entry init =
expressions_as_elements
segment.element_type == WasmElemSegment::kExpressionElements
? consume_element_expr()
: WasmElemSegment::Entry(WasmElemSegment::Entry::kRefFuncEntry,
consume_element_func_index());
......@@ -1910,8 +1908,7 @@ class ModuleDecoderImpl : public Decoder {
return attribute;
}
WasmElemSegment consume_element_segment_header(
bool* expressions_as_elements) {
WasmElemSegment consume_element_segment_header() {
const byte* pos = pc();
// The mask for the bit in the flag which indicates if the segment is
......@@ -1941,7 +1938,10 @@ class ModuleDecoderImpl : public Decoder {
: WasmElemSegment::kStatusActive;
const bool is_active = status == WasmElemSegment::kStatusActive;
*expressions_as_elements = flag & kExpressionsAsElementsMask;
WasmElemSegment::ElementType element_type =
flag & kExpressionsAsElementsMask
? WasmElemSegment::kExpressionElements
: WasmElemSegment::kFunctionIndexElements;
const bool has_table_index =
is_active && (flag & kHasTableIndexOrIsDeclarativeMask);
......@@ -1965,7 +1965,7 @@ class ModuleDecoderImpl : public Decoder {
const bool backwards_compatible_mode =
is_active && !(flag & kHasTableIndexOrIsDeclarativeMask);
ValueType type;
if (*expressions_as_elements) {
if (element_type == WasmElemSegment::kExpressionElements) {
type =
backwards_compatible_mode ? kWasmFuncRef : consume_reference_type();
if (is_active && !IsSubtypeOf(type, table_type, this->module_.get())) {
......@@ -2008,9 +2008,9 @@ class ModuleDecoderImpl : public Decoder {
}
if (is_active) {
return {type, table_index, std::move(offset)};
return {type, table_index, std::move(offset), element_type};
} else {
return {type, status == WasmElemSegment::kStatusDeclarative};
return {type, status, element_type};
}
}
......
......@@ -349,6 +349,9 @@ class InstanceBuilder {
std::vector<Handle<WasmTagObject>> tags_wrappers_;
Handle<WasmExportedFunction> start_function_;
std::vector<SanitizedImport> sanitized_imports_;
// We pass this {Zone} to the temporary {WasmFullDecoder} we allocate during
// each call to {EvaluateInitExpression}. This has been found to improve
// performance a bit over allocating a new {Zone} each time.
Zone init_expr_zone_;
// Helper routines to print out errors with imports.
......@@ -448,9 +451,6 @@ class InstanceBuilder {
// Process initialization of globals.
void InitGlobals(Handle<WasmInstanceObject> instance);
WasmValue EvaluateInitExpression(WireBytesRef init, ValueType expected,
Handle<WasmInstanceObject> instance);
// Process the exports, creating wrappers for functions, tables, memories,
// and globals.
void ProcessExports(Handle<WasmInstanceObject> instance);
......@@ -914,6 +914,28 @@ bool HasDefaultToNumberBehaviour(Isolate* isolate,
// Just a default function, which will convert to "Nan". Accept this.
return true;
}
// Decodes and evaluates a single wasm init expression (global initializer,
// data/elem segment offset, table initial value) against {instance}.
// {init} references the expression's bytes inside the module's wire bytes;
// {expected} is the single result type the expression must produce.
// {function_strictness} controls how ref.func is materialized: with
// kStrictFunctions a full WasmInternalFunction is created, with
// kLazyFunctions only the function index is produced (as a Smi) — see
// InitExprInterface::FunctionStrictness. Moved out of InstanceBuilder by
// this CL so LoadElemSegmentImpl can call it with a caller-supplied {zone}.
WasmValue EvaluateInitExpression(
Zone* zone, WireBytesRef init, ValueType expected, Isolate* isolate,
Handle<WasmInstanceObject> instance,
InitExprInterface::FunctionStrictness function_strictness =
InitExprInterface::kStrictFunctions) {
base::Vector<const byte> module_bytes =
instance->module_object().native_module()->wire_bytes();
// Build a signature () -> {expected} and wrap the expression bytes as a
// function body for the full decoder.
FunctionBody body(FunctionSig::Build(zone, {expected}, {}), init.offset(),
module_bytes.begin() + init.offset(),
module_bytes.begin() + init.end_offset());
WasmFeatures detected;
// We use kFullValidation so we do not have to create another instance of
// WasmFullDecoder, which would cost us >50Kb binary code size.
WasmFullDecoder<Decoder::kFullValidation, InitExprInterface, kInitExpression>
decoder(zone, instance->module(), WasmFeatures::All(), &detected, body,
instance->module(), isolate, instance, function_strictness);
decoder.DecodeFunctionBody();
return decoder.interface().result();
}
} // namespace
// Look up an import value in the {ffi_} object specifically for linking an
......@@ -975,7 +997,8 @@ void InstanceBuilder::LoadDataSegments(Handle<WasmInstanceObject> instance) {
size_t dest_offset;
if (module_->is_memory64) {
uint64_t dest_offset_64 =
EvaluateInitExpression(segment.dest_addr, kWasmI64, instance)
EvaluateInitExpression(&init_expr_zone_, segment.dest_addr, kWasmI64,
isolate_, instance)
.to_u64();
// Clamp to {std::numeric_limits<size_t>::max()}, which is always an
// invalid offset.
......@@ -983,9 +1006,9 @@ void InstanceBuilder::LoadDataSegments(Handle<WasmInstanceObject> instance) {
dest_offset = static_cast<size_t>(std::min(
dest_offset_64, uint64_t{std::numeric_limits<size_t>::max()}));
} else {
dest_offset =
EvaluateInitExpression(segment.dest_addr, kWasmI32, instance)
.to_u32();
dest_offset = EvaluateInitExpression(&init_expr_zone_, segment.dest_addr,
kWasmI32, isolate_, instance)
.to_u32();
}
if (!base::IsInBounds<size_t>(dest_offset, size, instance->memory_size())) {
......@@ -1651,26 +1674,6 @@ T* InstanceBuilder::GetRawUntaggedGlobalPtr(const WasmGlobal& global) {
return reinterpret_cast<T*>(raw_buffer_ptr(untagged_globals_, global.offset));
}
// Pre-CL member version of EvaluateInitExpression, deleted by this change:
// it decodes the init expression in {init} and returns its value of type
// {expected}. It implicitly used the builder's init_expr_zone_, module_,
// isolate_ and the cached tagged_/untagged_ globals handles; the replacement
// free function (see above in this diff) takes the zone and isolate
// explicitly and reads the global buffers from the instance object.
WasmValue InstanceBuilder::EvaluateInitExpression(
WireBytesRef init, ValueType expected,
Handle<WasmInstanceObject> instance) {
base::Vector<const byte> module_bytes =
instance->module_object().native_module()->wire_bytes();
FunctionBody body(FunctionSig::Build(&init_expr_zone_, {expected}, {}),
init.offset(), module_bytes.begin() + init.offset(),
module_bytes.begin() + init.end_offset());
WasmFeatures detected;
// We use kFullValidation so we do not have to create another instance of
// WasmFullDecoder, which would cost us >50Kb binary code size.
WasmFullDecoder<Decoder::kFullValidation, InitExprInterface, kInitExpression>
decoder(&init_expr_zone_, module_, WasmFeatures::All(), &detected, body,
module_, isolate_, instance, tagged_globals_, untagged_globals_);
decoder.DecodeFunctionBody();
return decoder.interface().result();
}
// Process initialization of globals.
void InstanceBuilder::InitGlobals(Handle<WasmInstanceObject> instance) {
for (const WasmGlobal& global : module_->globals) {
......@@ -1678,8 +1681,8 @@ void InstanceBuilder::InitGlobals(Handle<WasmInstanceObject> instance) {
// Happens with imported globals.
if (!global.init.is_set()) continue;
WasmValue value =
EvaluateInitExpression(global.init, global.type, instance);
WasmValue value = EvaluateInitExpression(&init_expr_zone_, global.init,
global.type, isolate_, instance);
if (global.type.is_reference()) {
tagged_globals_->set(global.offset, *value.to_ref());
......@@ -1960,7 +1963,8 @@ void InstanceBuilder::InitializeIndirectFunctionTables(
auto table_object = handle(
WasmTableObject::cast(instance->tables().get(table_index)), isolate_);
Handle<Object> value =
EvaluateInitExpression(table.initial_value, table.type, instance)
EvaluateInitExpression(&init_expr_zone_, table.initial_value,
table.type, isolate_, instance)
.to_ref();
if (value.is_null()) {
for (uint32_t entry_index = 0; entry_index < table.initial_size;
......@@ -1994,7 +1998,9 @@ void InstanceBuilder::InitializeIndirectFunctionTables(
}
}
bool LoadElemSegmentImpl(Isolate* isolate, Handle<WasmInstanceObject> instance,
namespace {
bool LoadElemSegmentImpl(Zone* zone, Isolate* isolate,
Handle<WasmInstanceObject> instance,
Handle<WasmTableObject> table_object,
uint32_t table_index, uint32_t segment_index,
uint32_t dst, uint32_t src, size_t count) {
......@@ -2049,6 +2055,7 @@ bool LoadElemSegmentImpl(Isolate* isolate, Handle<WasmInstanceObject> instance,
}
return true;
}
} // namespace
void InstanceBuilder::LoadTableSegments(Handle<WasmInstanceObject> instance) {
for (uint32_t segment_index = 0;
......@@ -2058,14 +2065,14 @@ void InstanceBuilder::LoadTableSegments(Handle<WasmInstanceObject> instance) {
if (elem_segment.status != WasmElemSegment::kStatusActive) continue;
uint32_t table_index = elem_segment.table_index;
uint32_t dst =
EvaluateInitExpression(elem_segment.offset, kWasmI32, instance)
.to_u32();
uint32_t dst = EvaluateInitExpression(&init_expr_zone_, elem_segment.offset,
kWasmI32, isolate_, instance)
.to_u32();
uint32_t src = 0;
size_t count = elem_segment.entries.size();
bool success = LoadElemSegmentImpl(
isolate_, instance,
&init_expr_zone_, isolate_, instance,
handle(WasmTableObject::cast(
instance->tables().get(elem_segment.table_index)),
isolate_),
......@@ -2108,8 +2115,13 @@ void InstanceBuilder::InitializeTags(Handle<WasmInstanceObject> instance) {
bool LoadElemSegment(Isolate* isolate, Handle<WasmInstanceObject> instance,
uint32_t table_index, uint32_t segment_index, uint32_t dst,
uint32_t src, uint32_t count) {
AccountingAllocator allocator;
// This {Zone} will be used only by the temporary WasmFullDecoder allocated
// down the line from this call. Therefore it is safe to stack-allocate it
// here.
Zone zone(&allocator, "LoadElemSegment");
return LoadElemSegmentImpl(
isolate, instance,
&zone, isolate, instance,
handle(WasmTableObject::cast(instance->tables().get(table_index)),
isolate),
table_index, segment_index, dst, src, count);
......
......@@ -23,6 +23,7 @@ class JSReceiver;
class WasmInitExpr;
class WasmModuleObject;
class WasmInstanceObject;
class Zone;
template <typename T>
class Handle;
......
......@@ -116,45 +116,53 @@ struct WasmDataSegment {
// Static representation of wasm element segment (table initializer).
struct WasmElemSegment {
enum Status {
kStatusActive, // copied automatically during instantiation.
kStatusPassive, // copied explicitly after instantiation.
kStatusDeclarative // purely declarative and never copied.
};
struct Entry {
enum Kind { kGlobalGetEntry, kRefFuncEntry, kRefNullEntry } kind;
uint32_t index;
Entry(Kind kind, uint32_t index) : kind(kind), index(index) {}
Entry() : kind(kRefNullEntry), index(0) {}
};
enum ElementType { kFunctionIndexElements, kExpressionElements };
// Construct an active segment.
WasmElemSegment(ValueType type, uint32_t table_index, WireBytesRef offset)
: type(type),
WasmElemSegment(ValueType type, uint32_t table_index, WireBytesRef offset,
ElementType element_type)
: status(kStatusActive),
type(type),
table_index(table_index),
offset(std::move(offset)),
status(kStatusActive) {}
element_type(element_type) {}
// Construct a passive or declarative segment, which has no table index or
// offset.
WasmElemSegment(ValueType type, bool declarative)
: type(type),
table_index(0),
status(declarative ? kStatusDeclarative : kStatusPassive) {}
WasmElemSegment(ValueType type, Status status, ElementType element_type)
: status(status), type(type), table_index(0), element_type(element_type) {
DCHECK_NE(status, kStatusActive);
}
// Construct a passive or declarative segment, which has no table index or
// offset.
// Default constructor. Constructs an invalid segment.
WasmElemSegment()
: type(kWasmBottom), table_index(0), status(kStatusActive) {}
: status(kStatusActive),
type(kWasmBottom),
table_index(0),
element_type(kFunctionIndexElements) {}
WasmElemSegment(const WasmElemSegment&) = delete;
WasmElemSegment(WasmElemSegment&&) V8_NOEXCEPT = default;
WasmElemSegment& operator=(const WasmElemSegment&) = delete;
WasmElemSegment& operator=(WasmElemSegment&&) V8_NOEXCEPT = default;
Status status;
ValueType type;
uint32_t table_index;
WireBytesRef offset;
struct Entry {
enum Kind { kGlobalGetEntry, kRefFuncEntry, kRefNullEntry } kind;
uint32_t index;
Entry(Kind kind, uint32_t index) : kind(kind), index(index) {}
Entry() : kind(kRefNullEntry), index(0) {}
};
ElementType element_type;
std::vector<Entry> entries;
enum Status {
kStatusActive, // copied automatically during instantiation.
kStatusPassive, // copied explicitly after instantiation.
kStatusDeclarative // purely declarative and never copied.
} status;
};
// Static representation of a wasm import.
......
......@@ -326,7 +326,9 @@ uint32_t TestingModuleBuilder::AddPassiveElementSegment(
uint32_t index = static_cast<uint32_t>(test_module_->elem_segments.size());
DCHECK_EQ(index, dropped_elem_segments_.size());
test_module_->elem_segments.emplace_back(kWasmFuncRef, false);
test_module_->elem_segments.emplace_back(
kWasmFuncRef, WasmElemSegment::kStatusPassive,
WasmElemSegment::kFunctionIndexElements);
auto& elem_segment = test_module_->elem_segments.back();
for (uint32_t entry : entries) {
elem_segment.entries.push_back(
......
......@@ -159,20 +159,15 @@ class TestModuleBuilder {
}
byte AddPassiveElementSegment(wasm::ValueType type) {
mod.elem_segments.emplace_back(type, false);
auto& init = mod.elem_segments.back();
// Add 5 empty elements.
for (uint32_t j = 0; j < 5; j++) {
init.entries.push_back(WasmElemSegment::Entry(
WasmElemSegment::Entry::kRefNullEntry, type.heap_representation()));
}
mod.elem_segments.emplace_back(type, WasmElemSegment::kStatusPassive,
WasmElemSegment::kExpressionElements);
return static_cast<byte>(mod.elem_segments.size() - 1);
}
byte AddDeclarativeElementSegment() {
mod.elem_segments.emplace_back(kWasmFuncRef, true);
mod.elem_segments.back().entries.push_back(WasmElemSegment::Entry(
WasmElemSegment::Entry::kRefNullEntry, HeapType::kFunc));
mod.elem_segments.emplace_back(kWasmFuncRef,
WasmElemSegment::kStatusDeclarative,
WasmElemSegment::kExpressionElements);
return static_cast<byte>(mod.elem_segments.size() - 1);
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment