Commit 5c152a0f authored by Samuel Groß, committed by V8 LUCI CQ

[sandbox] Remove a number of native allocations from WasmInstanceObject

These allocations are not safe in combination with the sandbox, as they
are stored as raw pointers. Instead of turning them into ExternalPointers
(which go through the ExternalPointerTable indirection), this CL simply
turns them into on-heap ByteArrays, which is cheaper and should be
unproblematic security-wise: corrupting their contents cannot cause memory
corruption outside the sandbox address space, only incorrect behaviour
and/or further memory corruption *inside* the sandbox, which is acceptable.

Bug: chromium:1335046
Change-Id: Id2b901a58b7d6c91dd7596fca553d7c76cbc61ec
Cq-Include-Trybots: luci.v8.try:v8_linux64_heap_sandbox_dbg_ng,v8_linux_arm64_sim_heap_sandbox_dbg_ng
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3845636
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Samuel Groß <saelo@chromium.org>
Reviewed-by: Jakob Kummerow <jkummerow@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82765}
parent c37badf3
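The scheme this change relies on can be sketched as follows. This is an illustrative sketch, not V8 source: the sandbox base and size constants are hypothetical, though the shift-based encoding mirrors V8's sandboxed-pointer scheme. A pointer is stored as its offset from the sandbox base shifted into the upper bits, so any 64-bit value an attacker manages to write into such a field can only ever decode back to an address inside the sandbox.

#include <cstdint>

using Address = uintptr_t;

// Hypothetical constants; V8 derives the shift from the sandbox size
// (for a 2^40-byte sandbox and 64-bit fields, shift = 64 - 40 = 24).
constexpr Address kSandboxBase = 0x400000000000;
constexpr int kSandboxedPointerShift = 24;

// Encode: store the offset from the sandbox base in the upper bits.
uint64_t EncodeSandboxedPointer(Address ptr) {
  return static_cast<uint64_t>(ptr - kSandboxBase) << kSandboxedPointerShift;
}

// Decode: the shift leaves at most 40 offset bits, so the result always
// lands inside [kSandboxBase, kSandboxBase + 2^40), no matter what raw
// value was written into the field.
Address DecodeSandboxedPointer(uint64_t raw) {
  return kSandboxBase + (raw >> kSandboxedPointerShift);
}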
......@@ -308,6 +308,7 @@ path. Add it with -I<path> to the command line
// V8_HAS_BUILTIN_SADD_OVERFLOW - __builtin_sadd_overflow() supported
// V8_HAS_BUILTIN_SSUB_OVERFLOW - __builtin_ssub_overflow() supported
// V8_HAS_BUILTIN_UADD_OVERFLOW - __builtin_uadd_overflow() supported
// V8_HAS_BUILTIN_SMUL_OVERFLOW - __builtin_smul_overflow() supported
// V8_HAS_COMPUTED_GOTO - computed goto/labels as values
// supported
// V8_HAS_DECLSPEC_NOINLINE - __declspec(noinline) supported
......@@ -357,6 +358,7 @@ path. Add it with -I<path> to the command line
# define V8_HAS_BUILTIN_SADD_OVERFLOW (__has_builtin(__builtin_sadd_overflow))
# define V8_HAS_BUILTIN_SSUB_OVERFLOW (__has_builtin(__builtin_ssub_overflow))
# define V8_HAS_BUILTIN_UADD_OVERFLOW (__has_builtin(__builtin_uadd_overflow))
# define V8_HAS_BUILTIN_SMUL_OVERFLOW (__has_builtin(__builtin_smul_overflow))
# define V8_HAS_BUILTIN_UNREACHABLE (__has_builtin(__builtin_unreachable))
// Clang has no __has_feature for computed gotos.
......
......@@ -90,14 +90,6 @@ int64_t SignedSaturatedSub64(int64_t lhs, int64_t rhs) {
return lhs - rhs;
}
bool SignedMulOverflow32(int32_t lhs, int32_t rhs, int32_t* val) {
// Compute the result as {int64_t}, then check for overflow.
int64_t result = int64_t{lhs} * int64_t{rhs};
*val = static_cast<int32_t>(result);
using limits = std::numeric_limits<int32_t>;
return result < limits::min() || result > limits::max();
}
} // namespace bits
} // namespace base
} // namespace v8
......@@ -261,7 +261,17 @@ inline bool SignedSubOverflow32(int32_t lhs, int32_t rhs, int32_t* val) {
// SignedMulOverflow32(lhs,rhs,val) performs a signed multiplication of |lhs|
// and |rhs| and stores the result into the variable pointed to by |val| and
// returns true if the signed multiplication resulted in an overflow.
V8_BASE_EXPORT bool SignedMulOverflow32(int32_t lhs, int32_t rhs, int32_t* val);
inline bool SignedMulOverflow32(int32_t lhs, int32_t rhs, int32_t* val) {
#if V8_HAS_BUILTIN_SMUL_OVERFLOW
return __builtin_smul_overflow(lhs, rhs, val);
#else
// Compute the result as {int64_t}, then check for overflow.
int64_t result = int64_t{lhs} * int64_t{rhs};
*val = static_cast<int32_t>(result);
using limits = std::numeric_limits<int32_t>;
return result < limits::min() || result > limits::max();
#endif
}
// SignedAddOverflow64(lhs,rhs,val) performs a signed summation of |lhs| and
// |rhs| and stores the result into the variable pointed to by |val| and
......
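The overflow-checked multiply that moves inline above is used later in this CL to compute byte lengths for FixedIntegerArray and PodArray allocations. A standalone sketch of that caller pattern, with the portable branch reproduced for self-containment (the main() harness is hypothetical):

#include <cassert>
#include <cstdint>
#include <limits>

// Portable fallback, mirroring the #else branch above.
inline bool SignedMulOverflow32(int32_t lhs, int32_t rhs, int32_t* val) {
  int64_t result = int64_t{lhs} * int64_t{rhs};
  *val = static_cast<int32_t>(result);
  using limits = std::numeric_limits<int32_t>;
  return result < limits::min() || result > limits::max();
}

int main() {
  int32_t byte_length;
  // 2^20 elements of 8 bytes each: fits in int32, so no overflow.
  assert(!SignedMulOverflow32(1 << 20, 8, &byte_length));
  assert(byte_length == (1 << 23));
  // 2^30 elements of 8 bytes each: exceeds INT32_MAX, so overflow.
  assert(SignedMulOverflow32(1 << 30, 8, &byte_length));
  return 0;
}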
......@@ -2715,12 +2715,15 @@ Node* WasmGraphBuilder::BuildImportCall(const wasm::FunctionSig* sig,
// Load the target from the imported_targets array at the offset of
// {func_index}.
Node* func_index_times_pointersize = gasm_->IntMul(
func_index_intptr, gasm_->IntPtrConstant(kSystemPointerSize));
Node* imported_targets =
LOAD_INSTANCE_FIELD(ImportedFunctionTargets, MachineType::Pointer());
Node* target_node = gasm_->LoadImmutableFromObject(
MachineType::Pointer(), imported_targets, func_index_times_pointersize);
Node* offset = gasm_->IntAdd(
gasm_->IntMul(func_index_intptr,
gasm_->IntPtrConstant(kSystemPointerSize)),
gasm_->IntPtrConstant(
wasm::ObjectAccess::ToTagged(FixedArray::kObjectsOffset)));
Node* imported_targets = LOAD_INSTANCE_FIELD(ImportedFunctionTargets,
MachineType::TaggedPointer());
Node* target_node = gasm_->LoadImmutableFromObject(MachineType::Pointer(),
imported_targets, offset);
args[0] = target_node;
switch (continuation) {
......@@ -3254,25 +3257,32 @@ Node* WasmGraphBuilder::BuildCallToRuntime(Runtime::FunctionId f,
void WasmGraphBuilder::GetGlobalBaseAndOffset(const wasm::WasmGlobal& global,
Node** base, Node** offset) {
if (global.mutability && global.imported) {
Node* base_or_index = gasm_->LoadFromObject(
MachineType::UintPtr(),
LOAD_INSTANCE_FIELD(ImportedMutableGlobals, MachineType::UintPtr()),
Int32Constant(global.index * kSystemPointerSize));
Node* imported_mutable_globals = LOAD_INSTANCE_FIELD(
ImportedMutableGlobals, MachineType::TaggedPointer());
Node* field_offset = Int32Constant(
wasm::ObjectAccess::ElementOffsetInTaggedFixedAddressArray(
global.index));
if (global.type.is_reference()) {
// Load the base from the ImportedMutableGlobalsBuffer of the instance.
Node* buffers = LOAD_INSTANCE_FIELD(ImportedMutableGlobalsBuffers,
MachineType::TaggedPointer());
*base = gasm_->LoadFixedArrayElementAny(buffers, global.index);
// For this case, {base_or_index} gives the index of the global in the
// buffer. From the index, calculate the actual offset in the FixedArray.
// This is kHeaderSize + (index * kTaggedSize).
Node* index = gasm_->LoadFromObject(
MachineType::Int32(), imported_mutable_globals, field_offset);
// For this case, {index} gives the index of the global in the buffer.
// From the index, calculate the actual offset in the FixedArray. This is
// kHeaderSize + (index * kTaggedSize).
*offset = gasm_->IntAdd(
gasm_->IntMul(base_or_index, gasm_->IntPtrConstant(kTaggedSize)),
gasm_->IntMul(index, gasm_->IntPtrConstant(kTaggedSize)),
gasm_->IntPtrConstant(
wasm::ObjectAccess::ToTagged(FixedArray::kObjectsOffset)));
} else {
*base = base_or_index;
MachineType machine_type = V8_ENABLE_SANDBOX_BOOL
? MachineType::SandboxedPointer()
: MachineType::UintPtr();
*base = gasm_->LoadFromObject(machine_type, imported_mutable_globals,
field_offset);
*offset = gasm_->IntPtrConstant(0);
}
} else if (global.type.is_reference()) {
......@@ -3281,11 +3291,10 @@ void WasmGraphBuilder::GetGlobalBaseAndOffset(const wasm::WasmGlobal& global,
*offset = gasm_->IntPtrConstant(
wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(global.offset));
} else {
#ifdef V8_ENABLE_SANDBOX
*base = LOAD_INSTANCE_FIELD(GlobalsStart, MachineType::SandboxedPointer());
#else
*base = LOAD_INSTANCE_FIELD(GlobalsStart, MachineType::UintPtr());
#endif
MachineType machine_type = V8_ENABLE_SANDBOX_BOOL
? MachineType::SandboxedPointer()
: MachineType::UintPtr();
*base = LOAD_INSTANCE_FIELD(GlobalsStart, machine_type);
*offset = gasm_->IntPtrConstant(global.offset);
}
}
......@@ -5035,11 +5044,14 @@ void WasmGraphBuilder::DataDrop(uint32_t data_segment_index,
DCHECK_LT(data_segment_index, env_->module->num_declared_data_segments);
Node* seg_size_array =
LOAD_INSTANCE_FIELD(DataSegmentSizes, MachineType::Pointer());
LOAD_INSTANCE_FIELD(DataSegmentSizes, MachineType::TaggedPointer());
static_assert(wasm::kV8MaxWasmDataSegments <= kMaxUInt32 >> 2);
auto access = ObjectAccess(MachineType::Int32(), kNoWriteBarrier);
gasm_->StoreToObject(access, seg_size_array, data_segment_index << 2,
Int32Constant(0));
gasm_->StoreToObject(
access, seg_size_array,
wasm::ObjectAccess::ElementOffsetInTaggedFixedUInt32Array(
data_segment_index),
Int32Constant(0));
}
Node* WasmGraphBuilder::StoreArgsInStackSlot(
......@@ -5137,10 +5149,12 @@ void WasmGraphBuilder::ElemDrop(uint32_t elem_segment_index,
DCHECK_LT(elem_segment_index, env_->module->elem_segments.size());
Node* dropped_elem_segments =
LOAD_INSTANCE_FIELD(DroppedElemSegments, MachineType::Pointer());
LOAD_INSTANCE_FIELD(DroppedElemSegments, MachineType::TaggedPointer());
auto store_rep =
StoreRepresentation(MachineRepresentation::kWord8, kNoWriteBarrier);
gasm_->Store(store_rep, dropped_elem_segments, elem_segment_index,
gasm_->Store(store_rep, dropped_elem_segments,
wasm::ObjectAccess::ElementOffsetInTaggedFixedUInt8Array(
elem_segment_index),
Int32Constant(1));
}
......
......@@ -2011,7 +2011,6 @@ void WasmInstanceObject::WasmInstanceObjectPrint(std::ostream& os) {
PRINT_OPTIONAL_WASM_INSTANCE_FIELD(indirect_function_tables, Brief);
PRINT_WASM_INSTANCE_FIELD(imported_function_refs, Brief);
PRINT_OPTIONAL_WASM_INSTANCE_FIELD(indirect_function_table_refs, Brief);
PRINT_OPTIONAL_WASM_INSTANCE_FIELD(managed_native_allocations, Brief);
PRINT_OPTIONAL_WASM_INSTANCE_FIELD(tags_table, Brief);
PRINT_OPTIONAL_WASM_INSTANCE_FIELD(wasm_internal_functions, Brief);
PRINT_WASM_INSTANCE_FIELD(managed_object_maps, Brief);
......@@ -2025,18 +2024,18 @@ void WasmInstanceObject::WasmInstanceObjectPrint(std::ostream& os) {
PRINT_WASM_INSTANCE_FIELD(new_allocation_top_address, to_void_ptr);
PRINT_WASM_INSTANCE_FIELD(old_allocation_limit_address, to_void_ptr);
PRINT_WASM_INSTANCE_FIELD(old_allocation_top_address, to_void_ptr);
PRINT_WASM_INSTANCE_FIELD(imported_function_targets, to_void_ptr);
PRINT_WASM_INSTANCE_FIELD(imported_function_targets, Brief);
PRINT_WASM_INSTANCE_FIELD(globals_start, to_void_ptr);
PRINT_WASM_INSTANCE_FIELD(imported_mutable_globals, to_void_ptr);
PRINT_WASM_INSTANCE_FIELD(imported_mutable_globals, Brief);
PRINT_WASM_INSTANCE_FIELD(indirect_function_table_size, +);
PRINT_WASM_INSTANCE_FIELD(indirect_function_table_sig_ids, to_void_ptr);
PRINT_WASM_INSTANCE_FIELD(indirect_function_table_targets, to_void_ptr);
PRINT_WASM_INSTANCE_FIELD(isorecursive_canonical_types,
reinterpret_cast<const uint32_t*>);
PRINT_WASM_INSTANCE_FIELD(jump_table_start, to_void_ptr);
PRINT_WASM_INSTANCE_FIELD(data_segment_starts, to_void_ptr);
PRINT_WASM_INSTANCE_FIELD(data_segment_sizes, to_void_ptr);
PRINT_WASM_INSTANCE_FIELD(dropped_elem_segments, to_void_ptr);
PRINT_WASM_INSTANCE_FIELD(data_segment_starts, Brief);
PRINT_WASM_INSTANCE_FIELD(data_segment_sizes, Brief);
PRINT_WASM_INSTANCE_FIELD(dropped_elem_segments, Brief);
PRINT_WASM_INSTANCE_FIELD(hook_on_function_call_address, to_void_ptr);
PRINT_WASM_INSTANCE_FIELD(tiering_budget_array, to_void_ptr);
PRINT_WASM_INSTANCE_FIELD(break_on_entry, static_cast<int>);
......
......@@ -621,6 +621,20 @@ void ByteArray::set_int(int offset, int value) {
WriteField<int>(kHeaderSize + offset, value);
}
Address ByteArray::get_sandboxed_pointer(int offset) const {
DCHECK_GE(offset, 0);
DCHECK_LE(offset + sizeof(Address), length());
PtrComprCageBase sandbox_base = GetPtrComprCageBase(*this);
return ReadSandboxedPointerField(kHeaderSize + offset, sandbox_base);
}
void ByteArray::set_sandboxed_pointer(int offset, Address value) {
DCHECK_GE(offset, 0);
DCHECK_LE(offset + sizeof(Address), length());
PtrComprCageBase sandbox_base = GetPtrComprCageBase(*this);
WriteSandboxedPointerField(kHeaderSize + offset, sandbox_base, value);
}
void ByteArray::copy_in(int offset, const byte* buffer, int slice_length) {
DCHECK_GE(offset, 0);
DCHECK_GE(slice_length, 0);
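A hedged usage sketch of the two new accessors (the caller below is hypothetical; ByteArray, Address, and kSystemPointerSize are the V8-internal names assumed). Each entry occupies kSystemPointerSize bytes, and the same slot layout supports both access modes this CL needs:

// Hypothetical caller inside V8; entry i lives at byte offset
// i * kSystemPointerSize.
void StoreImportedGlobalSlots(ByteArray globals, Address buffer_start,
                              int32_t ref_buffer_index) {
  // Value-typed global: store the raw backing-store address as a
  // sandboxed pointer (slot 0).
  globals.set_sandboxed_pointer(0 * kSystemPointerSize, buffer_start);
  // Reference-typed global: store a plain 32-bit index into the tagged
  // buffer rather than an address (slot 1).
  globals.set_int(1 * kSystemPointerSize, ref_buffer_index);
}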
......@@ -673,8 +687,10 @@ FixedIntegerArray<T> FixedIntegerArray<T>::cast(Object object) {
template <typename T>
Handle<FixedIntegerArray<T>> FixedIntegerArray<T>::New(
Isolate* isolate, int length, AllocationType allocation) {
int byte_length;
CHECK(!base::bits::SignedMulOverflow32(length, sizeof(T), &byte_length));
return Handle<FixedIntegerArray<T>>::cast(
isolate->factory()->NewByteArray(length * sizeof(T), allocation));
isolate->factory()->NewByteArray(byte_length, allocation));
}
template <typename T>
......@@ -711,8 +727,10 @@ PodArray<T> PodArray<T>::cast(Object object) {
template <class T>
Handle<PodArray<T>> PodArray<T>::New(Isolate* isolate, int length,
AllocationType allocation) {
int byte_length;
CHECK(!base::bits::SignedMulOverflow32(length, sizeof(T), &byte_length));
return Handle<PodArray<T>>::cast(
isolate->factory()->NewByteArray(length * sizeof(T), allocation));
isolate->factory()->NewByteArray(byte_length, allocation));
}
template <class T>
......
......@@ -531,6 +531,9 @@ class ByteArray : public TorqueGeneratedByteArray<ByteArray, FixedArrayBase> {
inline int get_int(int offset) const;
inline void set_int(int offset, int value);
inline Address get_sandboxed_pointer(int offset) const;
inline void set_sandboxed_pointer(int offset, Address value);
// Copy in / copy out whole byte slices.
inline void copy_out(int index, byte* buffer, int slice_length);
inline void copy_in(int index, const byte* buffer, int slice_length);
......@@ -617,6 +620,10 @@ using FixedInt32Array = FixedIntegerArray<int32_t>;
using FixedUInt32Array = FixedIntegerArray<uint32_t>;
using FixedInt64Array = FixedIntegerArray<int64_t>;
using FixedUInt64Array = FixedIntegerArray<uint64_t>;
// Use with care! Raw addresses on the heap are not safe in combination with
// the sandbox. However, this can for example be used to store sandboxed
// pointers, which is safe.
using FixedAddressArray = FixedIntegerArray<Address>;
// Wrapper class for ByteArray which can store arbitrary C++ classes, as long
// as they can be copied with memcpy.
......
......@@ -741,19 +741,20 @@ RUNTIME_FUNCTION(Runtime_WasmArrayNewSegment) {
DCHECK_EQ(length_in_bytes / element_size, length);
if (!base::IsInBounds<uint32_t>(
offset, length_in_bytes,
instance->data_segment_sizes()[segment_index])) {
instance->data_segment_sizes().get(segment_index))) {
return ThrowWasmError(isolate,
MessageTemplate::kWasmTrapDataSegmentOutOfBounds);
}
Address source = instance->data_segment_starts()[segment_index] + offset;
Address source =
instance->data_segment_starts().get(segment_index) + offset;
return *isolate->factory()->NewWasmArrayFromMemory(length, rtt, source);
} else {
const wasm::WasmElemSegment* elem_segment =
&instance->module()->elem_segments[segment_index];
if (!base::IsInBounds<size_t>(
offset, length,
instance->dropped_elem_segments()[segment_index]
instance->dropped_elem_segments().get(segment_index)
? 0
: elem_segment->entries.size())) {
return ThrowWasmError(
......
......@@ -2491,19 +2491,21 @@ class LiftoffCompiler {
LiftoffRegList* pinned, uint32_t* offset) {
Register addr = pinned->set(__ GetUnusedRegister(kGpReg, {})).gp();
if (global->mutability && global->imported) {
LOAD_INSTANCE_FIELD(addr, ImportedMutableGlobals, kSystemPointerSize,
*pinned);
__ Load(LiftoffRegister(addr), addr, no_reg,
global->index * sizeof(Address), kPointerLoadType);
LOAD_TAGGED_PTR_INSTANCE_FIELD(addr, ImportedMutableGlobals, *pinned);
int field_offset =
wasm::ObjectAccess::ElementOffsetInTaggedFixedAddressArray(
global->index);
__ Load(LiftoffRegister(addr), addr, no_reg, field_offset,
kPointerLoadType);
*offset = 0;
} else {
LOAD_INSTANCE_FIELD(addr, GlobalsStart, kSystemPointerSize, *pinned);
*offset = global->offset;
}
#ifdef V8_ENABLE_SANDBOX
__ DecodeSandboxedPointer(addr);
#endif
*offset = global->offset;
}
return addr;
return addr;
}
void GetBaseAndOffsetForImportedMutableExternRefGlobal(
......@@ -2524,12 +2526,14 @@ class LiftoffCompiler {
Register imported_mutable_globals =
pinned->set(__ GetUnusedRegister(kGpReg, *pinned)).gp();
LOAD_INSTANCE_FIELD(imported_mutable_globals, ImportedMutableGlobals,
kSystemPointerSize, *pinned);
LOAD_TAGGED_PTR_INSTANCE_FIELD(imported_mutable_globals,
ImportedMutableGlobals, *pinned);
*offset = imported_mutable_globals;
int field_offset =
wasm::ObjectAccess::ElementOffsetInTaggedFixedAddressArray(
global->index);
__ Load(LiftoffRegister(*offset), imported_mutable_globals, no_reg,
global->index * sizeof(Address),
kSystemPointerSize == 4 ? LoadType::kI32Load : LoadType::kI64Load);
field_offset, LoadType::kI32Load);
__ emit_i32_shli(*offset, *offset, kTaggedSizeLog2);
__ emit_i32_addi(*offset, *offset,
wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(0));
......@@ -5259,14 +5263,15 @@ class LiftoffCompiler {
Register seg_size_array =
pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
LOAD_INSTANCE_FIELD(seg_size_array, DataSegmentSizes, kSystemPointerSize,
pinned);
LOAD_TAGGED_PTR_INSTANCE_FIELD(seg_size_array, DataSegmentSizes, pinned);
LiftoffRegister seg_index =
pinned.set(__ GetUnusedRegister(kGpReg, pinned));
// Scale the seg_index for the array access.
__ LoadConstant(seg_index,
WasmValue(imm.index << value_kind_size_log2(kI32)));
__ LoadConstant(
seg_index,
WasmValue(wasm::ObjectAccess::ElementOffsetInTaggedFixedUInt32Array(
imm.index)));
// Set the length of the segment to '0' to drop it.
LiftoffRegister null_reg = pinned.set(__ GetUnusedRegister(kGpReg, pinned));
......@@ -5396,12 +5401,15 @@ class LiftoffCompiler {
LiftoffRegList pinned;
Register dropped_elem_segments =
pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
LOAD_INSTANCE_FIELD(dropped_elem_segments, DroppedElemSegments,
kSystemPointerSize, pinned);
LOAD_TAGGED_PTR_INSTANCE_FIELD(dropped_elem_segments, DroppedElemSegments,
pinned);
LiftoffRegister seg_index =
pinned.set(__ GetUnusedRegister(kGpReg, pinned));
__ LoadConstant(seg_index, WasmValue(imm.index));
__ LoadConstant(
seg_index,
WasmValue(wasm::ObjectAccess::ElementOffsetInTaggedFixedUInt8Array(
imm.index)));
// Mark the segment as dropped by setting its value in the dropped
// segments list to 1.
......@@ -7057,10 +7065,12 @@ class LiftoffCompiler {
Register target = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
Register imported_targets = tmp;
LOAD_INSTANCE_FIELD(imported_targets, ImportedFunctionTargets,
kSystemPointerSize, pinned);
__ Load(LiftoffRegister(target), imported_targets, no_reg,
imm.index * sizeof(Address), kPointerLoadType);
LOAD_TAGGED_PTR_INSTANCE_FIELD(imported_targets, ImportedFunctionTargets,
pinned);
__ Load(
LiftoffRegister(target), imported_targets, no_reg,
wasm::ObjectAccess::ElementOffsetInTaggedFixedAddressArray(imm.index),
kPointerLoadType);
Register imported_function_refs = tmp;
LOAD_TAGGED_PTR_INSTANCE_FIELD(imported_function_refs,
......
......@@ -264,7 +264,7 @@ void ConstantExpressionInterface::ArrayNewSegment(
}
Address source =
instance_->data_segment_starts()[segment_imm.index] + offset;
instance_->data_segment_starts().get(segment_imm.index) + offset;
Handle<WasmArray> array_value = isolate_->factory()->NewWasmArrayFromMemory(
length, Handle<Map>::cast(rtt.runtime_value.to_ref()), source);
result->runtime_value = WasmValue(array_value, result_type);
......
......@@ -1420,7 +1420,6 @@ bool InstanceBuilder::ProcessImportedWasmGlobalObject(
if (global.mutability) {
DCHECK_LT(global.index, module_->num_imported_mutable_globals);
Handle<Object> buffer;
Address address_or_offset;
if (global.type.is_reference()) {
static_assert(sizeof(global_object->offset()) <= sizeof(Address),
"The offset into the globals buffer does not fit into "
......@@ -1428,17 +1427,19 @@ bool InstanceBuilder::ProcessImportedWasmGlobalObject(
buffer = handle(global_object->tagged_buffer(), isolate_);
// For externref globals we use a relative offset, not an absolute
// address.
address_or_offset = static_cast<Address>(global_object->offset());
instance->imported_mutable_globals().set_int(
global.index * kSystemPointerSize, global_object->offset());
} else {
buffer = handle(global_object->untagged_buffer(), isolate_);
// It is safe in this case to store the raw pointer to the buffer
// since the backing store of the JSArrayBuffer will not be
// relocated.
address_or_offset = reinterpret_cast<Address>(raw_buffer_ptr(
Address address = reinterpret_cast<Address>(raw_buffer_ptr(
Handle<JSArrayBuffer>::cast(buffer), global_object->offset()));
instance->imported_mutable_globals().set_sandboxed_pointer(
global.index * kSystemPointerSize, address);
}
instance->imported_mutable_globals_buffers().set(global.index, *buffer);
instance->imported_mutable_globals()[global.index] = address_or_offset;
return true;
}
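The read side mirrors those two write paths; a sketch with V8-internal types assumed, not verbatim source:

// Sketch: resolving a mutable imported global from the on-heap slot array.
// Value-typed globals yield a decoded (in-sandbox) backing-store address;
// reference-typed globals yield a 32-bit offset that the caller resolves
// against the tagged imported_mutable_globals_buffers() array.
Address LoadImportedGlobalSlot(WasmInstanceObject instance,
                               const wasm::WasmGlobal& global) {
  ByteArray slots = instance.imported_mutable_globals();
  int byte_offset = global.index * kSystemPointerSize;
  if (global.type.is_reference()) {
    return static_cast<Address>(slots.get_int(byte_offset));
  }
  return slots.get_sandboxed_pointer(byte_offset);
}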
......@@ -1865,16 +1866,15 @@ void InstanceBuilder::ProcessExports(Handle<WasmInstanceObject> instance) {
FixedArray::cast(buffers_array->get(global.index)), isolate_);
// For externref globals we store the relative offset in the
// imported_mutable_globals array instead of an absolute address.
Address addr = instance->imported_mutable_globals()[global.index];
DCHECK_LE(addr, static_cast<Address>(
std::numeric_limits<uint32_t>::max()));
offset = static_cast<uint32_t>(addr);
offset = instance->imported_mutable_globals().get_int(
global.index * kSystemPointerSize);
} else {
untagged_buffer =
handle(JSArrayBuffer::cast(buffers_array->get(global.index)),
isolate_);
Address global_addr =
instance->imported_mutable_globals()[global.index];
instance->imported_mutable_globals().get_sandboxed_pointer(
global.index * kSystemPointerSize);
size_t buffer_size = untagged_buffer->byte_length();
Address backing_store =
......@@ -2026,7 +2026,7 @@ base::Optional<MessageTemplate> LoadElemSegmentImpl(
}
if (!base::IsInBounds<uint64_t>(
src, count,
instance->dropped_elem_segments()[segment_index] == 0
instance->dropped_elem_segments().get(segment_index) == 0
? elem_segment.entries.size()
: 0)) {
return {MessageTemplate::kWasmTrapElementSegmentOutOfBounds};
......@@ -2082,7 +2082,7 @@ void InstanceBuilder::LoadTableSegments(Handle<WasmInstanceObject> instance) {
table_index, segment_index, dst, src, count);
// Set the active segments to being already dropped, since table.init on
// a dropped passive segment and an active segment have the same behavior.
instance->dropped_elem_segments()[segment_index] = 1;
instance->dropped_elem_segments().set(segment_index, 1);
if (opt_error.has_value()) {
thrower_->RuntimeError(
"%s", MessageFormatter::TemplateString(opt_error.value()));
......
......@@ -28,6 +28,21 @@ class ObjectAccess : public AllStatic {
return ToTagged(FixedArray::OffsetOfElementAt(index));
}
// Get the offset into a fixed uint8 array for a given {index}.
static constexpr int ElementOffsetInTaggedFixedUInt8Array(int index) {
return ToTagged(FixedUInt8Array::OffsetOfElementAt(index));
}
// Get the offset into a fixed uint32 array for a given {index}.
static constexpr int ElementOffsetInTaggedFixedUInt32Array(int index) {
return ToTagged(FixedUInt32Array::OffsetOfElementAt(index));
}
// Get the offset into a fixed address array for a given {index}.
static constexpr int ElementOffsetInTaggedFixedAddressArray(int index) {
return ToTagged(FixedAddressArray::OffsetOfElementAt(index));
}
// Get the offset of the context stored in a {JSFunction} object.
static constexpr int ContextOffsetInTaggedJSFunction() {
return ToTagged(JSFunction::kContextOffset);
......
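The new ElementOffsetInTagged* helpers combine the array's element offset with the untagging adjustment. A standalone worked sketch (the header size is hypothetical and configuration-dependent; kHeapObjectTag is 1 in V8):

#include <cassert>
#include <cstdint>

constexpr int kHeapObjectTag = 1;
constexpr int kHeaderSize = 16;  // hypothetical ByteArray header size

constexpr int OffsetOfUInt32ElementAt(int index) {
  return kHeaderSize + index * static_cast<int>(sizeof(uint32_t));
}

// ToTagged converts an object-start-relative offset into the offset used
// with a tagged (+1-biased) HeapObject pointer, as generated code sees it.
constexpr int ToTagged(int offset) { return offset - kHeapObjectTag; }

int main() {
  // Element 3 of a FixedUInt32Array: 16 + 3 * 4 = 28, minus the tag bias.
  assert(ToTagged(OffsetOfUInt32ElementAt(3)) == 27);
  return 0;
}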
......@@ -488,11 +488,11 @@ int32_t memory_init_wrapper(Address data) {
uint64_t mem_size = instance.memory_size();
if (!base::IsInBounds<uint64_t>(dst, size, mem_size)) return kOutOfBounds;
uint32_t seg_size = instance.data_segment_sizes()[seg_index];
uint32_t seg_size = instance.data_segment_sizes().get(seg_index);
if (!base::IsInBounds<uint32_t>(src, size, seg_size)) return kOutOfBounds;
byte* seg_start =
reinterpret_cast<byte*>(instance.data_segment_starts()[seg_index]);
reinterpret_cast<byte*>(instance.data_segment_starts().get(seg_index));
std::memcpy(EffectiveAddress(instance, dst), seg_start + src, size);
return kSuccess;
}
......
......@@ -207,12 +207,12 @@ PRIMITIVE_ACCESSORS(WasmInstanceObject, old_allocation_top_address, Address*,
kOldAllocationTopAddressOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, isorecursive_canonical_types,
const uint32_t*, kIsorecursiveCanonicalTypesOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, imported_function_targets, Address*,
kImportedFunctionTargetsOffset)
SANDBOXED_POINTER_ACCESSORS(WasmInstanceObject, globals_start, byte*,
kGlobalsStartOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, imported_mutable_globals, Address*,
kImportedMutableGlobalsOffset)
ACCESSORS(WasmInstanceObject, imported_mutable_globals, ByteArray,
kImportedMutableGlobalsOffset)
ACCESSORS(WasmInstanceObject, imported_function_targets, FixedAddressArray,
kImportedFunctionTargetsOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, indirect_function_table_size, uint32_t,
kIndirectFunctionTableSizeOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, indirect_function_table_sig_ids,
......@@ -221,16 +221,16 @@ PRIMITIVE_ACCESSORS(WasmInstanceObject, indirect_function_table_targets,
Address*, kIndirectFunctionTableTargetsOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, jump_table_start, Address,
kJumpTableStartOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, data_segment_starts, Address*,
kDataSegmentStartsOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, data_segment_sizes, uint32_t*,
kDataSegmentSizesOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, dropped_elem_segments, byte*,
kDroppedElemSegmentsOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, hook_on_function_call_address, Address,
kHookOnFunctionCallAddressOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, tiering_budget_array, uint32_t*,
kTieringBudgetArrayOffset)
ACCESSORS(WasmInstanceObject, data_segment_starts, FixedAddressArray,
kDataSegmentStartsOffset)
ACCESSORS(WasmInstanceObject, data_segment_sizes, FixedUInt32Array,
kDataSegmentSizesOffset)
ACCESSORS(WasmInstanceObject, dropped_elem_segments, FixedUInt8Array,
kDroppedElemSegmentsOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, break_on_entry, uint8_t,
kBreakOnEntryOffset)
......@@ -253,8 +253,6 @@ ACCESSORS(WasmInstanceObject, imported_function_refs, FixedArray,
kImportedFunctionRefsOffset)
OPTIONAL_ACCESSORS(WasmInstanceObject, indirect_function_table_refs, FixedArray,
kIndirectFunctionTableRefsOffset)
OPTIONAL_ACCESSORS(WasmInstanceObject, managed_native_allocations, Foreign,
kManagedNativeAllocationsOffset)
OPTIONAL_ACCESSORS(WasmInstanceObject, tags_table, FixedArray, kTagsTableOffset)
OPTIONAL_ACCESSORS(WasmInstanceObject, wasm_internal_functions, FixedArray,
kWasmInternalFunctionsOffset)
......
......@@ -43,47 +43,6 @@ using WasmModule = wasm::WasmModule;
namespace {
// Manages the natively-allocated memory for a WasmInstanceObject. Since
// an instance finalizer is not guaranteed to run upon isolate shutdown,
// we must use a Managed<WasmInstanceNativeAllocations> to guarantee
// it is freed.
class WasmInstanceNativeAllocations {
public:
WasmInstanceNativeAllocations(Handle<WasmInstanceObject> instance,
size_t num_imported_functions,
size_t num_imported_mutable_globals,
size_t num_data_segments,
size_t num_elem_segments)
: imported_function_targets_(new Address[num_imported_functions]),
imported_mutable_globals_(new Address[num_imported_mutable_globals]),
data_segment_starts_(new Address[num_data_segments]),
data_segment_sizes_(new uint32_t[num_data_segments]),
dropped_elem_segments_(new uint8_t[num_elem_segments]) {
instance->set_imported_function_targets(imported_function_targets_.get());
instance->set_imported_mutable_globals(imported_mutable_globals_.get());
instance->set_data_segment_starts(data_segment_starts_.get());
instance->set_data_segment_sizes(data_segment_sizes_.get());
instance->set_dropped_elem_segments(dropped_elem_segments_.get());
}
private:
const std::unique_ptr<Address[]> imported_function_targets_;
const std::unique_ptr<Address[]> imported_mutable_globals_;
const std::unique_ptr<Address[]> data_segment_starts_;
const std::unique_ptr<uint32_t[]> data_segment_sizes_;
const std::unique_ptr<uint8_t[]> dropped_elem_segments_;
};
size_t EstimateNativeAllocationsSize(const WasmModule* module) {
size_t estimate =
sizeof(WasmInstanceNativeAllocations) +
(1 * kSystemPointerSize * module->num_imported_mutable_globals) +
(2 * kSystemPointerSize * module->num_imported_functions) +
((kSystemPointerSize + sizeof(uint32_t) + sizeof(uint8_t)) *
module->num_declared_data_segments);
return estimate;
}
enum DispatchTableElements : int {
kDispatchTableInstanceOffset,
kDispatchTableIndexOffset,
......@@ -1193,8 +1152,8 @@ void ImportedFunctionEntry::SetWasmToJs(
Handle<WasmApiFunctionRef> ref =
isolate->factory()->NewWasmApiFunctionRef(callable, suspend, instance_);
instance_->imported_function_refs().set(index_, *ref);
instance_->imported_function_targets()[index_] =
wasm_to_js_wrapper->instruction_start();
instance_->imported_function_targets().set(
index_, wasm_to_js_wrapper->instruction_start());
}
void ImportedFunctionEntry::SetWasmToWasm(WasmInstanceObject instance,
......@@ -1203,7 +1162,7 @@ void ImportedFunctionEntry::SetWasmToWasm(WasmInstanceObject instance,
", target=0x%" PRIxPTR "}\n",
instance_->ptr(), index_, instance.ptr(), call_target);
instance_->imported_function_refs().set(index_, instance);
instance_->imported_function_targets()[index_] = call_target;
instance_->imported_function_targets().set(index_, call_target);
}
// Returns an empty Object() if no callable is available, a JSReceiver
......@@ -1223,7 +1182,7 @@ Object ImportedFunctionEntry::object_ref() {
}
Address ImportedFunctionEntry::target() {
return instance_->imported_function_targets()[index_];
return instance_->imported_function_targets().get(index_);
}
// static
......@@ -1266,17 +1225,35 @@ Handle<WasmInstanceObject> WasmInstanceObject::New(
WasmInstanceObject::cast(*instance_object), isolate);
instance->clear_padding();
// Initialize the imported function arrays.
auto module = module_object->module();
auto num_imported_functions = module->num_imported_functions;
auto num_imported_mutable_globals = module->num_imported_mutable_globals;
auto num_data_segments = module->num_declared_data_segments;
size_t native_allocations_size = EstimateNativeAllocationsSize(module);
auto native_allocations = Managed<WasmInstanceNativeAllocations>::Allocate(
isolate, native_allocations_size, instance, num_imported_functions,
num_imported_mutable_globals, num_data_segments,
module->elem_segments.size());
instance->set_managed_native_allocations(*native_allocations);
Handle<FixedAddressArray> imported_function_targets =
FixedAddressArray::New(isolate, num_imported_functions);
instance->set_imported_function_targets(*imported_function_targets);
int num_imported_mutable_globals = module->num_imported_mutable_globals;
// The imported_mutable_globals is essentially a FixedAddressArray (storing
// sandboxed pointers), but some entries (the indices for reference-type
// globals) are accessed as 32-bit integers which is more convenient with a
// raw ByteArray.
Handle<ByteArray> imported_mutable_globals =
FixedAddressArray::New(isolate, num_imported_mutable_globals);
instance->set_imported_mutable_globals(*imported_mutable_globals);
int num_data_segments = module->num_declared_data_segments;
Handle<FixedAddressArray> data_segment_starts =
FixedAddressArray::New(isolate, num_data_segments);
instance->set_data_segment_starts(*data_segment_starts);
Handle<FixedUInt32Array> data_segment_sizes =
FixedUInt32Array::New(isolate, num_data_segments);
instance->set_data_segment_sizes(*data_segment_sizes);
int num_elem_segments = static_cast<int>(module->elem_segments.size());
Handle<FixedUInt8Array> dropped_elem_segments =
FixedUInt8Array::New(isolate, num_elem_segments);
instance->set_dropped_elem_segments(*dropped_elem_segments);
Handle<FixedArray> imported_function_refs =
isolate->factory()->NewFixedArray(num_imported_functions);
......@@ -1345,18 +1322,18 @@ void WasmInstanceObject::InitDataSegmentArrays(
// instructions).
DCHECK(num_data_segments == 0 ||
num_data_segments == module->data_segments.size());
for (size_t i = 0; i < num_data_segments; ++i) {
for (uint32_t i = 0; i < num_data_segments; ++i) {
const wasm::WasmDataSegment& segment = module->data_segments[i];
// Initialize the pointer and size of passive segments.
auto source_bytes = wire_bytes.SubVector(segment.source.offset(),
segment.source.end_offset());
instance->data_segment_starts()[i] =
reinterpret_cast<Address>(source_bytes.begin());
instance->data_segment_starts().set(
i, reinterpret_cast<Address>(source_bytes.begin()));
// Set the active segments to being already dropped, since memory.init on
// a dropped passive segment and an active segment have the same
// behavior.
instance->data_segment_sizes()[i] =
segment.active ? 0 : source_bytes.length();
instance->data_segment_sizes().set(
static_cast<int>(i), segment.active ? 0 : source_bytes.length());
}
}
......@@ -1366,18 +1343,18 @@ void WasmInstanceObject::InitElemSegmentArrays(
auto module = module_object->module();
auto num_elem_segments = module->elem_segments.size();
for (size_t i = 0; i < num_elem_segments; ++i) {
instance->dropped_elem_segments()[i] =
module->elem_segments[i].status ==
wasm::WasmElemSegment::kStatusDeclarative
? 1
: 0;
instance->dropped_elem_segments().set(
static_cast<int>(i), module->elem_segments[i].status ==
wasm::WasmElemSegment::kStatusDeclarative
? 1
: 0);
}
}
Address WasmInstanceObject::GetCallTarget(uint32_t func_index) {
wasm::NativeModule* native_module = module_object().native_module();
if (func_index < native_module->num_imported_functions()) {
return imported_function_targets()[func_index];
return imported_function_targets().get(func_index);
}
return jump_table_start() +
JumpTableOffset(native_module->module(), func_index);
......@@ -1602,7 +1579,8 @@ uint8_t* WasmInstanceObject::GetGlobalStorage(
DCHECK(!global.type.is_reference());
if (global.mutability && global.imported) {
return reinterpret_cast<byte*>(
instance->imported_mutable_globals()[global.index]);
instance->imported_mutable_globals().get_sandboxed_pointer(
global.index * kSystemPointerSize));
} else {
return instance->globals_start() + global.offset;
}
......@@ -1619,7 +1597,7 @@ WasmInstanceObject::GetGlobalBufferAndIndex(Handle<WasmInstanceObject> instance,
FixedArray::cast(
instance->imported_mutable_globals_buffers().get(global.index)),
isolate);
Address idx = instance->imported_mutable_globals()[global.index];
Address idx = instance->imported_mutable_globals().get_int(global.index * kSystemPointerSize);
DCHECK_LE(idx, std::numeric_limits<uint32_t>::max());
return {buffer, static_cast<uint32_t>(idx)};
}
......
......@@ -342,8 +342,9 @@ class V8_EXPORT_PRIVATE WasmInstanceObject : public JSObject {
DECL_OPTIONAL_ACCESSORS(tables, FixedArray)
DECL_OPTIONAL_ACCESSORS(indirect_function_tables, FixedArray)
DECL_ACCESSORS(imported_function_refs, FixedArray)
DECL_ACCESSORS(imported_mutable_globals, ByteArray)
DECL_ACCESSORS(imported_function_targets, FixedAddressArray)
DECL_OPTIONAL_ACCESSORS(indirect_function_table_refs, FixedArray)
DECL_OPTIONAL_ACCESSORS(managed_native_allocations, Foreign)
DECL_OPTIONAL_ACCESSORS(tags_table, FixedArray)
DECL_OPTIONAL_ACCESSORS(wasm_internal_functions, FixedArray)
DECL_ACCESSORS(managed_object_maps, FixedArray)
......@@ -358,18 +359,16 @@ class V8_EXPORT_PRIVATE WasmInstanceObject : public JSObject {
DECL_PRIMITIVE_ACCESSORS(old_allocation_limit_address, Address*)
DECL_PRIMITIVE_ACCESSORS(old_allocation_top_address, Address*)
DECL_PRIMITIVE_ACCESSORS(isorecursive_canonical_types, const uint32_t*)
DECL_PRIMITIVE_ACCESSORS(imported_function_targets, Address*)
DECL_SANDBOXED_POINTER_ACCESSORS(globals_start, byte*)
DECL_PRIMITIVE_ACCESSORS(imported_mutable_globals, Address*)
DECL_PRIMITIVE_ACCESSORS(indirect_function_table_size, uint32_t)
DECL_PRIMITIVE_ACCESSORS(indirect_function_table_sig_ids, uint32_t*)
DECL_PRIMITIVE_ACCESSORS(indirect_function_table_targets, Address*)
DECL_PRIMITIVE_ACCESSORS(jump_table_start, Address)
DECL_PRIMITIVE_ACCESSORS(data_segment_starts, Address*)
DECL_PRIMITIVE_ACCESSORS(data_segment_sizes, uint32_t*)
DECL_PRIMITIVE_ACCESSORS(dropped_elem_segments, byte*)
DECL_PRIMITIVE_ACCESSORS(hook_on_function_call_address, Address)
DECL_PRIMITIVE_ACCESSORS(tiering_budget_array, uint32_t*)
DECL_ACCESSORS(data_segment_starts, FixedAddressArray)
DECL_ACCESSORS(data_segment_sizes, FixedUInt32Array)
DECL_ACCESSORS(dropped_elem_segments, FixedUInt8Array)
DECL_PRIMITIVE_ACCESSORS(break_on_entry, uint8_t)
// Clear uninitialized padding space. This ensures that the snapshot content
......@@ -386,6 +385,8 @@ class V8_EXPORT_PRIVATE WasmInstanceObject : public JSObject {
/* Less than system pointer sized fields come first. */ \
V(kImportedFunctionRefsOffset, kTaggedSize) \
V(kIndirectFunctionTableRefsOffset, kTaggedSize) \
V(kImportedMutableGlobalsOffset, kTaggedSize) \
V(kImportedFunctionTargetsOffset, kTaggedSize) \
V(kIndirectFunctionTableSizeOffset, kUInt32Size) \
/* Optional padding to align system pointer size fields */ \
V(kOptionalPaddingOffset, POINTER_SIZE_PADDING(kOptionalPaddingOffset)) \
......@@ -393,11 +394,9 @@ class V8_EXPORT_PRIVATE WasmInstanceObject : public JSObject {
V(kMemorySizeOffset, kSizetSize) \
V(kStackLimitAddressOffset, kSystemPointerSize) \
V(kIsorecursiveCanonicalTypesOffset, kSystemPointerSize) \
V(kImportedFunctionTargetsOffset, kSystemPointerSize) \
V(kIndirectFunctionTableTargetsOffset, kSystemPointerSize) \
V(kIndirectFunctionTableSigIdsOffset, kSystemPointerSize) \
V(kGlobalsStartOffset, kSystemPointerSize) \
V(kImportedMutableGlobalsOffset, kSystemPointerSize) \
V(kIsolateRootOffset, kSystemPointerSize) \
V(kJumpTableStartOffset, kSystemPointerSize) \
/* End of often-accessed fields. */ \
......@@ -407,12 +406,12 @@ class V8_EXPORT_PRIVATE WasmInstanceObject : public JSObject {
V(kOldAllocationLimitAddressOffset, kSystemPointerSize) \
V(kOldAllocationTopAddressOffset, kSystemPointerSize) \
V(kRealStackLimitAddressOffset, kSystemPointerSize) \
V(kDataSegmentStartsOffset, kSystemPointerSize) \
V(kDataSegmentSizesOffset, kSystemPointerSize) \
V(kDroppedElemSegmentsOffset, kSystemPointerSize) \
V(kHookOnFunctionCallAddressOffset, kSystemPointerSize) \
V(kTieringBudgetArrayOffset, kSystemPointerSize) \
/* Less than system pointer size aligned fields are below. */ \
V(kDataSegmentStartsOffset, kTaggedSize) \
V(kDataSegmentSizesOffset, kTaggedSize) \
V(kDroppedElemSegmentsOffset, kTaggedSize) \
V(kModuleObjectOffset, kTaggedSize) \
V(kExportsObjectOffset, kTaggedSize) \
V(kNativeContextOffset, kTaggedSize) \
......@@ -422,7 +421,6 @@ class V8_EXPORT_PRIVATE WasmInstanceObject : public JSObject {
V(kImportedMutableGlobalsBuffersOffset, kTaggedSize) \
V(kTablesOffset, kTaggedSize) \
V(kIndirectFunctionTablesOffset, kTaggedSize) \
V(kManagedNativeAllocationsOffset, kTaggedSize) \
V(kTagsTableOffset, kTaggedSize) \
V(kWasmInternalFunctionsOffset, kTaggedSize) \
V(kManagedObjectMapsOffset, kTaggedSize) \
......@@ -459,11 +457,15 @@ class V8_EXPORT_PRIVATE WasmInstanceObject : public JSObject {
kImportedMutableGlobalsBuffersOffset,
kTablesOffset,
kIndirectFunctionTablesOffset,
kManagedNativeAllocationsOffset,
kTagsTableOffset,
kWasmInternalFunctionsOffset,
kManagedObjectMapsOffset,
kFeedbackVectorsOffset};
kFeedbackVectorsOffset,
kImportedMutableGlobalsOffset,
kImportedFunctionTargetsOffset,
kDataSegmentStartsOffset,
kDataSegmentSizesOffset,
kDroppedElemSegmentsOffset};
const wasm::WasmModule* module();
......
......@@ -327,8 +327,19 @@ uint32_t TestingModuleBuilder::AddPassiveDataSegment(
data_segment_sizes_.push_back(bytes.length());
// The vector pointers may have moved, so update the instance object.
instance_object_->set_data_segment_starts(data_segment_starts_.data());
instance_object_->set_data_segment_sizes(data_segment_sizes_.data());
uint32_t size = static_cast<uint32_t>(data_segment_sizes_.size());
Handle<FixedAddressArray> data_segment_starts =
FixedAddressArray::New(isolate_, size);
data_segment_starts->copy_in(
0, reinterpret_cast<byte*>(data_segment_starts_.data()),
size * sizeof(Address));
instance_object_->set_data_segment_starts(*data_segment_starts);
Handle<FixedUInt32Array> data_segment_sizes =
FixedUInt32Array::New(isolate_, size);
data_segment_sizes->copy_in(
0, reinterpret_cast<byte*>(data_segment_sizes_.data()),
size * sizeof(uint32_t));
instance_object_->set_data_segment_sizes(*data_segment_sizes);
return index;
}
......@@ -347,7 +358,11 @@ uint32_t TestingModuleBuilder::AddPassiveElementSegment(
// The vector pointers may have moved, so update the instance object.
dropped_elem_segments_.push_back(0);
instance_object_->set_dropped_elem_segments(dropped_elem_segments_.data());
uint32_t size = static_cast<uint32_t>(dropped_elem_segments_.size());
Handle<FixedUInt8Array> dropped_elem_segments =
FixedUInt8Array::New(isolate_, size);
dropped_elem_segments->copy_in(0, dropped_elem_segments_.data(), size);
instance_object_->set_dropped_elem_segments(*dropped_elem_segments);
return index;
}
......
......@@ -1836,15 +1836,15 @@ class WasmInterpreterInternals {
uint64_t dst = ToMemType(Pop());
Address dst_addr;
uint64_t src_max =
instance_object_->data_segment_sizes()[imm.data_segment.index];
instance_object_->data_segment_sizes().get(imm.data_segment.index);
if (!BoundsCheckMemRange(dst, &size, &dst_addr) ||
!base::IsInBounds(src, size, src_max)) {
DoTrap(kTrapMemOutOfBounds, pc);
return false;
}
Address src_addr =
instance_object_->data_segment_starts()[imm.data_segment.index] +
src;
Address src_addr = instance_object_->data_segment_starts().get(
imm.data_segment.index) +
src;
std::memmove(reinterpret_cast<void*>(dst_addr),
reinterpret_cast<void*>(src_addr), size);
return true;
......@@ -1856,7 +1856,7 @@ class WasmInterpreterInternals {
// validation.
DCHECK_LT(imm.index, module()->num_declared_data_segments);
*len += imm.length;
instance_object_->data_segment_sizes()[imm.index] = 0;
instance_object_->data_segment_sizes().set(imm.index, 0);
return true;
}
case kExprMemoryCopy: {
......@@ -1916,7 +1916,7 @@ class WasmInterpreterInternals {
IndexImmediate<Decoder::kNoValidation> imm(decoder, code->at(pc + *len),
"element segment index");
*len += imm.length;
instance_object_->dropped_elem_segments()[imm.index] = 1;
instance_object_->dropped_elem_segments().set(imm.index, 1);
return true;
}
case kExprTableCopy: {
......