Commit f1c2a208 authored by Manos Koukoutos, committed by V8 LUCI CQ

[wasm] Various small cleanups/fixes

Changes:
- Fix a bug in objects-printer where array elements were not treated as
  tagged pointers.
- Fix a few TODOs, mainly in the wasm interpreter.
- Improve documentation, small refactorings.

Change-Id: I1d70ad454b3a0693b9b784b17395434d81d01b61
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3383136
Reviewed-by: Nikolaos Papaspyrou <nikolaos@chromium.org>
Reviewed-by: Jakob Kummerow <jkummerow@chromium.org>
Commit-Queue: Manos Koukoutos <manoskouk@chromium.org>
Cr-Commit-Position: refs/heads/main@{#78656}
parent 7727437e
...@@ -3065,7 +3065,7 @@ Node* WasmGraphBuilder::BuildImportCall(const wasm::FunctionSig* sig, ...@@ -3065,7 +3065,7 @@ Node* WasmGraphBuilder::BuildImportCall(const wasm::FunctionSig* sig,
func_index_intptr, gasm_->IntPtrConstant(kSystemPointerSize)); func_index_intptr, gasm_->IntPtrConstant(kSystemPointerSize));
Node* imported_targets = Node* imported_targets =
LOAD_INSTANCE_FIELD(ImportedFunctionTargets, MachineType::Pointer()); LOAD_INSTANCE_FIELD(ImportedFunctionTargets, MachineType::Pointer());
Node* target_node = gasm_->LoadImmutableFromObject( Node* target_node = gasm_->LoadImmutable(
MachineType::Pointer(), imported_targets, func_index_times_pointersize); MachineType::Pointer(), imported_targets, func_index_times_pointersize);
args[0] = target_node; args[0] = target_node;
......
...@@ -1805,9 +1805,16 @@ void WasmStruct::WasmStructPrint(std::ostream& os) { ...@@ -1805,9 +1805,16 @@ void WasmStruct::WasmStructPrint(std::ostream& os) {
case wasm::kRef: case wasm::kRef:
case wasm::kOptRef: case wasm::kOptRef:
case wasm::kRtt: case wasm::kRtt:
case wasm::kRttWithDepth: case wasm::kRttWithDepth: {
os << Brief(base::ReadUnalignedValue<Object>(field_address)); Tagged_t raw = base::ReadUnalignedValue<Tagged_t>(field_address);
#if V8_COMPRESS_POINTERS
Address obj = DecompressTaggedPointer(address(), raw);
#else
Address obj = raw;
#endif
os << Brief(Object(obj));
break; break;
}
case wasm::kS128: case wasm::kS128:
os << "UNIMPLEMENTED"; // TODO(7748): Implement. os << "UNIMPLEMENTED"; // TODO(7748): Implement.
break; break;
...@@ -1844,17 +1851,24 @@ void WasmArray::WasmArrayPrint(std::ostream& os) { ...@@ -1844,17 +1851,24 @@ void WasmArray::WasmArrayPrint(std::ostream& os) {
true); true);
break; break;
case wasm::kI8: case wasm::kI8:
PrintTypedArrayElements(os, reinterpret_cast<int8_t*>(data_ptr), len,
true);
break;
case wasm::kI16: case wasm::kI16:
PrintTypedArrayElements(os, reinterpret_cast<int16_t*>(data_ptr), len,
true);
break;
case wasm::kS128: case wasm::kS128:
case wasm::kRef: case wasm::kRef:
case wasm::kOptRef: case wasm::kOptRef:
case wasm::kRtt: case wasm::kRtt:
case wasm::kRttWithDepth: case wasm::kRttWithDepth:
case wasm::kBottom:
case wasm::kVoid:
os << "\n Printing elements of this type is unimplemented, sorry"; os << "\n Printing elements of this type is unimplemented, sorry";
// TODO(7748): Implement. // TODO(7748): Implement.
break; break;
case wasm::kBottom:
case wasm::kVoid:
UNREACHABLE();
} }
os << "\n"; os << "\n";
} }
......
...@@ -1273,6 +1273,8 @@ class WasmDecoder : public Decoder { ...@@ -1273,6 +1273,8 @@ class WasmDecoder : public Decoder {
} }
bool Validate(const byte* pc, GlobalIndexImmediate<validate>& imm) { bool Validate(const byte* pc, GlobalIndexImmediate<validate>& imm) {
// We compare with the current size of the globals vector. This is important
// if we are decoding a constant expression in the global section.
if (!VALIDATE(imm.index < module_->globals.size())) { if (!VALIDATE(imm.index < module_->globals.size())) {
DecodeError(pc, "Invalid global index: %u", imm.index); DecodeError(pc, "Invalid global index: %u", imm.index);
return false; return false;
...@@ -2506,9 +2508,16 @@ class WasmFullDecoder : public WasmDecoder<validate, decoding_mode> { ...@@ -2506,9 +2508,16 @@ class WasmFullDecoder : public WasmDecoder<validate, decoding_mode> {
Append("T"); Append("T");
break; break;
case kControlIfElse: case kControlIfElse:
Append("E");
break;
case kControlTryCatch: case kControlTryCatch:
Append("C");
break;
case kControlTryCatchAll: case kControlTryCatchAll:
case kControlLet: // TODO(7748): Implement Append("A");
break;
case kControlLet:
Append("D");
break; break;
} }
if (c.start_merge.arity) Append("%u-", c.start_merge.arity); if (c.start_merge.arity) Append("%u-", c.start_merge.arity);
......
...@@ -63,7 +63,7 @@ class InitExprInterface { ...@@ -63,7 +63,7 @@ class InitExprInterface {
#define UNREACHABLE_INTERFACE_FUNCTION(name, ...) \ #define UNREACHABLE_INTERFACE_FUNCTION(name, ...) \
V8_INLINE void name(FullDecoder* decoder, ##__VA_ARGS__) { UNREACHABLE(); } V8_INLINE void name(FullDecoder* decoder, ##__VA_ARGS__) { UNREACHABLE(); }
INTERFACE_NON_CONSTANT_FUNCTIONS(UNREACHABLE_INTERFACE_FUNCTION) INTERFACE_NON_CONSTANT_FUNCTIONS(UNREACHABLE_INTERFACE_FUNCTION)
#undef EMPTY_INTERFACE_FUNCTION #undef UNREACHABLE_INTERFACE_FUNCTION
#define DECLARE_INTERFACE_FUNCTION(name, ...) \ #define DECLARE_INTERFACE_FUNCTION(name, ...) \
void name(FullDecoder* decoder, ##__VA_ARGS__); void name(FullDecoder* decoder, ##__VA_ARGS__);
......
...@@ -859,6 +859,8 @@ class ModuleDecoderImpl : public Decoder { ...@@ -859,6 +859,8 @@ class ModuleDecoderImpl : public Decoder {
void DecodeGlobalSection() { void DecodeGlobalSection() {
uint32_t globals_count = consume_count("globals count", kV8MaxWasmGlobals); uint32_t globals_count = consume_count("globals count", kV8MaxWasmGlobals);
uint32_t imported_globals = static_cast<uint32_t>(module_->globals.size()); uint32_t imported_globals = static_cast<uint32_t>(module_->globals.size());
// It is important to not resize the globals vector from the beginning,
// because we use its current size when decoding the initializer.
module_->globals.reserve(imported_globals + globals_count); module_->globals.reserve(imported_globals + globals_count);
for (uint32_t i = 0; ok() && i < globals_count; ++i) { for (uint32_t i = 0; ok() && i < globals_count; ++i) {
TRACE("DecodeGlobal[%d] module+%d\n", i, static_cast<int>(pc_ - start_)); TRACE("DecodeGlobal[%d] module+%d\n", i, static_cast<int>(pc_ - start_));
...@@ -1928,10 +1930,8 @@ class ModuleDecoderImpl : public Decoder { ...@@ -1928,10 +1930,8 @@ class ModuleDecoderImpl : public Decoder {
ValueType* fields = zone->NewArray<ValueType>(field_count); ValueType* fields = zone->NewArray<ValueType>(field_count);
bool* mutabilities = zone->NewArray<bool>(field_count); bool* mutabilities = zone->NewArray<bool>(field_count);
for (uint32_t i = 0; ok() && i < field_count; ++i) { for (uint32_t i = 0; ok() && i < field_count; ++i) {
ValueType field = consume_storage_type(); fields[i] = consume_storage_type();
fields[i] = field; mutabilities[i] = consume_mutability();
bool mutability = consume_mutability();
mutabilities[i] = mutability;
} }
if (failed()) return nullptr; if (failed()) return nullptr;
uint32_t* offsets = zone->NewArray<uint32_t>(field_count); uint32_t* offsets = zone->NewArray<uint32_t>(field_count);
...@@ -1939,10 +1939,10 @@ class ModuleDecoderImpl : public Decoder { ...@@ -1939,10 +1939,10 @@ class ModuleDecoderImpl : public Decoder {
} }
const ArrayType* consume_array(Zone* zone) { const ArrayType* consume_array(Zone* zone) {
ValueType field = consume_storage_type(); ValueType element_type = consume_storage_type();
if (failed()) return nullptr;
bool mutability = consume_mutability(); bool mutability = consume_mutability();
return zone->New<ArrayType>(field, mutability); if (failed()) return nullptr;
return zone->New<ArrayType>(element_type, mutability);
} }
// Consume the attribute field of an exception. // Consume the attribute field of an exception.
......
...@@ -2077,16 +2077,12 @@ void InstanceBuilder::LoadTableSegments(Handle<WasmInstanceObject> instance) { ...@@ -2077,16 +2077,12 @@ void InstanceBuilder::LoadTableSegments(Handle<WasmInstanceObject> instance) {
instance->tables().get(elem_segment.table_index)), instance->tables().get(elem_segment.table_index)),
isolate_), isolate_),
table_index, segment_index, dst, src, count); table_index, segment_index, dst, src, count);
// Set the active segments to being already dropped, since memory.init on // Set the active segments to being already dropped, since table.init on
// a dropped passive segment and an active segment have the same // a dropped passive segment and an active segment have the same behavior.
// behavior.
instance->dropped_elem_segments()[segment_index] = 1; instance->dropped_elem_segments()[segment_index] = 1;
if (!success) { if (!success) {
thrower_->RuntimeError("table initializer is out of bounds"); thrower_->RuntimeError("table initializer is out of bounds");
// Break out instead of returning; we don't want to continue to return;
// initialize any further element segments, but still need to add
// dispatch tables below.
break;
} }
} }
} }
......
...@@ -69,13 +69,9 @@ class HeapType { ...@@ -69,13 +69,9 @@ class HeapType {
kArray, // shorthand: g kArray, // shorthand: g
kAny, // shorthand: a kAny, // shorthand: a
// This value is used to represent failures in the parsing of heap types and // This value is used to represent failures in the parsing of heap types and
// does not correspond to a wasm heap type. // does not correspond to a wasm heap type. It has to be last in this list.
kBottom kBottom
}; };
// Internal use only; defined in the public section to make it easy to
// check that they are defined correctly:
static constexpr Representation kFirstSentinel = kFunc;
static constexpr Representation kLastSentinel = kAny;
static constexpr HeapType from_code(uint8_t code) { static constexpr HeapType from_code(uint8_t code) {
switch (code) { switch (code) {
...@@ -181,8 +177,14 @@ class HeapType { ...@@ -181,8 +177,14 @@ class HeapType {
private: private:
friend class ValueType; friend class ValueType;
Representation representation_;
constexpr bool is_valid() const { return representation_ <= kLastSentinel; } constexpr bool is_valid() const { return representation_ <= kLastSentinel; }
static constexpr Representation kFirstSentinel =
static_cast<Representation>(kV8MaxWasmTypes);
static constexpr Representation kLastSentinel =
static_cast<Representation>(kBottom - 1);
Representation representation_;
}; };
enum Nullability : bool { kNonNullable, kNullable }; enum Nullability : bool { kNonNullable, kNullable };
......
...@@ -661,6 +661,10 @@ bool V8_EXPORT_PRIVATE IsJSCompatibleSignature(const FunctionSig* sig, ...@@ -661,6 +661,10 @@ bool V8_EXPORT_PRIVATE IsJSCompatibleSignature(const FunctionSig* sig,
V(I64AtomicCompareExchange16U, 0xfe4d, l_ill) \ V(I64AtomicCompareExchange16U, 0xfe4d, l_ill) \
V(I64AtomicCompareExchange32U, 0xfe4e, l_ill) V(I64AtomicCompareExchange32U, 0xfe4e, l_ill)
#define FOREACH_ATOMIC_0_OPERAND_OPCODE(V) \
/* AtomicFence does not target a particular linear memory. */ \
V(AtomicFence, 0xfe03, v_v)
#define FOREACH_GC_OPCODE(V) \ #define FOREACH_GC_OPCODE(V) \
V(StructNewWithRtt, 0xfb01, _) \ V(StructNewWithRtt, 0xfb01, _) \
V(StructNewDefaultWithRtt, 0xfb02, _) \ V(StructNewDefaultWithRtt, 0xfb02, _) \
...@@ -713,10 +717,6 @@ bool V8_EXPORT_PRIVATE IsJSCompatibleSignature(const FunctionSig* sig, ...@@ -713,10 +717,6 @@ bool V8_EXPORT_PRIVATE IsJSCompatibleSignature(const FunctionSig* sig,
V(BrOnNonI31, 0xfb65, _) \ V(BrOnNonI31, 0xfb65, _) \
V(BrOnNonArray, 0xfb67, _) /* not standardized - V8 experimental */ V(BrOnNonArray, 0xfb67, _) /* not standardized - V8 experimental */
#define FOREACH_ATOMIC_0_OPERAND_OPCODE(V) \
/* AtomicFence does not target a particular linear memory. */ \
V(AtomicFence, 0xfe03, v_v)
// All opcodes. // All opcodes.
#define FOREACH_OPCODE(V) \ #define FOREACH_OPCODE(V) \
FOREACH_CONTROL_OPCODE(V) \ FOREACH_CONTROL_OPCODE(V) \
......
...@@ -240,67 +240,82 @@ ValueType optref(uint32_t type_index) { ...@@ -240,67 +240,82 @@ ValueType optref(uint32_t type_index) {
WASM_COMPILED_EXEC_TEST(WasmBasicStruct) { WASM_COMPILED_EXEC_TEST(WasmBasicStruct) {
WasmGCTester tester(execution_tier); WasmGCTester tester(execution_tier);
const byte type_index = const byte kStructIndex =
tester.DefineStruct({F(kWasmI32, true), F(kWasmI32, true)}); tester.DefineStruct({F(kWasmI32, true), F(kWasmI32, true)});
const byte empty_struct_index = tester.DefineStruct({}); const byte kEmptyStructIndex = tester.DefineStruct({});
ValueType kRefType = ref(type_index); const byte kComplexStructIndex = tester.DefineStruct(
ValueType kEmptyStructType = ref(empty_struct_index); {F(kWasmI32, false), F(optref(kStructIndex), false), F(kWasmS128, false),
ValueType kOptRefType = optref(type_index); F(ValueType::Rtt(kStructIndex), false)});
FunctionSig sig_q_v(1, 0, &kRefType); auto sig_n_v = FixedSizeSignature<ValueType>::Returns(optref(kStructIndex));
FunctionSig sig_qe_v(1, 0, &kEmptyStructType); auto sig_r_v = FixedSizeSignature<ValueType>::Returns(ref(kEmptyStructIndex));
auto sig_r_v_2 =
FixedSizeSignature<ValueType>::Returns(ref(kComplexStructIndex));
// Test struct.new and struct.get. // Test struct.new and struct.get.
const byte kGet1 = tester.DefineFunction( const byte kGet1 = tester.DefineFunction(
tester.sigs.i_v(), {}, tester.sigs.i_v(), {},
{WASM_STRUCT_GET( {WASM_STRUCT_GET(
type_index, 0, kStructIndex, 0,
WASM_STRUCT_NEW_WITH_RTT(type_index, WASM_I32V(42), WASM_I32V(64), WASM_STRUCT_NEW_WITH_RTT(kStructIndex, WASM_I32V(42), WASM_I32V(64),
WASM_RTT_CANON(type_index))), WASM_RTT_CANON(kStructIndex))),
kExprEnd}); kExprEnd});
// Test struct.new and struct.get. // Test struct.new and struct.get.
const byte kGet2 = tester.DefineFunction( const byte kGet2 = tester.DefineFunction(
tester.sigs.i_v(), {}, tester.sigs.i_v(), {},
{WASM_STRUCT_GET( {WASM_STRUCT_GET(
type_index, 1, kStructIndex, 1,
WASM_STRUCT_NEW_WITH_RTT(type_index, WASM_I32V(42), WASM_I32V(64), WASM_STRUCT_NEW_WITH_RTT(kStructIndex, WASM_I32V(42), WASM_I32V(64),
WASM_RTT_CANON(type_index))), WASM_RTT_CANON(kStructIndex))),
kExprEnd}); kExprEnd});
// Test struct.new, returning struct reference. // Test struct.new, returning struct reference.
const byte kGetStruct = tester.DefineFunction( const byte kGetStruct = tester.DefineFunction(
&sig_q_v, {}, &sig_n_v, {},
{WASM_STRUCT_NEW_WITH_RTT(type_index, WASM_I32V(42), WASM_I32V(64), {WASM_STRUCT_NEW_WITH_RTT(kStructIndex, WASM_I32V(42), WASM_I32V(64),
WASM_RTT_CANON(type_index)), WASM_RTT_CANON(kStructIndex)),
kExprEnd}); kExprEnd});
const byte kGetStructNominal = tester.DefineFunction( const byte kGetStructNominal = tester.DefineFunction(
&sig_q_v, {}, &sig_n_v, {},
{WASM_STRUCT_NEW_DEFAULT(type_index), WASM_DROP, {WASM_STRUCT_NEW_DEFAULT(kStructIndex), WASM_DROP,
WASM_STRUCT_NEW(type_index, WASM_I32V(42), WASM_I32V(64)), kExprEnd}); WASM_STRUCT_NEW(kStructIndex, WASM_I32V(42), WASM_I32V(64)), kExprEnd});
// Test struct.new, returning reference to an empty struct. // Test struct.new, returning reference to an empty struct.
const byte kGetEmptyStruct = tester.DefineFunction( const byte kGetEmptyStruct = tester.DefineFunction(
&sig_qe_v, {}, &sig_r_v, {},
{WASM_STRUCT_NEW_WITH_RTT(empty_struct_index, {WASM_STRUCT_NEW_WITH_RTT(kEmptyStructIndex,
WASM_RTT_CANON(empty_struct_index)), WASM_RTT_CANON(kEmptyStructIndex)),
kExprEnd}); kExprEnd});
// Test struct.set, struct refs types in locals. // Test struct.set, struct refs types in locals.
const byte j_local_index = 0; const byte j_local_index = 0;
const byte j_field_index = 0; const byte j_field_index = 0;
const byte kSet = tester.DefineFunction( const byte kSet = tester.DefineFunction(
tester.sigs.i_v(), {kOptRefType}, tester.sigs.i_v(), {optref(kStructIndex)},
{WASM_LOCAL_SET( {WASM_LOCAL_SET(
j_local_index, j_local_index,
WASM_STRUCT_NEW_WITH_RTT(type_index, WASM_I32V(42), WASM_I32V(64), WASM_STRUCT_NEW_WITH_RTT(kStructIndex, WASM_I32V(42), WASM_I32V(64),
WASM_RTT_CANON(type_index))), WASM_RTT_CANON(kStructIndex))),
WASM_STRUCT_SET(type_index, j_field_index, WASM_LOCAL_GET(j_local_index), WASM_STRUCT_SET(kStructIndex, j_field_index,
WASM_I32V(-99)), WASM_LOCAL_GET(j_local_index), WASM_I32V(-99)),
WASM_STRUCT_GET(type_index, j_field_index, WASM_STRUCT_GET(kStructIndex, j_field_index,
WASM_LOCAL_GET(j_local_index)), WASM_LOCAL_GET(j_local_index)),
kExprEnd}); kExprEnd});
const byte kSimdConstant[16] = {0, 1, 2, 3, 4, 5, 6, 7,
8, 9, 10, 11, 12, 13, 14, 15};
const byte kComplexStructProducer = tester.DefineFunction(
&sig_r_v_2, {},
{WASM_STRUCT_NEW_WITH_RTT(kComplexStructIndex, WASM_I32V(42),
WASM_STRUCT_NEW_DEFAULT_WITH_RTT(
kStructIndex, WASM_RTT_CANON(kStructIndex)),
WASM_SIMD_CONSTANT(kSimdConstant),
WASM_RTT_CANON(kStructIndex),
WASM_RTT_CANON(kComplexStructIndex)),
kExprEnd});
tester.CompileModule(); tester.CompileModule();
tester.CheckResult(kGet1, 42); tester.CheckResult(kGet1, 42);
...@@ -313,6 +328,9 @@ WASM_COMPILED_EXEC_TEST(WasmBasicStruct) { ...@@ -313,6 +328,9 @@ WASM_COMPILED_EXEC_TEST(WasmBasicStruct) {
.ToHandleChecked() .ToHandleChecked()
->IsWasmStruct()); ->IsWasmStruct());
tester.CheckResult(kSet, -99); tester.CheckResult(kSet, -99);
CHECK(tester.GetResultObject(kComplexStructProducer)
.ToHandleChecked()
->IsWasmStruct());
} }
// Test struct.get, ref.as_non_null and ref-typed globals. // Test struct.get, ref.as_non_null and ref-typed globals.
......
...@@ -1440,7 +1440,7 @@ class WasmInterpreterInternals { ...@@ -1440,7 +1440,7 @@ class WasmInterpreterInternals {
val = WasmValue(isolate_->factory()->null_value(), p); val = WasmValue(isolate_->factory()->null_value(), p);
break; break;
} }
case kRef: // TODO(7748): Implement. case kRef:
case kRtt: case kRtt:
case kRttWithDepth: case kRttWithDepth:
case kVoid: case kVoid:
...@@ -3164,29 +3164,11 @@ class WasmInterpreterInternals { ...@@ -3164,29 +3164,11 @@ class WasmInterpreterInternals {
break; break;
} }
case kRef: case kRef:
case kOptRef: { case kOptRef:
switch (sig->GetParam(i).heap_representation()) { case kRtt:
case HeapType::kExtern:
case HeapType::kFunc:
case HeapType::kEq:
case HeapType::kData:
case HeapType::kArray:
case HeapType::kI31:
case HeapType::kAny: {
Handle<Object> ref = value.to_ref();
encoded_values->set(encoded_index++, *ref);
break;
}
case HeapType::kBottom:
UNREACHABLE();
default:
// TODO(7748): Implement these.
UNIMPLEMENTED();
}
break;
}
case kRtt: // TODO(7748): Implement.
case kRttWithDepth: case kRttWithDepth:
encoded_values->set(encoded_index++, *value.to_ref());
break;
case kI8: case kI8:
case kI16: case kI16:
case kVoid: case kVoid:
...@@ -3270,28 +3252,13 @@ class WasmInterpreterInternals { ...@@ -3270,28 +3252,13 @@ class WasmInterpreterInternals {
break; break;
} }
case kRef: case kRef:
case kOptRef: { case kOptRef:
switch (sig->GetParam(i).heap_representation()) { case kRtt:
case HeapType::kExtern: case kRttWithDepth: {
case HeapType::kFunc: Handle<Object> ref(encoded_values->get(encoded_index++), isolate_);
case HeapType::kEq: value = WasmValue(ref, sig->GetParam(i));
case HeapType::kData:
case HeapType::kArray:
case HeapType::kI31:
case HeapType::kAny: {
Handle<Object> ref(encoded_values->get(encoded_index++),
isolate_);
value = WasmValue(ref, sig->GetParam(i));
break;
}
default:
// TODO(7748): Implement these.
UNIMPLEMENTED();
}
break; break;
} }
case kRtt: // TODO(7748): Implement.
case kRttWithDepth:
case kI8: case kI8:
case kI16: case kI16:
case kVoid: case kVoid:
...@@ -3662,7 +3629,9 @@ class WasmInterpreterInternals { ...@@ -3662,7 +3629,9 @@ class WasmInterpreterInternals {
FOREACH_WASMVALUE_CTYPES(CASE_TYPE) FOREACH_WASMVALUE_CTYPES(CASE_TYPE)
#undef CASE_TYPE #undef CASE_TYPE
case kRef: case kRef:
case kOptRef: { case kOptRef:
case kRtt:
case kRttWithDepth: {
// TODO(7748): Type checks or DCHECKs for ref types? // TODO(7748): Type checks or DCHECKs for ref types?
HandleScope handle_scope(isolate_); // Avoid leaking handles. HandleScope handle_scope(isolate_); // Avoid leaking handles.
Handle<FixedArray> global_buffer; // The buffer of the global. Handle<FixedArray> global_buffer; // The buffer of the global.
...@@ -3674,8 +3643,6 @@ class WasmInterpreterInternals { ...@@ -3674,8 +3643,6 @@ class WasmInterpreterInternals {
global_buffer->set(global_index, *ref); global_buffer->set(global_index, *ref);
break; break;
} }
case kRtt: // TODO(7748): Implement.
case kRttWithDepth:
case kI8: case kI8:
case kI16: case kI16:
case kVoid: case kVoid:
...@@ -4073,25 +4040,18 @@ class WasmInterpreterInternals { ...@@ -4073,25 +4040,18 @@ class WasmInterpreterInternals {
case kVoid: case kVoid:
PrintF("void"); PrintF("void");
break; break;
case kRef: case kOptRef:
case kOptRef: { if (val.to_ref()->IsNull()) {
if (val.type().is_reference_to(HeapType::kExtern)) { PrintF("ref:null");
Handle<Object> ref = val.to_ref(); break;
if (ref->IsNull()) {
PrintF("ref:null");
} else {
PrintF("ref:0x%" V8PRIxPTR, ref->ptr());
}
} else {
// TODO(7748): Implement this properly.
PrintF("ref/ref null");
} }
V8_FALLTHROUGH;
case kRef:
PrintF("ref:0x%" V8PRIxPTR, val.to_ref()->ptr());
break; break;
}
case kRtt: case kRtt:
case kRttWithDepth: case kRttWithDepth:
// TODO(7748): Implement properly. PrintF("rtt:0x%" V8PRIxPTR, val.to_ref()->ptr());
PrintF("rtt");
break; break;
case kI8: case kI8:
case kI16: case kI16:
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment