Commit b77deeca authored by Manos Koukoutos, committed by Commit Bot

[wasm-gc] Remove abstract rtts

In the latest wasm-gc spec, rtts of abstract types are no longer
allowed. Consequently, canonical rtts of concrete types always have
a depth of 0.
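
For illustration only (not part of this CL's diff), a minimal sketch of the
resulting value types, using the ValueType::Rtt constructor as changed in
value_type.h; the local names below are placeholders, not code from the CL:

  uint32_t type_index = 0;  // index of some struct/array/function type in the module
  // Canonical rtts of concrete types always carry inheritance depth 0.
  wasm::ValueType canon = wasm::ValueType::Rtt(type_index, /*inheritance_depth=*/0);
  // rtt.sub still yields an rtt one level deeper than its parent.
  wasm::ValueType sub = wasm::ValueType::Rtt(type_index, canon.depth() + 1);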

Changes:
- Change the immediate argument of rtt instructions from a heap type
  to a type index. Abstract it with TypeIndexImmediate in function-body
  decoding.
  This affects:
  value_type.h, read_value_type(), decoding of relevant opcodes,
  wasm subtyping, WasmInitExpr, consume_init_expr(), and
  wasm-module-builder.cc.
- In function-body-decoder-impl.h, update rtt.canon to always produce
  an rtt of depth 0.
- Pass a uint32_t type index instead of a HeapType to all rtt-related
  utilities (see the sketch after this list).
- Remove infrastructure for abstract-type rtts from the wasm compilers,
  setup-heap-internal.cc, roots.h, and module-instantiate.cc.
- Remove ObjectReferenceKnowledge::rtt_is_i31. Remove related branches
  from ref.test, ref.cast and br_on_cast implementations in the wasm
  compilers.
- Remove unused 'parent' field from WasmTypeInfo.
- Make the parent argument optional in NewWasmTypeInfo, CreateStructMap,
  and CreateArrayMap.
- Use more convenient arguments in IsHeapSubtypeOf.
- Update tests.
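
For orientation, a rough sketch of the new index-based call shapes, assembled
from the signatures in the diff below; {builder}, {imm}, {parent_type}, and
{module} are assumed to be in scope and are illustrative only:

  uint32_t type_index = imm.index;                   // from a TypeIndexImmediate
  Node* rtt = builder->RttCanon(type_index);         // compiler::WasmGraphBuilder
  Node* sub_rtt = builder->RttSub(type_index, rtt);  // one level deeper than {rtt}
  bool ok = IsHeapSubtypeOf(type_index, parent_type.ref_index(), module);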

Bug: v8:7748
Change-Id: Ib45efe0741e6558c9b291fc8b4a75ae303146bdc
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2642248
Commit-Queue: Manos Koukoutos <manoskouk@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Jakob Kummerow <jkummerow@chromium.org>
Cr-Commit-Position: refs/heads/master@{#72321}
parent 4adf55a0
......@@ -5719,42 +5719,17 @@ Node* WasmGraphBuilder::ArrayNewWithRtt(uint32_t array_index,
return a;
}
Node* WasmGraphBuilder::RttCanon(wasm::HeapType type) {
RootIndex index;
switch (type.representation()) {
case wasm::HeapType::kEq:
index = RootIndex::kWasmRttEqrefMap;
break;
case wasm::HeapType::kExtern:
index = RootIndex::kWasmRttExternrefMap;
break;
case wasm::HeapType::kFunc:
index = RootIndex::kWasmRttFuncrefMap;
break;
case wasm::HeapType::kI31:
index = RootIndex::kWasmRttI31refMap;
break;
case wasm::HeapType::kAny:
index = RootIndex::kWasmRttAnyrefMap;
break;
case wasm::HeapType::kBottom:
UNREACHABLE();
default: {
// User-defined type.
Node* maps_list =
LOAD_INSTANCE_FIELD(ManagedObjectMaps, MachineType::TaggedPointer());
return LOAD_FIXED_ARRAY_SLOT_PTR(maps_list, type.ref_index());
}
}
return LOAD_FULL_POINTER(BuildLoadIsolateRoot(),
IsolateData::root_slot_offset(index));
Node* WasmGraphBuilder::RttCanon(uint32_t type_index) {
Node* maps_list =
LOAD_INSTANCE_FIELD(ManagedObjectMaps, MachineType::TaggedPointer());
return LOAD_FIXED_ARRAY_SLOT_PTR(maps_list, type_index);
}
Node* WasmGraphBuilder::RttSub(wasm::HeapType type, Node* parent_rtt) {
return CALL_BUILTIN(WasmAllocateRtt,
graph()->NewNode(mcgraph()->common()->Int32Constant(
type.representation())),
parent_rtt);
Node* WasmGraphBuilder::RttSub(uint32_t type_index, Node* parent_rtt) {
return CALL_BUILTIN(
WasmAllocateRtt,
graph()->NewNode(mcgraph()->common()->Int32Constant(type_index)),
parent_rtt);
}
void AssertFalse(MachineGraph* mcgraph, GraphAssembler* gasm, Node* condition) {
......@@ -5773,9 +5748,6 @@ Node* WasmGraphBuilder::RefTest(Node* object, Node* rtt,
ObjectReferenceKnowledge config) {
auto done = gasm_->MakeLabel(MachineRepresentation::kWord32);
if (config.object_can_be_i31) {
if (config.rtt_is_i31) {
return gasm_->IsI31(object);
}
gasm_->GotoIf(gasm_->IsI31(object), &done, gasm_->Int32Constant(0));
} else {
AssertFalse(mcgraph(), gasm_.get(), gasm_->IsI31(object));
......@@ -5810,12 +5782,7 @@ Node* WasmGraphBuilder::RefCast(Node* object, Node* rtt,
ObjectReferenceKnowledge config,
wasm::WasmCodePosition position) {
if (config.object_can_be_i31) {
if (config.rtt_is_i31) {
TrapIfFalse(wasm::kTrapIllegalCast, gasm_->IsI31(object), position);
return object;
} else {
TrapIfTrue(wasm::kTrapIllegalCast, gasm_->IsI31(object), position);
}
TrapIfTrue(wasm::kTrapIllegalCast, gasm_->IsI31(object), position);
} else {
AssertFalse(mcgraph(), gasm_.get(), gasm_->IsI31(object));
}
......@@ -5861,17 +5828,12 @@ Node* WasmGraphBuilder::BrOnCast(Node* object, Node* rtt,
Node* is_i31 = gasm_->IsI31(object);
if (config.object_can_be_i31) {
if (config.rtt_is_i31) {
BranchExpectFalse(is_i31, match_control, no_match_control);
return nullptr;
} else {
Node* i31_branch = graph()->NewNode(
mcgraph()->common()->Branch(BranchHint::kFalse), is_i31, control());
SetControl(graph()->NewNode(mcgraph()->common()->IfFalse(), i31_branch));
no_match_controls.emplace_back(
graph()->NewNode(mcgraph()->common()->IfTrue(), i31_branch));
no_match_effects.emplace_back(effect());
}
Node* i31_branch = graph()->NewNode(
mcgraph()->common()->Branch(BranchHint::kFalse), is_i31, control());
SetControl(graph()->NewNode(mcgraph()->common()->IfFalse(), i31_branch));
no_match_controls.emplace_back(
graph()->NewNode(mcgraph()->common()->IfTrue(), i31_branch));
no_match_effects.emplace_back(effect());
} else {
AssertFalse(mcgraph(), gasm_.get(), is_i31);
}
......
......@@ -166,7 +166,6 @@ class WasmGraphBuilder {
bool object_can_be_null;
bool object_must_be_data_ref;
bool object_can_be_i31;
bool rtt_is_i31;
uint8_t rtt_depth;
};
enum EnforceBoundsCheck : bool { // --
......@@ -434,8 +433,8 @@ class WasmGraphBuilder {
Node* I31New(Node* input);
Node* I31GetS(Node* input);
Node* I31GetU(Node* input);
Node* RttCanon(wasm::HeapType type);
Node* RttSub(wasm::HeapType type, Node* parent_rtt);
Node* RttCanon(uint32_t type_index);
Node* RttSub(uint32_t type_index, Node* parent_rtt);
Node* RefTest(Node* object, Node* rtt, ObjectReferenceKnowledge config);
Node* RefCast(Node* object, Node* rtt, ObjectReferenceKnowledge config,
wasm::WasmCodePosition position);
......
......@@ -1716,7 +1716,6 @@ void AsmWasmData::AsmWasmDataPrint(std::ostream& os) { // NOLINT
void WasmTypeInfo::WasmTypeInfoPrint(std::ostream& os) { // NOLINT
PrintHeader(os, "WasmTypeInfo");
os << "\n - type address: " << reinterpret_cast<void*>(foreign_address());
os << "\n - parent: " << Brief(parent());
os << "\n";
}
......
......@@ -1308,16 +1308,15 @@ Handle<Foreign> Factory::NewForeign(Address addr) {
}
Handle<WasmTypeInfo> Factory::NewWasmTypeInfo(Address type_address,
Handle<Map> parent) {
Handle<Map> opt_parent) {
Handle<ArrayList> subtypes = ArrayList::New(isolate(), 0);
Handle<FixedArray> supertypes;
if (parent->IsWasmStructMap() || parent->IsWasmArrayMap()) {
supertypes = CopyFixedArrayAndGrow(
handle(parent->wasm_type_info().supertypes(), isolate()), 1);
supertypes->set(supertypes->length() - 1, *parent);
if (opt_parent.is_null()) {
supertypes = NewUninitializedFixedArray(0);
} else {
supertypes = NewUninitializedFixedArray(1);
supertypes->set(0, *parent);
supertypes = CopyFixedArrayAndGrow(
handle(opt_parent->wasm_type_info().supertypes(), isolate()), 1);
supertypes->set(supertypes->length() - 1, *opt_parent);
}
Map map = *wasm_type_info_map();
HeapObject result = AllocateRawWithImmortalMap(map.instance_size(),
......@@ -1325,7 +1324,6 @@ Handle<WasmTypeInfo> Factory::NewWasmTypeInfo(Address type_address,
Handle<WasmTypeInfo> info(WasmTypeInfo::cast(result), isolate());
info->AllocateExternalPointerEntries(isolate());
info->set_foreign_address(isolate(), type_address);
info->set_parent(*parent);
info->set_supertypes(*supertypes);
info->set_subtypes(*subtypes);
return info;
......
......@@ -548,7 +548,7 @@ class V8_EXPORT_PRIVATE Factory : public FactoryBase<Factory> {
Handle<JSModuleNamespace> NewJSModuleNamespace();
Handle<WasmTypeInfo> NewWasmTypeInfo(Address type_address,
Handle<Map> parent);
Handle<Map> opt_parent);
Handle<SourceTextModule> NewSourceTextModule(Handle<SharedFunctionInfo> code);
Handle<SyntheticModule> NewSyntheticModule(
......
......@@ -504,14 +504,6 @@ bool Heap::CreateInitialMaps() {
ALLOCATE_MAP(CODE_DATA_CONTAINER_TYPE, CodeDataContainer::kSize,
code_data_container)
// The wasm_rttcanon_* maps are never used for real objects, only as
// sentinels. They are maps so that they fit in with their subtype maps
// (which are real maps).
ALLOCATE_MAP(WASM_STRUCT_TYPE, 0, wasm_rttcanon_eqref)
ALLOCATE_MAP(WASM_STRUCT_TYPE, 0, wasm_rttcanon_externref)
ALLOCATE_MAP(WASM_STRUCT_TYPE, 0, wasm_rttcanon_funcref)
ALLOCATE_MAP(WASM_STRUCT_TYPE, 0, wasm_rttcanon_i31ref)
ALLOCATE_MAP(WASM_STRUCT_TYPE, 0, wasm_rttcanon_anyref)
ALLOCATE_MAP(WASM_TYPE_INFO_TYPE, WasmTypeInfo::kSize, wasm_type_info)
ALLOCATE_MAP(WEAK_CELL_TYPE, WeakCell::kSize, weak_cell)
......@@ -621,74 +613,6 @@ bool Heap::CreateInitialMaps() {
set_empty_closure_feedback_cell_array(ClosureFeedbackCellArray::cast(obj));
}
// Set up the WasmTypeInfo objects for built-in generic Wasm RTTs.
// anyref:
{
/* Subtypes. We do not cache subtypes for (rtt.canon any). */
int slot_count = ArrayList::kHeaderFields;
if (!AllocateRaw(ArrayList::SizeFor(slot_count), AllocationType::kOld)
.To(&obj)) {
return false;
}
obj.set_map_after_allocation(roots.array_list_map());
ArrayList subtypes = ArrayList::cast(obj);
subtypes.set_length(slot_count);
subtypes.SetLength(0);
/* TypeInfo */
if (!AllocateRaw(WasmTypeInfo::kSize, AllocationType::kOld).To(&obj)) {
return false;
}
obj.set_map_after_allocation(roots.wasm_type_info_map(),
SKIP_WRITE_BARRIER);
WasmTypeInfo type_info = WasmTypeInfo::cast(obj);
type_info.set_subtypes(subtypes);
type_info.set_supertypes(roots.empty_fixed_array());
type_info.set_parent(roots.null_map());
type_info.clear_foreign_address(isolate());
wasm_rttcanon_anyref_map().set_wasm_type_info(type_info);
}
// Rest of builtin types:
#define ALLOCATE_TYPE_INFO(which) \
{ \
/* Subtypes */ \
int slot_count = ArrayList::kHeaderFields; \
if (!AllocateRaw(ArrayList::SizeFor(slot_count), AllocationType::kOld) \
.To(&obj)) { \
return false; \
} \
obj.set_map_after_allocation(roots.array_list_map()); \
ArrayList subtypes = ArrayList::cast(obj); \
subtypes.set_length(slot_count); \
subtypes.SetLength(0); \
/* Supertypes */ \
if (!AllocateRaw(FixedArray::SizeFor(1), AllocationType::kOld).To(&obj)) { \
return false; \
} \
obj.set_map_after_allocation(roots.fixed_array_map(), SKIP_WRITE_BARRIER); \
FixedArray supertypes = FixedArray::cast(obj); \
supertypes.set_length(1); \
supertypes.set(0, wasm_rttcanon_anyref_map()); \
/* TypeInfo */ \
if (!AllocateRaw(WasmTypeInfo::kSize, AllocationType::kOld).To(&obj)) { \
return false; \
} \
obj.set_map_after_allocation(roots.wasm_type_info_map(), \
SKIP_WRITE_BARRIER); \
WasmTypeInfo type_info = WasmTypeInfo::cast(obj); \
type_info.set_subtypes(subtypes); \
type_info.set_supertypes(supertypes); \
type_info.set_parent(wasm_rttcanon_anyref_map()); \
type_info.clear_foreign_address(isolate()); \
wasm_rttcanon_##which##_map().set_wasm_type_info(type_info); \
}
ALLOCATE_TYPE_INFO(eqref)
ALLOCATE_TYPE_INFO(externref)
ALLOCATE_TYPE_INFO(funcref)
ALLOCATE_TYPE_INFO(i31ref)
#undef ALLOCATE_TYPE_INFO
DCHECK(!InYoungGeneration(roots.empty_fixed_array()));
roots.bigint_map().SetConstructorFunctionIndex(
......
......@@ -563,7 +563,6 @@ class WasmTypeInfo::BodyDescriptor final : public BodyDescriptorBase {
ObjectVisitor* v) {
Foreign::BodyDescriptor::IterateBody<ObjectVisitor>(map, obj, object_size,
v);
IteratePointer(obj, kParentOffset, v);
IteratePointer(obj, kSupertypesOffset, v);
IteratePointer(obj, kSubtypesOffset, v);
}
......
......@@ -200,11 +200,6 @@ class Symbol;
/* Maps */ \
V(Map, external_map, ExternalMap) \
V(Map, message_object_map, JSMessageObjectMap) \
V(Map, wasm_rttcanon_eqref_map, WasmRttEqrefMap) \
V(Map, wasm_rttcanon_externref_map, WasmRttExternrefMap) \
V(Map, wasm_rttcanon_funcref_map, WasmRttFuncrefMap) \
V(Map, wasm_rttcanon_i31ref_map, WasmRttI31refMap) \
V(Map, wasm_rttcanon_anyref_map, WasmRttAnyrefMap) \
/* Canonical empty values */ \
V(Script, empty_script, EmptyScript) \
V(FeedbackCell, many_closures_cell, ManyClosuresCell) \
......
......@@ -4359,50 +4359,20 @@ class LiftoffCompiler {
__ PushRegister(kWasmI32, dst);
}
void RttCanon(FullDecoder* decoder, const HeapTypeImmediate<validate>& imm,
Value* result) {
void RttCanon(FullDecoder* decoder, uint32_t type_index, Value* result) {
LiftoffRegister rtt = __ GetUnusedRegister(kGpReg, {});
RootIndex index;
switch (imm.type.representation()) {
case wasm::HeapType::kEq:
index = RootIndex::kWasmRttEqrefMap;
break;
case wasm::HeapType::kExtern:
index = RootIndex::kWasmRttExternrefMap;
break;
case wasm::HeapType::kFunc:
index = RootIndex::kWasmRttFuncrefMap;
break;
case wasm::HeapType::kI31:
index = RootIndex::kWasmRttI31refMap;
break;
case wasm::HeapType::kAny:
index = RootIndex::kWasmRttAnyrefMap;
break;
case wasm::HeapType::kBottom:
UNREACHABLE();
default:
// User-defined type.
LOAD_TAGGED_PTR_INSTANCE_FIELD(rtt.gp(), ManagedObjectMaps);
__ LoadTaggedPointer(
rtt.gp(), rtt.gp(), no_reg,
wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(
imm.type.ref_index()),
{});
__ PushRegister(ValueType::Rtt(imm.type, 1), rtt);
return;
}
LOAD_INSTANCE_FIELD(rtt.gp(), IsolateRoot, kSystemPointerSize);
__ LoadTaggedPointer(rtt.gp(), rtt.gp(), no_reg,
IsolateData::root_slot_offset(index), {});
__ PushRegister(ValueType::Rtt(imm.type, 1), rtt);
LOAD_TAGGED_PTR_INSTANCE_FIELD(rtt.gp(), ManagedObjectMaps);
__ LoadTaggedPointer(
rtt.gp(), rtt.gp(), no_reg,
wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(type_index), {});
__ PushRegister(ValueType::Rtt(type_index, 1), rtt);
}
void RttSub(FullDecoder* decoder, const HeapTypeImmediate<validate>& imm,
const Value& parent, Value* result) {
void RttSub(FullDecoder* decoder, uint32_t type_index, const Value& parent,
Value* result) {
ValueType parent_value_type = parent.type;
ValueType rtt_value_type =
ValueType::Rtt(imm.type, parent_value_type.depth() + 1);
ValueType::Rtt(type_index, parent_value_type.depth() + 1);
WasmCode::RuntimeStubId target = WasmCode::kWasmAllocateRtt;
compiler::CallDescriptor* call_descriptor =
GetBuiltinCallDescriptor<WasmAllocateRttDescriptor>(compilation_zone_);
......@@ -4411,7 +4381,7 @@ class LiftoffCompiler {
LiftoffAssembler::VarState parent_var =
__ cache_state()->stack_state.end()[-1];
LiftoffRegister type_reg = __ GetUnusedRegister(kGpReg, {});
__ LoadConstant(type_reg, WasmValue(imm.type.representation()));
__ LoadConstant(type_reg, WasmValue(type_index));
LiftoffAssembler::VarState type_var(kWasmI32, type_reg, 0);
__ PrepareBuiltinCall(&sig, call_descriptor, {type_var, parent_var});
__ CallRuntimeStub(target);
......@@ -4432,66 +4402,57 @@ class LiftoffCompiler {
LiftoffRegister obj_reg = pinned.set(__ PopToRegister(pinned));
bool obj_can_be_i31 = IsSubtypeOf(kWasmI31Ref, obj.type, decoder->module_);
bool rtt_is_i31 = rtt.type.heap_representation() == HeapType::kI31;
bool i31_check_only = obj_can_be_i31 && rtt_is_i31;
if (i31_check_only) {
__ emit_smi_check(obj_reg.gp(), no_match,
LiftoffAssembler::kJumpOnNotSmi);
// Emit no further code, just fall through to {match}.
} else {
// Reserve all temporary registers up front, so that the cache state
// tracking doesn't get confused by the following conditional jumps.
LiftoffRegister tmp1 =
opt_scratch != no_reg
? LiftoffRegister(opt_scratch)
: pinned.set(__ GetUnusedRegister(kGpReg, pinned));
LiftoffRegister tmp2 = pinned.set(__ GetUnusedRegister(kGpReg, pinned));
if (obj_can_be_i31) {
DCHECK(!rtt_is_i31);
__ emit_smi_check(obj_reg.gp(), no_match, LiftoffAssembler::kJumpOnSmi);
}
if (obj.type.is_nullable()) {
LoadNullValue(tmp1.gp(), pinned);
__ emit_cond_jump(kEqual, no_match, obj.type, obj_reg.gp(), tmp1.gp());
}
// At this point, the object is neither null nor an i31ref. Perform
// a regular type check. Check for exact match first.
__ LoadMap(tmp1.gp(), obj_reg.gp());
// {tmp1} now holds the object's map.
__ emit_cond_jump(kEqual, &match, rtt.type, tmp1.gp(), rtt_reg.gp());
// If the object isn't guaranteed to be an array or struct, check that.
// Subsequent code wouldn't handle e.g. funcrefs.
if (!is_data_ref_type(obj.type, decoder->module_)) {
EmitDataRefCheck(tmp1.gp(), no_match, tmp2, pinned);
}
// Reserve all temporary registers up front, so that the cache state
// tracking doesn't get confused by the following conditional jumps.
LiftoffRegister tmp1 =
opt_scratch != no_reg
? LiftoffRegister(opt_scratch)
: pinned.set(__ GetUnusedRegister(kGpReg, pinned));
LiftoffRegister tmp2 = pinned.set(__ GetUnusedRegister(kGpReg, pinned));
if (obj_can_be_i31) {
__ emit_smi_check(obj_reg.gp(), no_match, LiftoffAssembler::kJumpOnSmi);
}
if (obj.type.is_nullable()) {
LoadNullValue(tmp1.gp(), pinned);
__ emit_cond_jump(kEqual, no_match, obj.type, obj_reg.gp(), tmp1.gp());
}
// At this point, the object is neither null nor an i31ref. Perform
// a regular type check. Check for exact match first.
__ LoadMap(tmp1.gp(), obj_reg.gp());
// {tmp1} now holds the object's map.
__ emit_cond_jump(kEqual, &match, rtt.type, tmp1.gp(), rtt_reg.gp());
// If the object isn't guaranteed to be an array or struct, check that.
// Subsequent code wouldn't handle e.g. funcrefs.
if (!is_data_ref_type(obj.type, decoder->module_)) {
EmitDataRefCheck(tmp1.gp(), no_match, tmp2, pinned);
}
// Constant-time subtyping check: load exactly one candidate RTT from the
// supertypes list.
// Step 1: load the WasmTypeInfo into {tmp1}.
constexpr int kTypeInfoOffset = wasm::ObjectAccess::ToTagged(
Map::kConstructorOrBackPointerOrNativeContextOffset);
__ LoadTaggedPointer(tmp1.gp(), tmp1.gp(), no_reg, kTypeInfoOffset, pinned);
// Step 2: load the super types list into {tmp1}.
constexpr int kSuperTypesOffset =
wasm::ObjectAccess::ToTagged(WasmTypeInfo::kSupertypesOffset);
__ LoadTaggedPointer(tmp1.gp(), tmp1.gp(), no_reg, kSuperTypesOffset,
pinned);
// Step 3: check the list's length.
LiftoffRegister list_length = tmp2;
__ LoadFixedArrayLengthAsInt32(list_length, tmp1.gp(), pinned);
__ emit_i32_cond_jumpi(kUnsignedLessEqual, no_match, list_length.gp(),
rtt.type.depth());
// Step 4: load the candidate list slot into {tmp1}, and compare it.
__ LoadTaggedPointer(
tmp1.gp(), tmp1.gp(), no_reg,
wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(rtt.type.depth()),
pinned);
__ emit_cond_jump(kUnequal, no_match, rtt.type, tmp1.gp(), rtt_reg.gp());
// Fall through to {match}.
// Constant-time subtyping check: load exactly one candidate RTT from the
// supertypes list.
// Step 1: load the WasmTypeInfo into {tmp1}.
constexpr int kTypeInfoOffset = wasm::ObjectAccess::ToTagged(
Map::kConstructorOrBackPointerOrNativeContextOffset);
__ LoadTaggedPointer(tmp1.gp(), tmp1.gp(), no_reg, kTypeInfoOffset,
pinned);
// Step 2: load the super types list into {tmp1}.
constexpr int kSuperTypesOffset =
wasm::ObjectAccess::ToTagged(WasmTypeInfo::kSupertypesOffset);
__ LoadTaggedPointer(tmp1.gp(), tmp1.gp(), no_reg, kSuperTypesOffset,
pinned);
// Step 3: check the list's length.
LiftoffRegister list_length = tmp2;
__ LoadFixedArrayLengthAsInt32(list_length, tmp1.gp(), pinned);
__ emit_i32_cond_jumpi(kUnsignedLessEqual, no_match, list_length.gp(),
rtt.type.depth());
// Step 4: load the candidate list slot into {tmp1}, and compare it.
__ LoadTaggedPointer(
tmp1.gp(), tmp1.gp(), no_reg,
wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(rtt.type.depth()),
pinned);
__ emit_cond_jump(kUnequal, no_match, rtt.type, tmp1.gp(), rtt_reg.gp());
// Fall through to {match}.
}
__ bind(&match);
return obj_reg;
}
......@@ -4519,7 +4480,7 @@ class LiftoffCompiler {
Label* trap_label = AddOutOfLineTrap(decoder->position(),
WasmCode::kThrowWasmTrapIllegalCast);
LiftoffRegister obj_reg = SubtypeCheck(decoder, obj, rtt, trap_label);
__ PushRegister(ValueType::Ref(rtt.type.heap_type(), kNonNullable),
__ PushRegister(ValueType::Ref(rtt.type.ref_index(), kNonNullable),
obj_reg);
}
......@@ -4537,7 +4498,7 @@ class LiftoffCompiler {
__ PushRegister(rtt.type.is_bottom()
? kWasmBottom
: ValueType::Ref(rtt.type.heap_type(), kNonNullable),
: ValueType::Ref(rtt.type.ref_index(), kNonNullable),
obj_reg);
BrOrRet(decoder, depth);
......
......@@ -971,14 +971,13 @@ class WasmGraphBuildingInterface {
result->node = BUILD(I31GetU, input.node);
}
void RttCanon(FullDecoder* decoder, const HeapTypeImmediate<validate>& imm,
Value* result) {
result->node = BUILD(RttCanon, imm.type);
void RttCanon(FullDecoder* decoder, uint32_t type_index, Value* result) {
result->node = BUILD(RttCanon, type_index);
}
void RttSub(FullDecoder* decoder, const HeapTypeImmediate<validate>& imm,
const Value& parent, Value* result) {
result->node = BUILD(RttSub, imm.type, parent.node);
void RttSub(FullDecoder* decoder, uint32_t type_index, const Value& parent,
Value* result) {
result->node = BUILD(RttSub, type_index, parent.node);
}
using StaticKnowledge = compiler::WasmGraphBuilder::ObjectReferenceKnowledge;
......@@ -991,7 +990,6 @@ class WasmGraphBuildingInterface {
DCHECK(object_type.is_object_reference_type()); // Checked by validation.
result.object_must_be_data_ref = is_data_ref_type(object_type, module);
result.object_can_be_i31 = IsSubtypeOf(kWasmI31Ref, object_type, module);
result.rtt_is_i31 = rtt_type.heap_representation() == HeapType::kI31;
result.rtt_depth = rtt_type.depth();
return result;
}
......
......@@ -1369,8 +1369,7 @@ class ModuleDecoderImpl : public Decoder {
case WasmInitExpr::kRefNullConst:
return ValueType::Ref(expr.immediate().heap_type, kNullable);
case WasmInitExpr::kRttCanon: {
uint8_t depth = expr.immediate().heap_type == HeapType::kAny ? 0 : 1;
return ValueType::Rtt(expr.immediate().heap_type, depth);
return ValueType::Rtt(expr.immediate().heap_type, 0);
}
case WasmInitExpr::kRttSub: {
ValueType operand_type = TypeOf(*expr.operand());
......@@ -1768,18 +1767,21 @@ class ModuleDecoderImpl : public Decoder {
opcode = read_prefixed_opcode<validate>(pc(), &len);
switch (opcode) {
case kExprRttCanon: {
HeapTypeImmediate<validate> imm(enabled_features_, this, pc() + 2,
module_.get());
if (V8_UNLIKELY(failed())) return {};
TypeIndexImmediate<validate> imm(this, pc() + 2);
if (V8_UNLIKELY(imm.index >= module_->types.capacity())) {
errorf(pc() + 2, "type index %u is out of bounds", imm.index);
return {};
}
len += imm.length;
stack.push_back(
WasmInitExpr::RttCanon(imm.type.representation()));
stack.push_back(WasmInitExpr::RttCanon(imm.index));
break;
}
case kExprRttSub: {
HeapTypeImmediate<validate> imm(enabled_features_, this, pc() + 2,
module_.get());
if (V8_UNLIKELY(failed())) return {};
TypeIndexImmediate<validate> imm(this, pc() + 2);
if (V8_UNLIKELY(imm.index >= module_->types.capacity())) {
errorf(pc() + 2, "type index %u is out of bounds", imm.index);
return {};
}
len += imm.length;
if (stack.empty()) {
error(pc(), "calling rtt.sub without arguments");
......@@ -1788,17 +1790,15 @@ class ModuleDecoderImpl : public Decoder {
WasmInitExpr parent = std::move(stack.back());
stack.pop_back();
ValueType parent_type = TypeOf(parent);
if (V8_UNLIKELY(
parent_type.kind() != ValueType::kRtt ||
!IsSubtypeOf(
ValueType::Ref(imm.type, kNonNullable),
ValueType::Ref(parent_type.heap_type(), kNonNullable),
module_.get()))) {
if (V8_UNLIKELY(!parent_type.is_rtt() ||
!IsHeapSubtypeOf(imm.index,
parent_type.ref_index(),
module_.get()))) {
error(pc(), "rtt.sub requires a supertype rtt on stack");
return {};
}
stack.push_back(WasmInitExpr::RttSub(imm.type.representation(),
std::move(parent)));
stack.push_back(
WasmInitExpr::RttSub(imm.index, std::move(parent)));
break;
}
default: {
......
......@@ -119,7 +119,7 @@ class CompileImportWrapperJob final : public JobTask {
// TODO(jkummerow): Move these elsewhere.
Handle<Map> CreateStructMap(Isolate* isolate, const WasmModule* module,
int struct_index, Handle<Map> rtt_parent) {
int struct_index, Handle<Map> opt_rtt_parent) {
const wasm::StructType* type = module->struct_type(struct_index);
const int inobject_properties = 0;
DCHECK_LE(type->total_fields_size(), kMaxInt - WasmStruct::kHeaderSize);
......@@ -129,7 +129,7 @@ Handle<Map> CreateStructMap(Isolate* isolate, const WasmModule* module,
// TODO(jkummerow): If NO_ELEMENTS were supported, we could use that here.
const ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND;
Handle<WasmTypeInfo> type_info = isolate->factory()->NewWasmTypeInfo(
reinterpret_cast<Address>(type), rtt_parent);
reinterpret_cast<Address>(type), opt_rtt_parent);
Handle<Map> map = isolate->factory()->NewMap(
instance_type, instance_size, elements_kind, inobject_properties);
map->set_wasm_type_info(*type_info);
......@@ -137,28 +137,14 @@ Handle<Map> CreateStructMap(Isolate* isolate, const WasmModule* module,
}
Handle<Map> CreateArrayMap(Isolate* isolate, const WasmModule* module,
int array_index, Handle<Map> rtt_parent) {
int array_index, Handle<Map> opt_rtt_parent) {
const wasm::ArrayType* type = module->array_type(array_index);
const int inobject_properties = 0;
const int instance_size = kVariableSizeSentinel;
const InstanceType instance_type = WASM_ARRAY_TYPE;
const ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND;
Handle<WasmTypeInfo> type_info = isolate->factory()->NewWasmTypeInfo(
reinterpret_cast<Address>(type), rtt_parent);
Handle<Map> map = isolate->factory()->NewMap(
instance_type, instance_size, elements_kind, inobject_properties);
map->set_wasm_type_info(*type_info);
return map;
}
Handle<Map> CreateGenericRtt(Isolate* isolate, const WasmModule* module,
Handle<Map> rtt_parent) {
const int inobject_properties = 0;
const int instance_size = 0;
const InstanceType instance_type = WASM_STRUCT_TYPE; // Fake; good enough.
const ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND;
Handle<WasmTypeInfo> type_info =
isolate->factory()->NewWasmTypeInfo(0, rtt_parent);
reinterpret_cast<Address>(type), opt_rtt_parent);
Handle<Map> map = isolate->factory()->NewMap(
instance_type, instance_size, elements_kind, inobject_properties);
map->set_wasm_type_info(*type_info);
......@@ -207,9 +193,7 @@ Handle<Map> AllocateSubRtt(Isolate* isolate,
// Allocate a fresh RTT otherwise.
const wasm::WasmModule* module = instance->module();
Handle<Map> rtt;
if (wasm::HeapType(type).is_generic()) {
rtt = wasm::CreateGenericRtt(isolate, module, parent);
} else if (module->has_struct(type)) {
if (module->has_struct(type)) {
rtt = wasm::CreateStructMap(isolate, module, type, parent);
} else if (module->has_array(type)) {
rtt = wasm::CreateArrayMap(isolate, module, type, parent);
......@@ -628,18 +612,16 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
if (enabled_.has_gc()) {
Handle<FixedArray> maps = isolate_->factory()->NewUninitializedFixedArray(
static_cast<int>(module_->type_kinds.size()));
Handle<Map> anyref_map =
Handle<Map>::cast(isolate_->root_handle(RootIndex::kWasmRttAnyrefMap));
for (int map_index = 0;
map_index < static_cast<int>(module_->type_kinds.size());
map_index++) {
Handle<Map> map;
switch (module_->type_kinds[map_index]) {
case kWasmStructTypeCode:
map = CreateStructMap(isolate_, module_, map_index, anyref_map);
map = CreateStructMap(isolate_, module_, map_index, Handle<Map>());
break;
case kWasmArrayTypeCode:
map = CreateArrayMap(isolate_, module_, map_index, anyref_map);
map = CreateArrayMap(isolate_, module_, map_index, Handle<Map>());
break;
case kWasmFunctionTypeCode:
// TODO(7748): Think about canonicalizing rtts to make them work for
......@@ -1553,26 +1535,11 @@ Handle<Object> InstanceBuilder::RecursivelyEvaluateGlobalInitializer(
return handle(tagged_globals_->get(old_offset), isolate_);
}
case WasmInitExpr::kRttCanon: {
switch (init.immediate().heap_type) {
case wasm::HeapType::kEq:
return isolate_->root_handle(RootIndex::kWasmRttEqrefMap);
case wasm::HeapType::kExtern:
return isolate_->root_handle(RootIndex::kWasmRttExternrefMap);
case wasm::HeapType::kFunc:
return isolate_->root_handle(RootIndex::kWasmRttFuncrefMap);
case wasm::HeapType::kI31:
return isolate_->root_handle(RootIndex::kWasmRttI31refMap);
case wasm::HeapType::kAny:
return isolate_->root_handle(RootIndex::kWasmRttAnyrefMap);
case wasm::HeapType::kBottom:
UNREACHABLE();
}
// Non-generic types fall through.
int map_index = init.immediate().heap_type;
int map_index = init.immediate().index;
return handle(instance->managed_object_maps().get(map_index), isolate_);
}
case WasmInitExpr::kRttSub: {
uint32_t type = static_cast<uint32_t>(init.immediate().heap_type);
uint32_t type = init.immediate().index;
Handle<Object> parent =
RecursivelyEvaluateGlobalInitializer(*init.operand(), instance);
return AllocateSubRtt(isolate_, instance, type,
......
......@@ -194,17 +194,13 @@ class ValueType {
return Ref(heap_type.representation(), nullability);
}
static constexpr ValueType Rtt(uint32_t heap_type,
static constexpr ValueType Rtt(uint32_t type_index,
uint8_t inheritance_depth) {
CONSTEXPR_DCHECK(HeapType(heap_type).is_valid());
CONSTEXPR_DCHECK(HeapType(type_index).is_index());
return ValueType(KindField::encode(kRtt) |
HeapTypeField::encode(heap_type) |
HeapTypeField::encode(type_index) |
DepthField::encode(inheritance_depth));
}
static constexpr ValueType Rtt(HeapType heap_type,
uint8_t inheritance_depth) {
return Rtt(heap_type.representation(), inheritance_depth);
}
// Useful when deserializing a type stored in a runtime object.
static constexpr ValueType FromRawBitField(uint32_t bit_field) {
......@@ -231,7 +227,7 @@ class ValueType {
constexpr bool has_depth() const { return is_rtt(); }
constexpr bool has_index() const {
return is_reference_type() && heap_type().is_index();
return is_rtt() || (is_object_reference_type() && heap_type().is_index());
}
constexpr bool is_defaultable() const {
......@@ -250,11 +246,12 @@ class ValueType {
/***************************** Field Accessors ******************************/
constexpr Kind kind() const { return KindField::decode(bit_field_); }
constexpr HeapType::Representation heap_representation() const {
CONSTEXPR_DCHECK(is_reference_type());
CONSTEXPR_DCHECK(is_object_reference_type());
return static_cast<HeapType::Representation>(
HeapTypeField::decode(bit_field_));
}
constexpr HeapType heap_type() const {
CONSTEXPR_DCHECK(is_object_reference_type());
return HeapType(heap_representation());
}
constexpr uint8_t depth() const {
......@@ -263,7 +260,7 @@ class ValueType {
}
constexpr uint32_t ref_index() const {
CONSTEXPR_DCHECK(has_index());
return heap_type().ref_index();
return HeapTypeField::decode(bit_field_);
}
// Useful when serializing this type to store it into a runtime object.
......@@ -429,8 +426,8 @@ class ValueType {
}
break;
case kRtt:
buf << "(rtt " << static_cast<uint32_t>(depth()) << " "
<< heap_type().name() << ")";
buf << "(rtt " << static_cast<uint32_t>(depth()) << " " << ref_index()
<< ")";
break;
default:
buf << kind_name();
......
......@@ -414,12 +414,13 @@ void WasmModuleBuilder::SetHasSharedMemory() { has_shared_memory_ = true; }
namespace {
void WriteValueType(ZoneBuffer* buffer, const ValueType& type) {
buffer->write_u8(type.value_type_code());
if (type.has_depth()) {
buffer->write_u32v(type.depth());
}
if (type.encoding_needs_heap_type()) {
if (type.is_object_reference_type() && type.encoding_needs_heap_type()) {
buffer->write_i32v(type.heap_type().code());
}
if (type.is_rtt()) {
buffer->write_u32v(type.depth());
buffer->write_u32v(type.ref_index());
}
}
void WriteGlobalInitializer(ZoneBuffer* buffer, const WasmInitExpr& init,
......@@ -497,7 +498,7 @@ void WriteGlobalInitializer(ZoneBuffer* buffer, const WasmInitExpr& init,
STATIC_ASSERT((kExprRttCanon >> 8) == kGCPrefix);
buffer->write_u8(kGCPrefix);
buffer->write_u8(static_cast<uint8_t>(kExprRttCanon));
buffer->write_i32v(HeapType(init.immediate().heap_type).code());
buffer->write_i32v(static_cast<int32_t>(init.immediate().index));
break;
case WasmInitExpr::kRttSub:
// The operand to rtt.sub must be emitted first.
......@@ -507,7 +508,7 @@ void WriteGlobalInitializer(ZoneBuffer* buffer, const WasmInitExpr& init,
STATIC_ASSERT((kExprRttSub >> 8) == kGCPrefix);
buffer->write_u8(kGCPrefix);
buffer->write_u8(static_cast<uint8_t>(kExprRttSub));
buffer->write_i32v(HeapType(init.immediate().heap_type).code());
buffer->write_i32v(static_cast<int32_t>(init.immediate().index));
break;
}
}
......
......@@ -933,11 +933,9 @@ class WasmArray : public TorqueGeneratedWasmArray<WasmArray, HeapObject> {
namespace wasm {
Handle<Map> CreateStructMap(Isolate* isolate, const WasmModule* module,
int struct_index, Handle<Map> rtt_parent);
int struct_index, MaybeHandle<Map> rtt_parent);
Handle<Map> CreateArrayMap(Isolate* isolate, const WasmModule* module,
int array_index, Handle<Map> rtt_parent);
Handle<Map> CreateGenericRtt(Isolate* isolate, const WasmModule* module,
Handle<Map> rtt_parent);
int array_index, MaybeHandle<Map> rtt_parent);
Handle<Map> AllocateSubRtt(Isolate* isolate,
Handle<WasmInstanceObject> instance, uint32_t type,
Handle<Map> parent);
......
......@@ -106,7 +106,6 @@ extern class AsmWasmData extends Struct {
@generateCppClass
extern class WasmTypeInfo extends Foreign {
parent: Map;
supertypes: FixedArray;
subtypes: ArrayList;
}
......
......@@ -864,18 +864,17 @@ class WasmInitExpr {
return expr;
}
static WasmInitExpr RttCanon(HeapType::Representation heap_type) {
static WasmInitExpr RttCanon(uint32_t index) {
WasmInitExpr expr;
expr.kind_ = kRttCanon;
expr.immediate_.heap_type = heap_type;
expr.immediate_.index = index;
return expr;
}
static WasmInitExpr RttSub(HeapType::Representation heap_type,
WasmInitExpr supertype) {
static WasmInitExpr RttSub(uint32_t index, WasmInitExpr supertype) {
WasmInitExpr expr;
expr.kind_ = kRttSub;
expr.immediate_.heap_type = heap_type;
expr.immediate_.index = index;
expr.operand_ = std::make_unique<WasmInitExpr>(std::move(supertype));
return expr;
}
......@@ -891,6 +890,7 @@ class WasmInitExpr {
return true;
case kGlobalGet:
case kRefFuncConst:
case kRttCanon:
return immediate().index == other.immediate().index;
case kI32Const:
return immediate().i32_const == other.immediate().i32_const;
......@@ -903,10 +903,9 @@ class WasmInitExpr {
case kS128Const:
return immediate().s128_const == other.immediate().s128_const;
case kRefNullConst:
case kRttCanon:
return immediate().heap_type == other.immediate().heap_type;
case kRttSub:
return immediate().heap_type == other.immediate().heap_type &&
return immediate().index == other.immediate().index &&
*operand() == *other.operand();
}
}
......
......@@ -277,12 +277,9 @@ V8_NOINLINE V8_EXPORT_PRIVATE bool IsSubtypeOfImpl(
if (!subtype.is_reference_type()) return subtype == supertype;
if (subtype.is_rtt()) {
return subtype.heap_type().is_generic()
? subtype == supertype
: (supertype.is_rtt() && subtype.depth() == supertype.depth() &&
supertype.has_index() &&
EquivalentIndices(subtype.ref_index(), supertype.ref_index(),
sub_module, super_module));
return supertype.is_rtt() && subtype.depth() == supertype.depth() &&
EquivalentIndices(subtype.ref_index(), supertype.ref_index(),
sub_module, super_module);
}
DCHECK(subtype.is_object_reference_type());
......
......@@ -70,12 +70,18 @@ V8_INLINE bool IsSubtypeOf(ValueType subtype, ValueType supertype,
}
// We have this function call IsSubtypeOf instead of the opposite because type
// checks are much more common than heap type checks.
V8_INLINE bool IsHeapSubtypeOf(HeapType subtype, HeapType supertype,
// checks are much more common than heap type checks.}
V8_INLINE bool IsHeapSubtypeOf(uint32_t subtype_index,
HeapType::Representation supertype,
const WasmModule* module) {
return IsSubtypeOf(ValueType::Ref(subtype, kNonNullable),
return IsSubtypeOf(ValueType::Ref(subtype_index, kNonNullable),
ValueType::Ref(supertype, kNonNullable), module);
}
V8_INLINE bool IsHeapSubtypeOf(uint32_t subtype_index, uint32_t supertype_index,
const WasmModule* module) {
return IsSubtypeOf(ValueType::Ref(subtype_index, kNonNullable),
ValueType::Ref(supertype_index, kNonNullable), module);
}
// Returns the weakest type that is a subtype of both a and b
// (which is currently always one of a, b, or kWasmBottom).
......
......@@ -383,11 +383,6 @@ KNOWN_MAPS = {
("read_only_space", 0x05d4d): (78, "StoreHandler3Map"),
("map_space", 0x02119): (1057, "ExternalMap"),
("map_space", 0x02141): (1098, "JSMessageObjectMap"),
("map_space", 0x02169): (181, "WasmRttEqrefMap"),
("map_space", 0x02191): (181, "WasmRttAnyrefMap"),
("map_space", 0x021b9): (181, "WasmRttExternrefMap"),
("map_space", 0x021e1): (181, "WasmRttFuncrefMap"),
("map_space", 0x02209): (181, "WasmRttI31refMap"),
}
# List of known V8 objects.
......@@ -448,49 +443,49 @@ KNOWN_OBJECTS = {
("old_space", 0x0237d): "FunctionPrototypeAccessor",
("old_space", 0x023c1): "StringLengthAccessor",
("old_space", 0x02405): "InvalidPrototypeValidityCell",
("old_space", 0x024f1): "EmptyScript",
("old_space", 0x02531): "ManyClosuresCell",
("old_space", 0x0253d): "ArrayConstructorProtector",
("old_space", 0x02551): "NoElementsProtector",
("old_space", 0x02565): "IsConcatSpreadableProtector",
("old_space", 0x02579): "ArraySpeciesProtector",
("old_space", 0x0258d): "TypedArraySpeciesProtector",
("old_space", 0x025a1): "PromiseSpeciesProtector",
("old_space", 0x025b5): "RegExpSpeciesProtector",
("old_space", 0x025c9): "StringLengthProtector",
("old_space", 0x025dd): "ArrayIteratorProtector",
("old_space", 0x025f1): "ArrayBufferDetachingProtector",
("old_space", 0x02605): "PromiseHookProtector",
("old_space", 0x02619): "PromiseResolveProtector",
("old_space", 0x0262d): "MapIteratorProtector",
("old_space", 0x02641): "PromiseThenProtector",
("old_space", 0x02655): "SetIteratorProtector",
("old_space", 0x02669): "StringIteratorProtector",
("old_space", 0x0267d): "SingleCharacterStringCache",
("old_space", 0x02a85): "StringSplitCache",
("old_space", 0x02e8d): "RegExpMultipleCache",
("old_space", 0x03295): "BuiltinsConstantsTable",
("old_space", 0x0368d): "AsyncFunctionAwaitRejectSharedFun",
("old_space", 0x036b1): "AsyncFunctionAwaitResolveSharedFun",
("old_space", 0x036d5): "AsyncGeneratorAwaitRejectSharedFun",
("old_space", 0x036f9): "AsyncGeneratorAwaitResolveSharedFun",
("old_space", 0x0371d): "AsyncGeneratorYieldResolveSharedFun",
("old_space", 0x03741): "AsyncGeneratorReturnResolveSharedFun",
("old_space", 0x03765): "AsyncGeneratorReturnClosedRejectSharedFun",
("old_space", 0x03789): "AsyncGeneratorReturnClosedResolveSharedFun",
("old_space", 0x037ad): "AsyncIteratorValueUnwrapSharedFun",
("old_space", 0x037d1): "PromiseAllResolveElementSharedFun",
("old_space", 0x037f5): "PromiseAllSettledResolveElementSharedFun",
("old_space", 0x03819): "PromiseAllSettledRejectElementSharedFun",
("old_space", 0x0383d): "PromiseAnyRejectElementSharedFun",
("old_space", 0x03861): "PromiseCapabilityDefaultRejectSharedFun",
("old_space", 0x03885): "PromiseCapabilityDefaultResolveSharedFun",
("old_space", 0x038a9): "PromiseCatchFinallySharedFun",
("old_space", 0x038cd): "PromiseGetCapabilitiesExecutorSharedFun",
("old_space", 0x038f1): "PromiseThenFinallySharedFun",
("old_space", 0x03915): "PromiseThrowerFinallySharedFun",
("old_space", 0x03939): "PromiseValueThunkFinallySharedFun",
("old_space", 0x0395d): "ProxyRevokeSharedFun",
("old_space", 0x0240d): "EmptyScript",
("old_space", 0x0244d): "ManyClosuresCell",
("old_space", 0x02459): "ArrayConstructorProtector",
("old_space", 0x0246d): "NoElementsProtector",
("old_space", 0x02481): "IsConcatSpreadableProtector",
("old_space", 0x02495): "ArraySpeciesProtector",
("old_space", 0x024a9): "TypedArraySpeciesProtector",
("old_space", 0x024bd): "PromiseSpeciesProtector",
("old_space", 0x024d1): "RegExpSpeciesProtector",
("old_space", 0x024e5): "StringLengthProtector",
("old_space", 0x024f9): "ArrayIteratorProtector",
("old_space", 0x0250d): "ArrayBufferDetachingProtector",
("old_space", 0x02521): "PromiseHookProtector",
("old_space", 0x02535): "PromiseResolveProtector",
("old_space", 0x02549): "MapIteratorProtector",
("old_space", 0x0255d): "PromiseThenProtector",
("old_space", 0x02571): "SetIteratorProtector",
("old_space", 0x02585): "StringIteratorProtector",
("old_space", 0x02599): "SingleCharacterStringCache",
("old_space", 0x029a1): "StringSplitCache",
("old_space", 0x02da9): "RegExpMultipleCache",
("old_space", 0x031b1): "BuiltinsConstantsTable",
("old_space", 0x035a9): "AsyncFunctionAwaitRejectSharedFun",
("old_space", 0x035cd): "AsyncFunctionAwaitResolveSharedFun",
("old_space", 0x035f1): "AsyncGeneratorAwaitRejectSharedFun",
("old_space", 0x03615): "AsyncGeneratorAwaitResolveSharedFun",
("old_space", 0x03639): "AsyncGeneratorYieldResolveSharedFun",
("old_space", 0x0365d): "AsyncGeneratorReturnResolveSharedFun",
("old_space", 0x03681): "AsyncGeneratorReturnClosedRejectSharedFun",
("old_space", 0x036a5): "AsyncGeneratorReturnClosedResolveSharedFun",
("old_space", 0x036c9): "AsyncIteratorValueUnwrapSharedFun",
("old_space", 0x036ed): "PromiseAllResolveElementSharedFun",
("old_space", 0x03711): "PromiseAllSettledResolveElementSharedFun",
("old_space", 0x03735): "PromiseAllSettledRejectElementSharedFun",
("old_space", 0x03759): "PromiseAnyRejectElementSharedFun",
("old_space", 0x0377d): "PromiseCapabilityDefaultRejectSharedFun",
("old_space", 0x037a1): "PromiseCapabilityDefaultResolveSharedFun",
("old_space", 0x037c5): "PromiseCatchFinallySharedFun",
("old_space", 0x037e9): "PromiseGetCapabilitiesExecutorSharedFun",
("old_space", 0x0380d): "PromiseThenFinallySharedFun",
("old_space", 0x03831): "PromiseThrowerFinallySharedFun",
("old_space", 0x03855): "PromiseValueThunkFinallySharedFun",
("old_space", 0x03879): "ProxyRevokeSharedFun",
}
# Lower 32 bits of first page addresses for various heap spaces.
......