Commit e335c01d authored by Jakob Kummerow, committed by V8 LUCI CQ

[wasm-gc] Inline allocations for structs

Avoiding the builtin call, and potentially enabling other Turbofan
optimizations to apply.
This required teaching the MemoryLowering pass how to produce
Wasm-style isolate-independent code, where external references won't
work, and builtin calls have to be encoded differently.

Bug: v8:7748
Change-Id: Ib6d87da644330e7c3623c11aa333b23074aeae19
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3038524
Commit-Queue: Jakob Kummerow <jkummerow@chromium.org>
Reviewed-by: Manos Koukoutos <manoskouk@chromium.org>
Reviewed-by: Georg Neis <neis@chromium.org>
Cr-Commit-Position: refs/heads/master@{#75824}
parent 070659f6
......@@ -574,6 +574,15 @@ TNode<Map> GraphAssembler::LoadMap(Node* object) {
#endif
}
// Writes {map} into {object}'s map slot, emitting the map write barrier.
void GraphAssembler::StoreMap(Node* object, TNode<Map> map) {
#ifdef V8_MAP_PACKING
  // With map packing enabled, map words are stored in packed form.
  map = PackMapWord(map);
#endif
  // {object} is a tagged pointer, so untag the offset before storing.
  Store(StoreRepresentation(MachineType::TaggedRepresentation(),
                            kMapWriteBarrier),
        object, HeapObject::kMapOffset - kHeapObjectTag, map);
}
Node* JSGraphAssembler::StoreElement(ElementAccess const& access, Node* object,
Node* index, Node* value) {
return AddNode(graph()->NewNode(simplified()->StoreElement(access), object,
......
......@@ -276,6 +276,7 @@ class V8_EXPORT_PRIVATE GraphAssembler {
TNode<Map> UnpackMapWord(Node* map_word);
#endif
TNode<Map> LoadMap(Node* object);
void StoreMap(Node* object, TNode<Map> map);
Node* DebugBreak();
......
......@@ -15,6 +15,10 @@
#include "src/compiler/simplified-operator.h"
#include "src/roots/roots-inl.h"
#if V8_ENABLE_WEBASSEMBLY
#include "src/wasm/wasm-linkage.h"
#include "src/wasm/wasm-objects.h"
#endif
namespace v8 {
namespace internal {
namespace compiler {
......@@ -100,6 +104,32 @@ Reduction MemoryLowering::Reduce(Node* node) {
}
}
void MemoryLowering::EnsureAllocateOperator() {
if (allocate_operator_.is_set()) return;
auto descriptor = AllocateDescriptor{};
StubCallMode mode = isolate_ != nullptr ? StubCallMode::kCallCodeObject
: StubCallMode::kCallBuiltinPointer;
auto call_descriptor = Linkage::GetStubCallDescriptor(
graph_zone(), descriptor, descriptor.GetStackParameterCount(),
CallDescriptor::kCanUseRoots, Operator::kNoThrow, mode);
allocate_operator_.set(common()->Call(call_descriptor));
}
#if V8_ENABLE_WEBASSEMBLY
// Returns (and caches) the Parameter node carrying the Wasm instance.
Node* MemoryLowering::GetWasmInstanceNode() {
  // Fast path: the instance parameter was already located.
  if (wasm_instance_node_.is_set()) return wasm_instance_node_.get();
  // The instance is passed as a fixed parameter, i.e. a use of the start
  // node with the well-known instance parameter index.
  for (Node* use : graph()->start()->uses()) {
    if (use->opcode() != IrOpcode::kParameter) continue;
    if (ParameterIndexOf(use->op()) != wasm::kWasmInstanceParameterIndex) {
      continue;
    }
    wasm_instance_node_.set(use);
    return use;
  }
  // The instance node must have been created before this pass runs.
  UNREACHABLE();
}
#endif  // V8_ENABLE_WEBASSEMBLY
#define __ gasm()->
Reduction MemoryLowering::ReduceAllocateRaw(
......@@ -123,29 +153,82 @@ Reduction MemoryLowering::ReduceAllocateRaw(
gasm()->InitializeEffectControl(effect, control);
Node* allocate_builtin;
if (allocation_type == AllocationType::kYoung) {
if (allow_large_objects == AllowLargeObjects::kTrue) {
allocate_builtin = __ AllocateInYoungGenerationStubConstant();
if (isolate_ != nullptr) {
if (allocation_type == AllocationType::kYoung) {
if (allow_large_objects == AllowLargeObjects::kTrue) {
allocate_builtin = __ AllocateInYoungGenerationStubConstant();
} else {
allocate_builtin = __ AllocateRegularInYoungGenerationStubConstant();
}
} else {
allocate_builtin = __ AllocateRegularInYoungGenerationStubConstant();
if (allow_large_objects == AllowLargeObjects::kTrue) {
allocate_builtin = __ AllocateInOldGenerationStubConstant();
} else {
allocate_builtin = __ AllocateRegularInOldGenerationStubConstant();
}
}
} else {
if (allow_large_objects == AllowLargeObjects::kTrue) {
allocate_builtin = __ AllocateInOldGenerationStubConstant();
// This lowering is used by Wasm, where we compile isolate-independent
// code. Builtin calls simply encode the target builtin ID, which will
// be patched to the builtin's address later.
#if V8_ENABLE_WEBASSEMBLY
Builtin builtin;
if (allocation_type == AllocationType::kYoung) {
if (allow_large_objects == AllowLargeObjects::kTrue) {
builtin = Builtin::kAllocateInYoungGeneration;
} else {
builtin = Builtin::kAllocateRegularInYoungGeneration;
}
} else {
allocate_builtin = __ AllocateRegularInOldGenerationStubConstant();
if (allow_large_objects == AllowLargeObjects::kTrue) {
builtin = Builtin::kAllocateInOldGeneration;
} else {
builtin = Builtin::kAllocateRegularInOldGeneration;
}
}
static_assert(std::is_same<Smi, BuiltinPtr>(), "BuiltinPtr must be Smi");
allocate_builtin =
graph()->NewNode(common()->NumberConstant(static_cast<int>(builtin)));
#else
UNREACHABLE();
#endif
}
// Determine the top/limit addresses.
Node* top_address = __ ExternalConstant(
allocation_type == AllocationType::kYoung
? ExternalReference::new_space_allocation_top_address(isolate())
: ExternalReference::old_space_allocation_top_address(isolate()));
Node* limit_address = __ ExternalConstant(
allocation_type == AllocationType::kYoung
? ExternalReference::new_space_allocation_limit_address(isolate())
: ExternalReference::old_space_allocation_limit_address(isolate()));
Node* top_address;
Node* limit_address;
if (isolate_ != nullptr) {
top_address = __ ExternalConstant(
allocation_type == AllocationType::kYoung
? ExternalReference::new_space_allocation_top_address(isolate())
: ExternalReference::old_space_allocation_top_address(isolate()));
limit_address = __ ExternalConstant(
allocation_type == AllocationType::kYoung
? ExternalReference::new_space_allocation_limit_address(isolate())
: ExternalReference::old_space_allocation_limit_address(isolate()));
} else {
// Wasm mode: producing isolate-independent code, loading the isolate
// address at runtime.
#if V8_ENABLE_WEBASSEMBLY
Node* instance_node = GetWasmInstanceNode();
int top_address_offset =
allocation_type == AllocationType::kYoung
? WasmInstanceObject::kNewAllocationTopAddressOffset
: WasmInstanceObject::kOldAllocationTopAddressOffset;
int limit_address_offset =
allocation_type == AllocationType::kYoung
? WasmInstanceObject::kNewAllocationLimitAddressOffset
: WasmInstanceObject::kOldAllocationLimitAddressOffset;
top_address =
__ Load(MachineType::Pointer(), instance_node,
__ IntPtrConstant(top_address_offset - kHeapObjectTag));
limit_address =
__ Load(MachineType::Pointer(), instance_node,
__ IntPtrConstant(limit_address_offset - kHeapObjectTag));
#else
UNREACHABLE();
#endif // V8_ENABLE_WEBASSEMBLY
}
// Check if we can fold this allocation into a previous allocation represented
// by the incoming {state}.
......@@ -216,13 +299,7 @@ Reduction MemoryLowering::ReduceAllocateRaw(
__ Bind(&call_runtime);
{
if (!allocate_operator_.is_set()) {
auto descriptor = AllocateDescriptor{};
auto call_descriptor = Linkage::GetStubCallDescriptor(
graph_zone(), descriptor, descriptor.GetStackParameterCount(),
CallDescriptor::kCanUseRoots, Operator::kNoThrow);
allocate_operator_.set(common()->Call(call_descriptor));
}
EnsureAllocateOperator();
Node* vfalse = __ BitcastTaggedToWord(
__ Call(allocate_operator_.get(), allocate_builtin, size));
vfalse = __ IntSub(vfalse, __ IntPtrConstant(kHeapObjectTag));
......@@ -277,13 +354,7 @@ Reduction MemoryLowering::ReduceAllocateRaw(
__ IntAdd(top, __ IntPtrConstant(kHeapObjectTag))));
__ Bind(&call_runtime);
if (!allocate_operator_.is_set()) {
auto descriptor = AllocateDescriptor{};
auto call_descriptor = Linkage::GetStubCallDescriptor(
graph_zone(), descriptor, descriptor.GetStackParameterCount(),
CallDescriptor::kCanUseRoots, Operator::kNoThrow);
allocate_operator_.set(common()->Call(call_descriptor));
}
EnsureAllocateOperator();
__ Goto(&done, __ Call(allocate_operator_.get(), allocate_builtin, size));
__ Bind(&done);
......
......@@ -113,6 +113,8 @@ class MemoryLowering final : public Reducer {
Reduction ReduceLoadMap(Node* encoded_pointer);
Node* ComputeIndex(ElementAccess const& access, Node* node);
bool NeedsPoisoning(LoadSensitivity load_sensitivity) const;
void EnsureAllocateOperator();
Node* GetWasmInstanceNode();
Graph* graph() const { return graph_; }
Isolate* isolate() const { return isolate_; }
......@@ -123,6 +125,7 @@ class MemoryLowering final : public Reducer {
JSGraphAssembler* gasm() const { return graph_assembler_; }
SetOncePointer<const Operator> allocate_operator_;
SetOncePointer<Node> wasm_instance_node_;
Isolate* isolate_;
Zone* zone_;
Graph* graph_;
......
......@@ -244,6 +244,21 @@ class WasmGraphAssembler : public GraphAssembler {
// Rule of thumb: if access to a given field in an object is required in
// at least two places, put a helper function here.
// Allocates {size} bytes; statically-known small sizes skip the
// large-object path (threshold: kMaxRegularHeapObjectSize).
Node* Allocate(int size) {
  return Allocate(Int32Constant(size),
                  size < kMaxRegularHeapObjectSize ? AllowLargeObjects::kFalse
                                                   : AllowLargeObjects::kTrue);
}
// Emits an AllocateRaw node for {size} bytes in the young generation.
// Callers that can prove the size is small may pass kFalse to avoid the
// large-object path.
Node* Allocate(Node* size,
               AllowLargeObjects allow_large = AllowLargeObjects::kTrue) {
  const Operator* alloc_op =
      simplified_.AllocateRaw(Type::Any(), AllocationType::kYoung, allow_large);
  return AddNode(graph()->NewNode(alloc_op, size, effect(), control()));
}
Node* LoadFromObject(MachineType type, Node* base, Node* offset) {
return AddNode(graph()->NewNode(
simplified_.LoadFromObject(ObjectAccess(type, kNoWriteBarrier)), base,
......@@ -5560,8 +5575,13 @@ Node* WasmGraphBuilder::StructNewWithRtt(uint32_t struct_index,
const wasm::StructType* type,
Node* rtt,
base::Vector<Node*> fields) {
Node* s = gasm_->CallBuiltin(Builtin::kWasmAllocateStructWithRtt,
Operator::kEliminatable, rtt);
int size = WasmStruct::Size(type);
Node* s = gasm_->Allocate(size);
gasm_->StoreMap(s, TNode<Map>::UncheckedCast(rtt));
gasm_->StoreToObject(
ObjectAccess(MachineType::TaggedPointer(), kNoWriteBarrier), s,
wasm::ObjectAccess::ToTagged(JSReceiver::kPropertiesOrHashOffset),
LOAD_ROOT(EmptyFixedArray, empty_fixed_array));
for (uint32_t i = 0; i < type->field_count(); i++) {
gasm_->StoreStructField(s, type, i, fields[i]);
}
......
......@@ -193,6 +193,14 @@ PRIMITIVE_ACCESSORS(WasmInstanceObject, stack_limit_address, Address,
kStackLimitAddressOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, real_stack_limit_address, Address,
kRealStackLimitAddressOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, new_allocation_limit_address, Address*,
kNewAllocationLimitAddressOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, new_allocation_top_address, Address*,
kNewAllocationTopAddressOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, old_allocation_limit_address, Address*,
kOldAllocationLimitAddressOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, old_allocation_top_address, Address*,
kOldAllocationTopAddressOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, imported_function_targets, Address*,
kImportedFunctionTargetsOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, globals_start, byte*,
......
......@@ -1295,6 +1295,14 @@ Handle<WasmInstanceObject> WasmInstanceObject::New(
isolate->stack_guard()->address_of_jslimit());
instance->set_real_stack_limit_address(
isolate->stack_guard()->address_of_real_jslimit());
instance->set_new_allocation_limit_address(
isolate->heap()->NewSpaceAllocationLimitAddress());
instance->set_new_allocation_top_address(
isolate->heap()->NewSpaceAllocationTopAddress());
instance->set_old_allocation_limit_address(
isolate->heap()->OldSpaceAllocationLimitAddress());
instance->set_old_allocation_top_address(
isolate->heap()->OldSpaceAllocationTopAddress());
instance->set_globals_start(nullptr);
instance->set_indirect_function_table_size(0);
instance->set_indirect_function_table_refs(
......
......@@ -360,6 +360,10 @@ class V8_EXPORT_PRIVATE WasmInstanceObject : public JSObject {
DECL_PRIMITIVE_ACCESSORS(isolate_root, Address)
DECL_PRIMITIVE_ACCESSORS(stack_limit_address, Address)
DECL_PRIMITIVE_ACCESSORS(real_stack_limit_address, Address)
DECL_PRIMITIVE_ACCESSORS(new_allocation_limit_address, Address*)
DECL_PRIMITIVE_ACCESSORS(new_allocation_top_address, Address*)
DECL_PRIMITIVE_ACCESSORS(old_allocation_limit_address, Address*)
DECL_PRIMITIVE_ACCESSORS(old_allocation_top_address, Address*)
DECL_PRIMITIVE_ACCESSORS(imported_function_targets, Address*)
DECL_PRIMITIVE_ACCESSORS(globals_start, byte*)
DECL_PRIMITIVE_ACCESSORS(imported_mutable_globals, Address*)
......@@ -402,6 +406,10 @@ class V8_EXPORT_PRIVATE WasmInstanceObject : public JSObject {
V(kIsolateRootOffset, kSystemPointerSize) \
V(kJumpTableStartOffset, kSystemPointerSize) \
/* End of often-accessed fields. */ \
V(kNewAllocationLimitAddressOffset, kSystemPointerSize) \
V(kNewAllocationTopAddressOffset, kSystemPointerSize) \
V(kOldAllocationLimitAddressOffset, kSystemPointerSize) \
V(kOldAllocationTopAddressOffset, kSystemPointerSize) \
V(kModuleObjectOffset, kTaggedSize) \
V(kExportsObjectOffset, kTaggedSize) \
V(kNativeContextOffset, kTaggedSize) \
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment