Commit 6151ae01 authored by Luis Fernando Pardo Sixtos, committed by V8 LUCI CQ

[shared-struct] Support shared arrays in Atomics.{load,store,exchange}

Atomics.load, Atomics.store, and Atomics.exchange now accept shared
array objects as their 1st argument.

Currently these are implemented in C++ and not yet in CSA.

Bug: v8:12547
Change-Id: I54ed8816a696a4f45dda964739b1cfd917d39dc0
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3723974
Reviewed-by: Shu-yu Guo <syg@chromium.org>
Reviewed-by: Camillo Bruni <cbruni@chromium.org>
Commit-Queue: Luis Fernando Pardo Sixtos <lpardosixtos@microsoft.com>
Cr-Commit-Position: refs/heads/main@{#81537}
parent 995063d1
......@@ -889,12 +889,12 @@ namespace internal {
/* https://tc39.es/proposal-resizablearraybuffer/ */ \
CPP(SharedArrayBufferPrototypeGrow) \
\
TFJ(AtomicsLoad, kJSArgcReceiverSlots + 2, kReceiver, kArrayOrSharedStruct, \
TFJ(AtomicsLoad, kJSArgcReceiverSlots + 2, kReceiver, kArrayOrSharedObject, \
kIndexOrFieldName) \
TFJ(AtomicsStore, kJSArgcReceiverSlots + 3, kReceiver, kArrayOrSharedStruct, \
TFJ(AtomicsStore, kJSArgcReceiverSlots + 3, kReceiver, kArrayOrSharedObject, \
kIndexOrFieldName, kValue) \
TFJ(AtomicsExchange, kJSArgcReceiverSlots + 3, kReceiver, \
kArrayOrSharedStruct, kIndexOrFieldName, kValue) \
kArrayOrSharedObject, kIndexOrFieldName, kValue) \
TFJ(AtomicsCompareExchange, kJSArgcReceiverSlots + 4, kReceiver, kArray, \
kIndex, kOldValue, kNewValue) \
TFJ(AtomicsAdd, kJSArgcReceiverSlots + 3, kReceiver, kArray, kIndex, kValue) \
......
......@@ -28,7 +28,8 @@ class SharedArrayBufferBuiltinsAssembler : public CodeStubAssembler {
TNode<Context> context,
TNode<Int32T>* out_elements_kind,
TNode<RawPtrT>* out_backing_store,
Label* detached);
Label* detached,
Label* shared_struct_or_shared_array);
TNode<UintPtrT> ValidateAtomicAccess(TNode<JSTypedArray> array,
TNode<Object> index,
......@@ -52,21 +53,21 @@ class SharedArrayBufferBuiltinsAssembler : public CodeStubAssembler {
// https://tc39.es/ecma262/#sec-validateintegertypedarray
void SharedArrayBufferBuiltinsAssembler::ValidateIntegerTypedArray(
TNode<Object> maybe_array, TNode<Context> context,
TNode<Object> maybe_array_or_shared_object, TNode<Context> context,
TNode<Int32T>* out_elements_kind, TNode<RawPtrT>* out_backing_store,
Label* detached) {
Label* detached, Label* is_shared_struct_or_shared_array = nullptr) {
Label not_float_or_clamped(this), invalid(this);
// The logic of TypedArrayBuiltinsAssembler::ValidateTypedArrayBuffer is
// inlined to avoid duplicate error branches.
// Fail if it is not a heap object.
GotoIf(TaggedIsSmi(maybe_array), &invalid);
GotoIf(TaggedIsSmi(maybe_array_or_shared_object), &invalid);
// Fail if the array's instance type is not JSTypedArray.
TNode<Map> map = LoadMap(CAST(maybe_array));
TNode<Map> map = LoadMap(CAST(maybe_array_or_shared_object));
GotoIfNot(IsJSTypedArrayMap(map), &invalid);
TNode<JSTypedArray> array = CAST(maybe_array);
TNode<JSTypedArray> array = CAST(maybe_array_or_shared_object);
// Fail if the array's JSArrayBuffer is detached / out of bounds.
GotoIf(IsJSArrayBufferViewDetachedOrOutOfBoundsBoolean(array), detached);
......@@ -89,8 +90,14 @@ void SharedArrayBufferBuiltinsAssembler::ValidateIntegerTypedArray(
BIND(&invalid);
{
if (is_shared_struct_or_shared_array) {
GotoIf(IsJSSharedStruct(maybe_array_or_shared_object),
is_shared_struct_or_shared_array);
GotoIf(IsJSSharedArray(maybe_array_or_shared_object),
is_shared_struct_or_shared_array);
}
ThrowTypeError(context, MessageTemplate::kNotIntegerTypedArray,
maybe_array);
maybe_array_or_shared_object);
}
BIND(&not_float_or_clamped);
......@@ -183,22 +190,19 @@ TNode<BigInt> SharedArrayBufferBuiltinsAssembler::BigIntFromUnsigned64(
// https://tc39.es/ecma262/#sec-atomicload
TF_BUILTIN(AtomicsLoad, SharedArrayBufferBuiltinsAssembler) {
auto maybe_array_or_shared_struct =
Parameter<Object>(Descriptor::kArrayOrSharedStruct);
auto maybe_array_or_shared_object =
Parameter<Object>(Descriptor::kArrayOrSharedObject);
auto index_or_field_name = Parameter<Object>(Descriptor::kIndexOrFieldName);
auto context = Parameter<Context>(Descriptor::kContext);
Label shared_struct(this);
GotoIf(IsJSSharedStruct(maybe_array_or_shared_struct), &shared_struct);
// 1. Let buffer be ? ValidateIntegerTypedArray(typedArray).
Label detached_or_out_of_bounds(this);
Label detached_or_out_of_bounds(this), is_shared_struct_or_shared_array(this);
TNode<Int32T> elements_kind;
TNode<RawPtrT> backing_store;
ValidateIntegerTypedArray(maybe_array_or_shared_struct, context,
&elements_kind, &backing_store,
&detached_or_out_of_bounds);
TNode<JSTypedArray> array = CAST(maybe_array_or_shared_struct);
ValidateIntegerTypedArray(
maybe_array_or_shared_object, context, &elements_kind, &backing_store,
&detached_or_out_of_bounds, &is_shared_struct_or_shared_array);
TNode<JSTypedArray> array = CAST(maybe_array_or_shared_object);
// 2. Let i be ? ValidateAtomicAccess(typedArray, index).
TNode<UintPtrT> index_word =
......@@ -276,32 +280,29 @@ TF_BUILTIN(AtomicsLoad, SharedArrayBufferBuiltinsAssembler) {
"Atomics.load");
}
BIND(&shared_struct);
BIND(&is_shared_struct_or_shared_array);
{
Return(CallRuntime(Runtime::kAtomicsLoadSharedStructField, context,
maybe_array_or_shared_struct, index_or_field_name));
Return(CallRuntime(Runtime::kAtomicsLoadSharedStructOrArray, context,
maybe_array_or_shared_object, index_or_field_name));
}
}
// https://tc39.es/ecma262/#sec-atomics.store
TF_BUILTIN(AtomicsStore, SharedArrayBufferBuiltinsAssembler) {
auto maybe_array_or_shared_struct =
Parameter<Object>(Descriptor::kArrayOrSharedStruct);
auto maybe_array_or_shared_object =
Parameter<Object>(Descriptor::kArrayOrSharedObject);
auto index_or_field_name = Parameter<Object>(Descriptor::kIndexOrFieldName);
auto value = Parameter<Object>(Descriptor::kValue);
auto context = Parameter<Context>(Descriptor::kContext);
Label shared_struct(this);
GotoIf(IsJSSharedStruct(maybe_array_or_shared_struct), &shared_struct);
// 1. Let buffer be ? ValidateIntegerTypedArray(typedArray).
Label detached_or_out_of_bounds(this);
Label detached_or_out_of_bounds(this), is_shared_struct_or_shared_array(this);
TNode<Int32T> elements_kind;
TNode<RawPtrT> backing_store;
ValidateIntegerTypedArray(maybe_array_or_shared_struct, context,
&elements_kind, &backing_store,
&detached_or_out_of_bounds);
TNode<JSTypedArray> array = CAST(maybe_array_or_shared_struct);
ValidateIntegerTypedArray(
maybe_array_or_shared_object, context, &elements_kind, &backing_store,
&detached_or_out_of_bounds, &is_shared_struct_or_shared_array);
TNode<JSTypedArray> array = CAST(maybe_array_or_shared_object);
// 2. Let i be ? ValidateAtomicAccess(typedArray, index).
TNode<UintPtrT> index_word =
......@@ -390,36 +391,33 @@ TF_BUILTIN(AtomicsStore, SharedArrayBufferBuiltinsAssembler) {
"Atomics.store");
}
BIND(&shared_struct);
BIND(&is_shared_struct_or_shared_array);
{
Return(CallRuntime(Runtime::kAtomicsStoreSharedStructField, context,
maybe_array_or_shared_struct, index_or_field_name,
Return(CallRuntime(Runtime::kAtomicsStoreSharedStructOrArray, context,
maybe_array_or_shared_object, index_or_field_name,
value));
}
}
// https://tc39.es/ecma262/#sec-atomics.exchange
TF_BUILTIN(AtomicsExchange, SharedArrayBufferBuiltinsAssembler) {
auto maybe_array_or_shared_struct =
Parameter<Object>(Descriptor::kArrayOrSharedStruct);
auto maybe_array_or_shared_object =
Parameter<Object>(Descriptor::kArrayOrSharedObject);
auto index_or_field_name = Parameter<Object>(Descriptor::kIndexOrFieldName);
auto value = Parameter<Object>(Descriptor::kValue);
auto context = Parameter<Context>(Descriptor::kContext);
Label shared_struct(this);
GotoIf(IsJSSharedStruct(maybe_array_or_shared_struct), &shared_struct);
// Inlines AtomicReadModifyWrite
// https://tc39.es/ecma262/#sec-atomicreadmodifywrite
// 1. Let buffer be ? ValidateIntegerTypedArray(typedArray).
Label detached_or_out_of_bounds(this);
Label detached_or_out_of_bounds(this), is_shared_struct_or_shared_array(this);
TNode<Int32T> elements_kind;
TNode<RawPtrT> backing_store;
ValidateIntegerTypedArray(maybe_array_or_shared_struct, context,
&elements_kind, &backing_store,
&detached_or_out_of_bounds);
TNode<JSTypedArray> array = CAST(maybe_array_or_shared_struct);
ValidateIntegerTypedArray(
maybe_array_or_shared_object, context, &elements_kind, &backing_store,
&detached_or_out_of_bounds, &is_shared_struct_or_shared_array);
TNode<JSTypedArray> array = CAST(maybe_array_or_shared_object);
// 2. Let i be ? ValidateAtomicAccess(typedArray, index).
TNode<UintPtrT> index_word =
......@@ -534,10 +532,10 @@ TF_BUILTIN(AtomicsExchange, SharedArrayBufferBuiltinsAssembler) {
"Atomics.exchange");
}
BIND(&shared_struct);
BIND(&is_shared_struct_or_shared_array);
{
Return(CallRuntime(Runtime::kAtomicsExchangeSharedStructField, context,
maybe_array_or_shared_struct, index_or_field_name,
Return(CallRuntime(Runtime::kAtomicsExchangeSharedStructOrArray, context,
maybe_array_or_shared_object, index_or_field_name,
value));
}
}
......
......@@ -6569,6 +6569,23 @@ TNode<BoolT> CodeStubAssembler::IsJSSharedArrayInstanceType(
return InstanceTypeEqual(instance_type, JS_SHARED_ARRAY_TYPE);
}
// Returns true iff `map` is the map of a JSSharedArray, i.e. its instance
// type is JS_SHARED_ARRAY_TYPE.
TNode<BoolT> CodeStubAssembler::IsJSSharedArrayMap(TNode<Map> map) {
  TNode<Int32T> instance_type = LoadMapInstanceType(map);
  return IsJSSharedArrayInstanceType(instance_type);
}
// Returns true iff the heap object is a JSSharedArray (checked via its map).
TNode<BoolT> CodeStubAssembler::IsJSSharedArray(TNode<HeapObject> object) {
  TNode<Map> object_map = LoadMap(object);
  return IsJSSharedArrayMap(object_map);
}
// Tagged-value overload: Smis are never JSSharedArrays, so guard on the tag
// before dereferencing the map via the HeapObject overload.
TNode<BoolT> CodeStubAssembler::IsJSSharedArray(TNode<Object> object) {
  return Select<BoolT>(
      TaggedIsSmi(object), [=] { return Int32FalseConstant(); },
      [=] { return IsJSSharedArray(CAST(object)); });
}
TNode<BoolT> CodeStubAssembler::IsJSSharedStructInstanceType(
TNode<Int32T> instance_type) {
return InstanceTypeEqual(instance_type, JS_SHARED_STRUCT_TYPE);
......
......@@ -2611,6 +2611,9 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
TNode<BoolT> IsJSPrimitiveWrapperMap(TNode<Map> map);
TNode<BoolT> IsJSPrimitiveWrapper(TNode<HeapObject> object);
TNode<BoolT> IsJSSharedArrayInstanceType(TNode<Int32T> instance_type);
TNode<BoolT> IsJSSharedArrayMap(TNode<Map> map);
TNode<BoolT> IsJSSharedArray(TNode<HeapObject> object);
TNode<BoolT> IsJSSharedArray(TNode<Object> object);
TNode<BoolT> IsJSSharedStructInstanceType(TNode<Int32T> instance_type);
TNode<BoolT> IsJSSharedStructMap(TNode<Map> map);
TNode<BoolT> IsJSSharedStruct(TNode<HeapObject> object);
......
......@@ -19,6 +19,7 @@
#include "src/objects/hash-table-inl.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/objects/js-array-inl.h"
#include "src/objects/js-shared-array-inl.h"
#include "src/objects/keys.h"
#include "src/objects/objects-inl.h"
#include "src/objects/slots-atomic-inl.h"
......@@ -650,6 +651,43 @@ class ElementsAccessorBase : public InternalElementsAccessor {
isolate);
}
// Sequentially-consistent (SeqCst) read of the element at `entry`.
// Dispatches to the subclass implementation; only elements kinds that
// support atomic access override GetAtomicInternalImpl (the default is
// UNREACHABLE).
Handle<Object> GetAtomic(Isolate* isolate, Handle<JSObject> holder,
InternalIndex entry, SeqCstAccessTag tag) final {
return Subclass::GetAtomicInternalImpl(isolate, holder, entry, tag);
}
// Default implementation: elements kinds without SeqCst support must never
// be asked for an atomic read.
static Handle<Object> GetAtomicInternalImpl(Isolate* isolate,
Handle<JSObject> holder,
InternalIndex entry,
SeqCstAccessTag tag) {
UNREACHABLE();
}
// Sequentially-consistent (SeqCst) store of `value` at `entry`.
// Dispatches to the subclass implementation.
void SetAtomic(Handle<JSObject> holder, InternalIndex entry, Object value,
SeqCstAccessTag tag) final {
Subclass::SetAtomicInternalImpl(holder, entry, value, tag);
}
// Default implementation: elements kinds without SeqCst support must never
// be asked for an atomic store.
static void SetAtomicInternalImpl(Handle<JSObject> holder,
InternalIndex entry, Object value,
SeqCstAccessTag tag) {
UNREACHABLE();
}
// Sequentially-consistent exchange: atomically installs `value` at `entry`
// and returns the previous element. Dispatches to the subclass.
Handle<Object> SwapAtomic(Isolate* isolate, Handle<JSObject> holder,
InternalIndex entry, Object value,
SeqCstAccessTag tag) final {
return Subclass::SwapAtomicInternalImpl(isolate, holder, entry, value, tag);
}
// Default implementation: elements kinds without SeqCst support must never
// be asked for an atomic exchange.
static Handle<Object> SwapAtomicInternalImpl(Isolate* isolate,
Handle<JSObject> holder,
InternalIndex entry,
Object value,
SeqCstAccessTag tag) {
UNREACHABLE();
}
// Plain (non-atomic) element store; contrast with SetAtomic above.
void Set(Handle<JSObject> holder, InternalIndex entry, Object value) final {
Subclass::SetImpl(holder, entry, value);
}
......@@ -2813,7 +2851,33 @@ class FastPackedSealedObjectElementsAccessor
class SharedArrayElementsAccessor
: public FastSealedObjectElementsAccessor<
SharedArrayElementsAccessor,
ElementsKindTraits<SHARED_ARRAY_ELEMENTS>> {};
ElementsKindTraits<SHARED_ARRAY_ELEMENTS>> {
public:
// SeqCst element load for shared arrays: forwards the tag to
// FixedArray::get(int, SeqCstAccessTag), which performs the
// sequentially-consistent field read.
static Handle<Object> GetAtomicInternalImpl(Isolate* isolate,
Handle<JSObject> holder,
InternalIndex entry,
SeqCstAccessTag tag) {
return handle(
BackingStore::cast(holder->elements()).get(entry.as_int(), tag),
isolate);
}
// SeqCst element store for shared arrays: forwards the tag to
// FixedArray::set(int, Object, SeqCstAccessTag, WriteBarrierMode).
static void SetAtomicInternalImpl(Handle<JSObject> holder,
InternalIndex entry, Object value,
SeqCstAccessTag tag) {
BackingStore::cast(holder->elements()).set(entry.as_int(), value, tag);
}
// SeqCst element exchange for shared arrays: FixedArray::swap atomically
// replaces the element and returns the previous value.
static Handle<Object> SwapAtomicInternalImpl(Isolate* isolate,
Handle<JSObject> holder,
InternalIndex entry,
Object value,
SeqCstAccessTag tag) {
return handle(
BackingStore::cast(holder->elements()).swap(entry.as_int(), value, tag),
isolate);
}
};
class FastHoleySealedObjectElementsAccessor
: public FastSealedObjectElementsAccessor<
......
......@@ -59,6 +59,12 @@ class ElementsAccessor {
virtual Handle<Object> Get(Isolate* isolate, Handle<JSObject> holder,
InternalIndex entry) = 0;
// Currently only shared array elements support sequentially consistent
// access.
virtual Handle<Object> GetAtomic(Isolate* isolate, Handle<JSObject> holder,
InternalIndex entry,
SeqCstAccessTag tag) = 0;
virtual bool HasAccessors(JSObject holder) = 0;
virtual size_t NumberOfElements(JSObject holder) = 0;
......@@ -113,6 +119,17 @@ class ElementsAccessor {
virtual void Set(Handle<JSObject> holder, InternalIndex entry,
Object value) = 0;
// Currently only shared array elements support sequentially consistent
// access.
virtual void SetAtomic(Handle<JSObject> holder, InternalIndex entry,
Object value, SeqCstAccessTag tag) = 0;
// Currently only shared array elements support sequentially consistent
// access.
virtual Handle<Object> SwapAtomic(Isolate* isolate, Handle<JSObject> holder,
InternalIndex entry, Object value,
SeqCstAccessTag tag) = 0;
V8_WARN_UNUSED_RESULT virtual Maybe<bool> Add(Handle<JSObject> object,
uint32_t index,
Handle<Object> value,
......
......@@ -142,6 +142,30 @@ void FixedArray::set(int index, Smi value, RelaxedStoreTag tag) {
set(index, value, tag, SKIP_WRITE_BARRIER);
}
// Sequentially-consistent element load.
Object FixedArray::get(int index, SeqCstAccessTag tag) const {
  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
  // Forward the tag so overload resolution selects the SeqCst cage-base
  // overload (which uses SEQ_CST_READ_FIELD). As written before, the tag was
  // dropped and the plain getter was chosen, silently losing the
  // sequentially-consistent semantics.
  return get(cage_base, index, tag);
}
// Sequentially-consistent element load (cage-base variant).
Object FixedArray::get(PtrComprCageBase cage_base, int index,
SeqCstAccessTag) const {
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
return SEQ_CST_READ_FIELD(*this, OffsetOfElementAt(index));
}
// Sequentially-consistent element store with explicit write-barrier mode.
void FixedArray::set(int index, Object value, SeqCstAccessTag,
WriteBarrierMode mode) {
// Copy-on-write arrays must never be mutated in place.
DCHECK_NE(map(), GetReadOnlyRoots().fixed_cow_array_map());
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
SEQ_CST_WRITE_FIELD(*this, OffsetOfElementAt(index), value);
CONDITIONAL_WRITE_BARRIER(*this, OffsetOfElementAt(index), value, mode);
}
// Sequentially-consistent Smi store; Smis never require a write barrier.
void FixedArray::set(int index, Smi value, SeqCstAccessTag tag) {
DCHECK(Object(value).IsSmi());
set(index, value, tag, SKIP_WRITE_BARRIER);
}
Object FixedArray::get(int index, AcquireLoadTag) const {
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return get(cage_base, index);
......@@ -200,6 +224,21 @@ void FixedArray::set_the_hole(ReadOnlyRoots ro_roots, int index) {
FixedArray::NoWriteBarrierSet(*this, index, ro_roots.the_hole_value());
}
// Sequentially-consistent exchange: atomically installs `value` at `index`
// and returns the previous element.
Object FixedArray::swap(int index, Object value, SeqCstAccessTag,
WriteBarrierMode mode) {
// Copy-on-write arrays must never be mutated in place.
DCHECK_NE(map(), GetReadOnlyRoots().fixed_cow_array_map());
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
Object previous_value =
SEQ_CST_SWAP_FIELD(*this, OffsetOfElementAt(index), value);
// The barrier covers the newly written `value`; the old value needs none.
CONDITIONAL_WRITE_BARRIER(*this, OffsetOfElementAt(index), value, mode);
return previous_value;
}
// Sequentially-consistent Smi exchange; Smis never require a write barrier.
Object FixedArray::swap(int index, Smi value, SeqCstAccessTag tag) {
DCHECK(Object(value).IsSmi());
return swap(index, value, tag, SKIP_WRITE_BARRIER);
}
void FixedArray::FillWithHoles(int from, int to) {
for (int i = from; i < to; i++) {
set_the_hole(i);
......
......@@ -121,6 +121,14 @@ class FixedArray
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
inline void set(int index, Smi value, RelaxedStoreTag);
// SeqCst accessors.
inline Object get(int index, SeqCstAccessTag) const;
inline Object get(PtrComprCageBase cage_base, int index,
SeqCstAccessTag) const;
inline void set(int index, Object value, SeqCstAccessTag,
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
inline void set(int index, Smi value, SeqCstAccessTag);
// Acquire/release accessors.
inline Object get(int index, AcquireLoadTag) const;
inline Object get(PtrComprCageBase cage_base, int index,
......@@ -138,6 +146,12 @@ class FixedArray
// Setter with explicit barrier mode.
inline void set(int index, Object value, WriteBarrierMode mode);
// Atomic swap that doesn't need write barrier.
inline Object swap(int index, Smi value, SeqCstAccessTag);
// Atomic swap with explicit barrier mode.
inline Object swap(int index, Object value, SeqCstAccessTag,
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
// Setters for frequently used oddballs located in old space.
inline void set_undefined(int index);
inline void set_undefined(Isolate* isolate, int index);
......
......@@ -15,6 +15,7 @@
#include "src/objects/field-type.h"
#include "src/objects/hash-table-inl.h"
#include "src/objects/heap-number-inl.h"
#include "src/objects/js-shared-array-inl.h"
#include "src/objects/js-struct-inl.h"
#include "src/objects/map-updater.h"
#include "src/objects/ordered-hash-table.h"
......@@ -1060,14 +1061,21 @@ Handle<Object> LookupIterator::GetDataValue(
Handle<Object> LookupIterator::GetDataValue(SeqCstAccessTag tag) const {
DCHECK_EQ(DATA, state_);
DCHECK_EQ(PropertyLocation::kField, property_details_.location());
DCHECK_EQ(PropertyKind::kData, property_details_.kind());
// Currently only shared structs support sequentially consistent access.
Handle<JSSharedStruct> holder = GetHolder<JSSharedStruct>();
FieldIndex field_index =
FieldIndex::ForDescriptor(holder->map(isolate_), descriptor_number());
return JSObject::FastPropertyAt(
isolate_, holder, property_details_.representation(), field_index, tag);
// Currently only shared structs and arrays support sequentially consistent
// access.
if (holder_->IsJSSharedStruct(isolate_)) {
DCHECK_EQ(PropertyLocation::kField, property_details_.location());
DCHECK_EQ(PropertyKind::kData, property_details_.kind());
Handle<JSSharedStruct> holder = GetHolder<JSSharedStruct>();
FieldIndex field_index =
FieldIndex::ForDescriptor(holder->map(isolate_), descriptor_number());
return JSObject::FastPropertyAt(
isolate_, holder, property_details_.representation(), field_index, tag);
}
DCHECK(holder_->IsJSSharedArray(isolate_));
Handle<JSSharedArray> holder = GetHolder<JSSharedArray>();
ElementsAccessor* accessor = holder->GetElementsAccessor(isolate_);
return accessor->GetAtomic(isolate_, holder, number_, kSeqCstAccess);
}
void LookupIterator::WriteDataValue(Handle<Object> value,
......@@ -1131,28 +1139,44 @@ void LookupIterator::WriteDataValue(Handle<Object> value,
void LookupIterator::WriteDataValue(Handle<Object> value, SeqCstAccessTag tag) {
DCHECK_EQ(DATA, state_);
DCHECK_EQ(PropertyLocation::kField, property_details_.location());
DCHECK_EQ(PropertyKind::kData, property_details_.kind());
// Currently only shared structs support sequentially consistent access.
Handle<JSSharedStruct> holder = GetHolder<JSSharedStruct>();
DisallowGarbageCollection no_gc;
FieldIndex field_index =
FieldIndex::ForDescriptor(holder->map(isolate_), descriptor_number());
holder->FastPropertyAtPut(field_index, *value, tag);
// Currently only shared structs and arrays support sequentially consistent
// access.
if (holder_->IsJSSharedStruct(isolate_)) {
DCHECK_EQ(PropertyLocation::kField, property_details_.location());
DCHECK_EQ(PropertyKind::kData, property_details_.kind());
Handle<JSSharedStruct> holder = GetHolder<JSSharedStruct>();
DisallowGarbageCollection no_gc;
FieldIndex field_index =
FieldIndex::ForDescriptor(holder->map(isolate_), descriptor_number());
holder->FastPropertyAtPut(field_index, *value, tag);
return;
}
DCHECK(holder_->IsJSSharedArray(isolate_));
Handle<JSSharedArray> holder = GetHolder<JSSharedArray>();
ElementsAccessor* accessor = holder->GetElementsAccessor(isolate_);
accessor->SetAtomic(holder, number_, *value, kSeqCstAccess);
}
Handle<Object> LookupIterator::SwapDataValue(Handle<Object> value,
SeqCstAccessTag tag) {
DCHECK_EQ(DATA, state_);
DCHECK_EQ(PropertyLocation::kField, property_details_.location());
DCHECK_EQ(PropertyKind::kData, property_details_.kind());
// Currently only shared structs support sequentially consistent access.
Handle<JSSharedStruct> holder = GetHolder<JSSharedStruct>();
DisallowGarbageCollection no_gc;
FieldIndex field_index =
FieldIndex::ForDescriptor(holder->map(isolate_), descriptor_number());
return handle(holder->RawFastPropertyAtSwap(field_index, *value, tag),
isolate_);
// Currently only shared structs and arrays support sequentially consistent
// access.
if (holder_->IsJSSharedStruct(isolate_)) {
DCHECK_EQ(PropertyLocation::kField, property_details_.location());
DCHECK_EQ(PropertyKind::kData, property_details_.kind());
// Currently only shared structs support sequentially consistent access.
Handle<JSSharedStruct> holder = GetHolder<JSSharedStruct>();
DisallowGarbageCollection no_gc;
FieldIndex field_index =
FieldIndex::ForDescriptor(holder->map(isolate_), descriptor_number());
return handle(holder->RawFastPropertyAtSwap(field_index, *value, tag),
isolate_);
}
DCHECK(holder_->IsJSSharedArray(isolate_));
Handle<JSSharedArray> holder = GetHolder<JSSharedArray>();
ElementsAccessor* accessor = holder->GetElementsAccessor(isolate_);
return accessor->SwapAtomic(isolate_, holder, number_, *value, kSeqCstAccess);
}
#if V8_ENABLE_WEBASSEMBLY
......
......@@ -473,6 +473,9 @@
#define RELAXED_WRITE_WEAK_FIELD(p, offset, value) \
TaggedField<MaybeObject>::Relaxed_Store(p, offset, value)
#define SEQ_CST_SWAP_FIELD(p, offset, value) \
TaggedField<Object>::SeqCst_Swap(p, offset, value)
#ifdef V8_DISABLE_WRITE_BARRIERS
#define WRITE_BARRIER(object, offset, value)
#else
......
......@@ -9,6 +9,7 @@
#include "src/logging/counters.h"
#include "src/numbers/conversions-inl.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/objects/js-shared-array-inl.h"
#include "src/objects/js-struct-inl.h"
#include "src/runtime/runtime-utils.h"
......@@ -614,23 +615,24 @@ RUNTIME_FUNCTION(Runtime_AtomicsXor) { UNREACHABLE(); }
// || V8_TARGET_ARCH_PPC || V8_TARGET_ARCH_S390 || V8_TARGET_ARCH_S390X
// || V8_TARGET_ARCH_RISCV64 || V8_TARGET_ARCH_LOONG64
RUNTIME_FUNCTION(Runtime_AtomicsLoadSharedStructField) {
RUNTIME_FUNCTION(Runtime_AtomicsLoadSharedStructOrArray) {
HandleScope scope(isolate);
DCHECK_EQ(2, args.length());
Handle<JSSharedStruct> shared_struct = args.at<JSSharedStruct>(0);
Handle<JSObject> shared_struct_or_shared_array = args.at<JSObject>(0);
Handle<Name> field_name;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, field_name,
Object::ToName(isolate, args.at(1)));
// Shared structs are prototypeless.
LookupIterator it(isolate, shared_struct, field_name, LookupIterator::OWN);
LookupIterator it(isolate, shared_struct_or_shared_array,
PropertyKey(isolate, field_name), LookupIterator::OWN);
if (it.IsFound()) return *it.GetDataValue(kSeqCstAccess);
return ReadOnlyRoots(isolate).undefined_value();
}
RUNTIME_FUNCTION(Runtime_AtomicsStoreSharedStructField) {
RUNTIME_FUNCTION(Runtime_AtomicsStoreSharedStructOrArray) {
HandleScope scope(isolate);
DCHECK_EQ(3, args.length());
Handle<JSSharedStruct> shared_struct = args.at<JSSharedStruct>(0);
Handle<JSObject> shared_struct_or_shared_array = args.at<JSObject>(0);
Handle<Name> field_name;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, field_name,
Object::ToName(isolate, args.at(1)));
......@@ -638,7 +640,8 @@ RUNTIME_FUNCTION(Runtime_AtomicsStoreSharedStructField) {
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, shared_value, Object::Share(isolate, args.at(2), kThrowOnError));
// Shared structs are prototypeless.
LookupIterator it(isolate, shared_struct, field_name, LookupIterator::OWN);
LookupIterator it(isolate, shared_struct_or_shared_array,
PropertyKey(isolate, field_name), LookupIterator::OWN);
if (it.IsFound()) {
it.WriteDataValue(shared_value, kSeqCstAccess);
return *shared_value;
......@@ -653,10 +656,10 @@ RUNTIME_FUNCTION(Runtime_AtomicsStoreSharedStructField) {
return ReadOnlyRoots(isolate).exception();
}
RUNTIME_FUNCTION(Runtime_AtomicsExchangeSharedStructField) {
RUNTIME_FUNCTION(Runtime_AtomicsExchangeSharedStructOrArray) {
HandleScope scope(isolate);
DCHECK_EQ(3, args.length());
Handle<JSSharedStruct> shared_struct = args.at<JSSharedStruct>(0);
Handle<JSObject> shared_struct_or_shared_array = args.at<JSObject>(0);
Handle<Name> field_name;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, field_name,
Object::ToName(isolate, args.at(1)));
......@@ -664,7 +667,8 @@ RUNTIME_FUNCTION(Runtime_AtomicsExchangeSharedStructField) {
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, shared_value, Object::Share(isolate, args.at(2), kThrowOnError));
// Shared structs are prototypeless.
LookupIterator it(isolate, shared_struct, field_name, LookupIterator::OWN);
LookupIterator it(isolate, shared_struct_or_shared_array,
PropertyKey(isolate, field_name), LookupIterator::OWN);
if (it.IsFound()) return *it.SwapDataValue(shared_value, kSeqCstAccess);
// Shared structs are non-extensible. Instead of duplicating logic, call
// Object::AddDataProperty to handle the error case.
......
......@@ -66,9 +66,9 @@ namespace internal {
F(AtomicsSub, 3, 1) \
F(AtomicsXor, 3, 1) \
F(SetAllowAtomicsWait, 1, 1) \
F(AtomicsLoadSharedStructField, 2, 1) \
F(AtomicsStoreSharedStructField, 3, 1) \
F(AtomicsExchangeSharedStructField, 3, 1)
F(AtomicsLoadSharedStructOrArray, 2, 1) \
F(AtomicsStoreSharedStructOrArray, 3, 1) \
F(AtomicsExchangeSharedStructOrArray, 3, 1)
#define FOR_EACH_INTRINSIC_BIGINT(F, I) \
F(BigIntBinaryOp, 3, 1) \
......
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Flags: --shared-string-table --harmony-struct --allow-natives-syntax
'use strict';
// d8-only: `Worker`, `getMessage` and `postMessage` are shell primitives.
if (this.Worker) {
(function TestSharedArrayPostMessage() {
// A SharedArray is passed to a worker via postMessage; writes performed in
// the worker must become visible to the main thread once the main thread
// observes the worker's SeqCst Atomics.store (happens-before via the
// Atomics.load/store pair on index 2).
let workerScript = `onmessage = function(arr) {
// Non-atomic write that will be made visible once main thread
// observes the atomic write below.
arr[0][0] = 42;
arr[1].payload = 84;
Atomics.store(arr, 2, "worker");
};
postMessage("started");`;
let worker = new Worker(workerScript, {type: 'string'});
// Handshake: wait until the worker is running before posting work.
let started = worker.getMessage();
assertEquals('started', started);
let OuterArray = new SharedArray(3);
let Struct = new SharedStructType(['payload']);
// Shared objects may reference other shared objects and shared primitives.
OuterArray[0] = new SharedArray(1);
OuterArray[1] = new Struct();
OuterArray[2] = 'main';
assertEquals(undefined, OuterArray[0][0]);
assertEquals(undefined, OuterArray[1].payload);
assertEquals('main', OuterArray[2]);
worker.postMessage(OuterArray);
// Spin until we observe the worker's write on index 2.
while (Atomics.load(OuterArray, 2) !== 'worker') {
}
// The non-atomic store write must also be visible.
assertEquals(42, OuterArray[0][0]);
assertEquals(84, OuterArray[1].payload);
worker.terminate();
})();
}
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Flags: --shared-string-table --harmony-struct
'use strict';
(function TestPrimitivesUsingAtomics() {
// Every primitive value round-trips through a SharedArray element via
// Atomics.store / Atomics.load.
const primitives = [42, -0, undefined, null, true, false, 'foo'];
const sharedArr = new SharedArray(1);
primitives.forEach((value) => {
  Atomics.store(sharedArr, 0, value);
  assertEquals(Atomics.load(sharedArr, 0), value);
});
// Atomics.exchange installs the new value and returns the previous one,
// for every ordered pair of primitives.
primitives.forEach((before) => {
  primitives.forEach((after) => {
    sharedArr[0] = before;
    assertEquals(Atomics.exchange(sharedArr, 0, after), before);
    assertEquals(sharedArr[0], after);
  });
});
})();
(function TestObjectsUsingAtomics() {
let arr = new SharedArray(1);
// Shared objects cannot point to non-shared objects: storing an ordinary
// array or object literal must throw.
assertThrows(() => {
Atomics.store(arr, 0, []);
});
assertThrows(() => {
Atomics.store(arr, 0, {});
});
// Shared objects can point to other shared objects.
let shared_rhs = new SharedArray(10);
Atomics.store(arr, 0, shared_rhs);
assertEquals(Atomics.load(arr, 0), shared_rhs);
// Shared structs are also valid element values.
let Struct = new SharedStructType(['field']);
shared_rhs = new Struct();
Atomics.store(arr, 0, shared_rhs);
assertEquals(Atomics.load(arr, 0), shared_rhs);
// Exchange returns the previously stored shared object.
let shared_rhs2 = new SharedArray(10);
assertEquals(Atomics.exchange(arr, 0, shared_rhs2), shared_rhs);
assertEquals(arr[0], shared_rhs2);
})();
(function TestOutOfBounds() {
let arr = new SharedArray(1);
// SharedArrays have a fixed length; stores outside [0, length) must throw.
// (The old comment mentioned shared structs, but this test exercises
// SharedArray bounds.)
assertThrows(() => {
Atomics.store(arr, 2, 42);
});
assertThrows(() => {
Atomics.store(arr, -1, 42);
});
// A non-index key is not an own property of a SharedArray; presumably this
// hits the non-extensible add-property path and throws — confirm.
assertThrows(() => {
Atomics.store(arr, 'field', 42);
});
})();
......@@ -33,7 +33,11 @@ let S = new SharedStructType(['field']);
assertThrows(() => { Atomics.store(s, 'field', []); });
assertThrows(() => { Atomics.store(s, 'field', {}); });
// Shared objects can point to other shared objects.
let shared_rhs = new S();
let shared_rhs = new SharedArray(10);
Atomics.store(s, 'field', shared_rhs);
assertEquals(Atomics.load(s, 'field'), shared_rhs);
shared_rhs = new S();
Atomics.store(s, 'field', shared_rhs);
assertEquals(Atomics.load(s, 'field'), shared_rhs);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment