Commit b6d4d9be authored by Shu-yu Guo, committed by V8 LUCI CQ

Reland^2 "[shared-struct] Add Atomics.Condition"

This is a reland of commit b1020a43

Changes since revert:
- Fixed global safepoint interrupts in
https://chromium-review.googlesource.com/c/v8/v8/+/3820913

Original change's description:
> Reland "[shared-struct] Add Atomics.Condition"
>
> This is a reland of commit e2066ff6
>
> Changes since revert:
> - Rebased against c9918524, which
>   uses the external pointer table for the WaiterQueueNode stored
>   in the state field when compressing pointers. This relaxes
>   the alignment requirement of the state field to be 4 bytes when
>   compressing pointers.
> - Moved the state field into the JSSynchronizationPrimitive base
>   class, since alignment and padding can now be made simpler.
>
> Original change's description:
> > [shared-struct] Add Atomics.Condition
> >
> > Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3630350
> > Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
> > Reviewed-by: Adam Klein <adamk@chromium.org>
>
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3763787
> Reviewed-by: Adam Klein <adamk@chromium.org>
> Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>

Bug: v8:12547
Change-Id: Ibc6de74c7853e4ea766ff2c70f92339ba69f2675
Cq-Include-Trybots: luci.v8.try:v8_linux_arm64_rel_ng,v8_linux64_tsan_rel_ng
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3820901
Reviewed-by: Adam Klein <adamk@chromium.org>
Commit-Queue: Shu-yu Guo <syg@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82368}
parent 267889f6
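
For context, the JavaScript-level API added by this CL can be exercised as in the following sketch. It is illustrative only, distilled from the mjsunit tests included further down in this diff (which run with --harmony-struct); it is not part of the commit itself.

// Waiting side: the mutex must be owned by the current agent before waiting,
// otherwise Atomics.Condition.wait throws. wait blocks until notified or until
// the optional timeout (in milliseconds) expires, returning true when notified
// and false on timeout.
let mutex = new Atomics.Mutex;
let cv = new Atomics.Condition;
let notified = Atomics.Mutex.lock(mutex, () => {
  return Atomics.Condition.wait(cv, mutex, 100);
});

// Notifying side, typically running in another agent that shares mutex and cv:
// wakes up to `count` waiters and returns the number actually woken; omitting
// the count wakes all waiters.
let woken = Atomics.Condition.notify(cv, 1);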
......@@ -29,8 +29,8 @@ BUILTIN(AtomicsMutexLock) {
Handle<JSAtomicsMutex> js_mutex = Handle<JSAtomicsMutex>::cast(js_mutex_obj);
Handle<Object> run_under_lock = args.atOrUndefined(isolate, 2);
if (!run_under_lock->IsCallable()) {
THROW_NEW_ERROR_RETURN_FAILURE(isolate,
NewTypeError(MessageTemplate::kNotCallable));
THROW_NEW_ERROR_RETURN_FAILURE(
isolate, NewTypeError(MessageTemplate::kNotCallable, run_under_lock));
}
// Like Atomics.wait, synchronous locking may block, and so is disallowed on
......@@ -39,7 +39,9 @@ BUILTIN(AtomicsMutexLock) {
// This is not a recursive lock, so also throw if recursively locking.
if (!isolate->allow_atomics_wait() || js_mutex->IsCurrentThreadOwner()) {
THROW_NEW_ERROR_RETURN_FAILURE(
isolate, NewTypeError(MessageTemplate::kAtomicsMutexLockNotAllowed));
isolate, NewTypeError(MessageTemplate::kAtomicsOperationNotAllowed,
isolate->factory()->NewStringFromAsciiChecked(
method_name)));
}
Handle<Object> result;
......@@ -69,8 +71,8 @@ BUILTIN(AtomicsMutexTryLock) {
Handle<JSAtomicsMutex> js_mutex = Handle<JSAtomicsMutex>::cast(js_mutex_obj);
Handle<Object> run_under_lock = args.atOrUndefined(isolate, 2);
if (!run_under_lock->IsCallable()) {
THROW_NEW_ERROR_RETURN_FAILURE(isolate,
NewTypeError(MessageTemplate::kNotCallable));
THROW_NEW_ERROR_RETURN_FAILURE(
isolate, NewTypeError(MessageTemplate::kNotCallable, run_under_lock));
}
JSAtomicsMutex::TryLockGuard try_lock_guard(isolate, js_mutex);
......@@ -86,5 +88,96 @@ BUILTIN(AtomicsMutexTryLock) {
return ReadOnlyRoots(isolate).false_value();
}
BUILTIN(AtomicsConditionConstructor) {
DCHECK(FLAG_harmony_struct);
HandleScope scope(isolate);
return *JSAtomicsCondition::Create(isolate);
}
BUILTIN(AtomicsConditionWait) {
DCHECK(FLAG_harmony_struct);
constexpr char method_name[] = "Atomics.Condition.wait";
HandleScope scope(isolate);
Handle<Object> js_condition_obj = args.atOrUndefined(isolate, 1);
Handle<Object> js_mutex_obj = args.atOrUndefined(isolate, 2);
Handle<Object> timeout_obj = args.atOrUndefined(isolate, 3);
if (!js_condition_obj->IsJSAtomicsCondition() ||
!js_mutex_obj->IsJSAtomicsMutex()) {
THROW_NEW_ERROR_RETURN_FAILURE(
isolate, NewTypeError(MessageTemplate::kMethodInvokedOnWrongType,
isolate->factory()->NewStringFromAsciiChecked(
method_name)));
}
base::Optional<base::TimeDelta> timeout = base::nullopt;
if (!timeout_obj->IsUndefined(isolate)) {
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, timeout_obj,
Object::ToNumber(isolate, timeout_obj));
double ms = timeout_obj->Number();
if (!std::isnan(ms)) {
if (ms < 0) ms = 0;
if (ms <= static_cast<double>(std::numeric_limits<int64_t>::max())) {
timeout = base::TimeDelta::FromMilliseconds(static_cast<int64_t>(ms));
}
}
}
if (!isolate->allow_atomics_wait()) {
THROW_NEW_ERROR_RETURN_FAILURE(
isolate, NewTypeError(MessageTemplate::kAtomicsOperationNotAllowed,
isolate->factory()->NewStringFromAsciiChecked(
method_name)));
}
Handle<JSAtomicsCondition> js_condition =
Handle<JSAtomicsCondition>::cast(js_condition_obj);
Handle<JSAtomicsMutex> js_mutex = Handle<JSAtomicsMutex>::cast(js_mutex_obj);
if (!js_mutex->IsCurrentThreadOwner()) {
THROW_NEW_ERROR_RETURN_FAILURE(
isolate,
NewTypeError(MessageTemplate::kAtomicsMutexNotOwnedByCurrentThread));
}
return isolate->heap()->ToBoolean(
JSAtomicsCondition::WaitFor(isolate, js_condition, js_mutex, timeout));
}
BUILTIN(AtomicsConditionNotify) {
DCHECK(FLAG_harmony_struct);
constexpr char method_name[] = "Atomics.Condition.notify";
HandleScope scope(isolate);
Handle<Object> js_condition_obj = args.atOrUndefined(isolate, 1);
Handle<Object> count_obj = args.atOrUndefined(isolate, 2);
if (!js_condition_obj->IsJSAtomicsCondition()) {
THROW_NEW_ERROR_RETURN_FAILURE(
isolate, NewTypeError(MessageTemplate::kMethodInvokedOnWrongType,
isolate->factory()->NewStringFromAsciiChecked(
method_name)));
}
uint32_t count;
if (count_obj->IsUndefined(isolate)) {
count = JSAtomicsCondition::kAllWaiters;
} else {
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, count_obj,
Object::ToInteger(isolate, count_obj));
double count_double = count_obj->Number();
if (count_double < 0) {
count_double = 0;
} else if (count_double > JSAtomicsCondition::kAllWaiters) {
count_double = JSAtomicsCondition::kAllWaiters;
}
count = static_cast<uint32_t>(count_double);
}
Handle<JSAtomicsCondition> js_condition =
Handle<JSAtomicsCondition>::cast(js_condition_obj);
return *isolate->factory()->NewNumberFromUint(
js_condition->Notify(isolate, count));
}
} // namespace internal
} // namespace v8
......@@ -1005,6 +1005,9 @@ namespace internal {
CPP(AtomicsMutexConstructor) \
CPP(AtomicsMutexLock) \
CPP(AtomicsMutexTryLock) \
CPP(AtomicsConditionConstructor) \
CPP(AtomicsConditionWait) \
CPP(AtomicsConditionNotify) \
\
/* AsyncGenerator */ \
\
......
......@@ -231,7 +231,9 @@ Object DoWait(Isolate* isolate, FutexEmulation::WaitMode mode,
if (mode == FutexEmulation::WaitMode::kSync &&
!isolate->allow_atomics_wait()) {
THROW_NEW_ERROR_RETURN_FAILURE(
isolate, NewTypeError(MessageTemplate::kAtomicsWaitNotAllowed));
isolate, NewTypeError(MessageTemplate::kAtomicsOperationNotAllowed,
isolate->factory()->NewStringFromAsciiChecked(
"Atomics.wait")));
}
Handle<JSArrayBuffer> array_buffer = sta->GetBuffer();
......
......@@ -41,9 +41,9 @@ namespace internal {
T(AwaitNotInDebugEvaluate, \
"await can not be used when evaluating code " \
"while paused in the debugger") \
T(AtomicsMutexLockNotAllowed, \
"Atomics.Mutex.lock cannot be called in this context") \
T(AtomicsWaitNotAllowed, "Atomics.wait cannot be called in this context") \
T(AtomicsMutexNotOwnedByCurrentThread, \
"Atomics.Mutex is not owned by the current agent") \
T(AtomicsOperationNotAllowed, "% cannot be called in this context") \
T(BadRoundingType, "RoundingType is not fractionDigits") \
T(BadSortComparisonFunction, \
"The comparison function must be either a function or undefined") \
......
......@@ -264,6 +264,7 @@ Type::bitset BitsetType::Lub(const MapRefLike& map) {
case JS_SHADOW_REALM_TYPE:
case JS_SHARED_ARRAY_TYPE:
case JS_SHARED_STRUCT_TYPE:
case JS_ATOMICS_CONDITION_TYPE:
case JS_ATOMICS_MUTEX_TYPE:
case JS_TEMPORAL_CALENDAR_TYPE:
case JS_TEMPORAL_DURATION_TYPE:
......
......@@ -551,7 +551,8 @@ void Map::MapVerify(Isolate* isolate) {
JSObject::GetEmbedderFieldCount(*this) * kEmbedderDataSlotSize,
inobject_fields_start_offset);
if (IsJSSharedStructMap() || IsJSSharedArrayMap()) {
if (IsJSSharedStructMap() || IsJSSharedArrayMap() || IsJSAtomicsMutex() ||
IsJSAtomicsCondition()) {
CHECK(InSharedHeap());
CHECK(GetBackPointer().IsUndefined(isolate));
Object maybe_cell = prototype_validity_cell();
......@@ -1265,10 +1266,12 @@ void JSAtomicsMutex::JSAtomicsMutexVerify(Isolate* isolate) {
CHECK(IsJSAtomicsMutex());
CHECK(InSharedWritableHeap());
JSObjectVerify(isolate);
Map mutex_map = map();
CHECK(mutex_map.GetBackPointer().IsUndefined(isolate));
CHECK(!mutex_map.is_extensible());
CHECK(!mutex_map.is_prototype_map());
}
void JSAtomicsCondition::JSAtomicsConditionVerify(Isolate* isolate) {
CHECK(IsJSAtomicsCondition());
CHECK(InSharedHeap());
JSObjectVerify(isolate);
}
void JSSharedArray::JSSharedArrayVerify(Isolate* isolate) {
......
......@@ -1495,6 +1495,15 @@ void JSAtomicsMutex::JSAtomicsMutexPrint(std::ostream& os) {
JSObjectPrintBody(os, *this);
}
void JSAtomicsCondition::JSAtomicsConditionPrint(std::ostream& os) {
JSObjectPrintHeader(os, *this, "JSAtomicsCondition");
Isolate* isolate = GetIsolateFromWritableObject(*this);
os << "\n - isolate: " << isolate;
if (isolate->is_shared()) os << " (shared)";
os << "\n - state: " << this->state();
JSObjectPrintBody(os, *this);
}
void JSWeakMap::JSWeakMapPrint(std::ostream& os) {
JSObjectPrintHeader(os, *this, "JSWeakMap");
os << "\n - table: " << Brief(table());
......
......@@ -31,12 +31,12 @@ namespace internal {
V(FeedbackMetadata) \
V(FixedDoubleArray) \
V(JSArrayBuffer) \
V(JSAtomicsMutex) \
V(JSDataView) \
V(JSExternalObject) \
V(JSFinalizationRegistry) \
V(JSFunction) \
V(JSObject) \
V(JSSynchronizationPrimitive) \
V(JSTypedArray) \
V(WeakCell) \
V(JSWeakCollection) \
......
......@@ -4663,8 +4663,10 @@ void Genesis::InitializeGlobal_harmony_struct() {
DONT_ENUM);
}
// TODO(v8:12547): Make a single canonical copy of the Mutex and Condition
// maps.
{ // Atomics.Mutex
// TODO(syg): Make a single canonical copy of the map.
Handle<String> mutex_str =
isolate()->factory()->InternalizeUtf8String("Mutex");
Handle<JSFunction> mutex_fun = CreateSharedObjectConstructor(
......@@ -4683,6 +4685,27 @@ void Genesis::InitializeGlobal_harmony_struct() {
SimpleInstallFunction(isolate(), mutex_fun, "tryLock",
Builtin::kAtomicsMutexTryLock, 2, true);
}
{ // Atomics.Condition
Handle<String> condition_str =
isolate()->factory()->InternalizeUtf8String("Condition");
Handle<JSFunction> condition_fun = CreateSharedObjectConstructor(
isolate(), condition_str, JS_ATOMICS_CONDITION_TYPE,
JSAtomicsCondition::kHeaderSize, TERMINAL_FAST_ELEMENTS_KIND,
Builtin::kAtomicsConditionConstructor);
condition_fun->shared().set_internal_formal_parameter_count(
JSParameterCount(0));
condition_fun->shared().set_length(0);
native_context()->set_js_atomics_condition_map(
condition_fun->initial_map());
JSObject::AddProperty(isolate(), isolate()->atomics_object(), condition_str,
condition_fun, DONT_ENUM);
SimpleInstallFunction(isolate(), condition_fun, "wait",
Builtin::kAtomicsConditionWait, 2, false);
SimpleInstallFunction(isolate(), condition_fun, "notify",
Builtin::kAtomicsConditionNotify, 2, false);
}
}
void Genesis::InitializeGlobal_harmony_array_find_last() {
......
......@@ -178,6 +178,7 @@ enum ContextLookupFlags {
js_array_packed_double_elements_map) \
V(JS_ARRAY_HOLEY_DOUBLE_ELEMENTS_MAP_INDEX, Map, \
js_array_holey_double_elements_map) \
V(JS_ATOMICS_CONDITION_MAP, Map, js_atomics_condition_map) \
V(JS_ATOMICS_MUTEX_MAP, Map, js_atomics_mutex_map) \
V(JS_MAP_FUN_INDEX, JSFunction, js_map_fun) \
V(JS_MAP_MAP_INDEX, Map, js_map_map) \
......
......@@ -19,6 +19,15 @@ namespace internal {
#include "torque-generated/src/objects/js-atomics-synchronization-tq-inl.inc"
TQ_OBJECT_CONSTRUCTORS_IMPL(JSSynchronizationPrimitive)
std::atomic<JSSynchronizationPrimitive::StateT>*
JSSynchronizationPrimitive::AtomicStatePtr() {
StateT* state_ptr = reinterpret_cast<StateT*>(field_address(kStateOffset));
DCHECK(IsAligned(reinterpret_cast<uintptr_t>(state_ptr), sizeof(StateT)));
return base::AsAtomicPtr(state_ptr);
}
TQ_OBJECT_CONSTRUCTORS_IMPL(JSAtomicsMutex)
CAST_ACCESSOR(JSAtomicsMutex)
......@@ -111,18 +120,16 @@ void JSAtomicsMutex::ClearOwnerThread() {
std::memory_order_relaxed);
}
std::atomic<JSAtomicsMutex::StateT>* JSAtomicsMutex::AtomicStatePtr() {
StateT* state_ptr = reinterpret_cast<StateT*>(field_address(kStateOffset));
DCHECK(IsAligned(reinterpret_cast<uintptr_t>(state_ptr), sizeof(StateT)));
return base::AsAtomicPtr(state_ptr);
}
std::atomic<int32_t>* JSAtomicsMutex::AtomicOwnerThreadIdPtr() {
int32_t* owner_thread_id_ptr =
reinterpret_cast<int32_t*>(field_address(kOwnerThreadIdOffset));
return base::AsAtomicPtr(owner_thread_id_ptr);
}
TQ_OBJECT_CONSTRUCTORS_IMPL(JSAtomicsCondition)
CAST_ACCESSOR(JSAtomicsCondition)
} // namespace internal
} // namespace v8
......
......@@ -7,6 +7,7 @@
#include <atomic>
#include "src/base/platform/time.h"
#include "src/execution/thread-id.h"
#include "src/objects/js-objects.h"
......@@ -22,12 +23,38 @@ namespace detail {
class WaiterQueueNode;
} // namespace detail
// Base class for JSAtomicsMutex and JSAtomicsCondition
class JSSynchronizationPrimitive
: public TorqueGeneratedJSSynchronizationPrimitive<
JSSynchronizationPrimitive, JSObject> {
public:
// Synchronization primitives only store raw data as state.
static constexpr int kEndOfTaggedFieldsOffset = JSObject::kHeaderSize;
class BodyDescriptor;
TQ_OBJECT_CONSTRUCTORS(JSSynchronizationPrimitive)
protected:
#ifdef V8_COMPRESS_POINTERS
using StateT = uint32_t;
static_assert(sizeof(StateT) == sizeof(ExternalPointerHandle));
#else
using StateT = uintptr_t;
#endif // V8_COMPRESS_POINTERS
inline std::atomic<StateT>* AtomicStatePtr();
using TorqueGeneratedJSSynchronizationPrimitive<JSSynchronizationPrimitive,
JSObject>::state;
using TorqueGeneratedJSSynchronizationPrimitive<JSSynchronizationPrimitive,
JSObject>::set_state;
};
// A non-recursive mutex that is exposed to JS.
//
// It has the following properties:
// - Slim: 8-12 bytes. Lock state is 4 bytes when
// V8_SANDBOXED_EXTERNAL_POINTERS, and sizeof(void*) otherwise. Owner
// thread is an additional 4 bytes.
// - Slim: 8-12 bytes. Lock state is 4 bytes when V8_COMPRESS_POINTERS, and
// sizeof(void*) otherwise. Owner thread is an additional 4 bytes.
// - Fast when uncontended: a single weak CAS.
// - Possibly unfair under contention.
// - Moving GC safe. It uses an index into the shared Isolate's external
......@@ -41,7 +68,8 @@ class WaiterQueueNode;
// it implements a futex in userland. The algorithm is inspired by WebKit's
// ParkingLot.
class JSAtomicsMutex
: public TorqueGeneratedJSAtomicsMutex<JSAtomicsMutex, JSObject> {
: public TorqueGeneratedJSAtomicsMutex<JSAtomicsMutex,
JSSynchronizationPrimitive> {
public:
// A non-copyable wrapper class that provides an RAII-style mechanism for
// owning the JSAtomicsMutex.
......@@ -96,9 +124,6 @@ class JSAtomicsMutex
inline bool IsHeld();
inline bool IsCurrentThreadOwner();
static constexpr int kEndOfTaggedFieldsOffset = JSObject::kHeaderSize;
class BodyDescriptor;
TQ_OBJECT_CONSTRUCTORS(JSAtomicsMutex)
private:
......@@ -110,13 +135,6 @@ class JSAtomicsMutex
static constexpr int kIsWaiterQueueLockedBit = 1 << 1;
static constexpr int kLockBitsSize = 2;
#ifdef V8_COMPRESS_POINTERS
using StateT = uint32_t;
static_assert(sizeof(StateT) == sizeof(ExternalPointerHandle));
#else
using StateT = uintptr_t;
#endif
static constexpr StateT kUnlocked = 0;
static constexpr StateT kLockedUncontended = 1;
......@@ -126,7 +144,6 @@ class JSAtomicsMutex
inline void SetCurrentThreadAsOwner();
inline void ClearOwnerThread();
inline std::atomic<StateT>* AtomicStatePtr();
inline std::atomic<int32_t>* AtomicOwnerThreadIdPtr();
bool TryLockExplicit(std::atomic<StateT>* state, StateT& expected);
......@@ -138,12 +155,59 @@ class JSAtomicsMutex
V8_EXPORT_PRIVATE void UnlockSlowPath(Isolate* requester,
std::atomic<StateT>* state);
using TorqueGeneratedJSAtomicsMutex<JSAtomicsMutex, JSObject>::state;
using TorqueGeneratedJSAtomicsMutex<JSAtomicsMutex, JSObject>::set_state;
using TorqueGeneratedJSAtomicsMutex<JSAtomicsMutex,
JSObject>::owner_thread_id;
using TorqueGeneratedJSAtomicsMutex<JSAtomicsMutex,
JSObject>::set_owner_thread_id;
using TorqueGeneratedJSAtomicsMutex<
JSAtomicsMutex, JSSynchronizationPrimitive>::owner_thread_id;
using TorqueGeneratedJSAtomicsMutex<
JSAtomicsMutex, JSSynchronizationPrimitive>::set_owner_thread_id;
};
// A condition variable that is exposed to JS.
//
// It has the following properties:
// - Slim: 4-8 bytes. Lock state is 4 bytes when V8_COMPRESS_POINTERS, and
// sizeof(void*) otherwise.
// - Moving GC safe. It uses an index into the shared Isolate's external
// pointer table to store a queue of sleeping threads.
// - Parks the main thread LocalHeap when waiting. Unparks the main thread
// LocalHeap after waking up.
//
// This condition variable manages its own queue of waiting threads, like
// JSAtomicsMutex. The algorithm is inspired by WebKit's ParkingLot.
class JSAtomicsCondition
: public TorqueGeneratedJSAtomicsCondition<JSAtomicsCondition,
JSSynchronizationPrimitive> {
public:
DECL_CAST(JSAtomicsCondition)
DECL_PRINTER(JSAtomicsCondition)
EXPORT_DECL_VERIFIER(JSAtomicsCondition)
V8_EXPORT_PRIVATE static Handle<JSAtomicsCondition> Create(Isolate* isolate);
V8_EXPORT_PRIVATE static bool WaitFor(
Isolate* requester, Handle<JSAtomicsCondition> cv,
Handle<JSAtomicsMutex> mutex, base::Optional<base::TimeDelta> timeout);
static constexpr uint32_t kAllWaiters = UINT32_MAX;
// Notify {count} waiters. Returns the number of waiters woken up.
V8_EXPORT_PRIVATE uint32_t Notify(Isolate* requester, uint32_t count);
Object NumWaitersForTesting(Isolate* isolate);
TQ_OBJECT_CONSTRUCTORS(JSAtomicsCondition)
private:
friend class detail::WaiterQueueNode;
// There is 1 lock bit: whether the waiter queue is locked.
static constexpr int kIsWaiterQueueLockedBit = 1 << 0;
static constexpr int kLockBitsSize = 1;
static constexpr StateT kEmptyState = 0;
static constexpr StateT kLockBitsMask = (1 << kLockBitsSize) - 1;
static constexpr StateT kWaiterQueueHeadMask = ~kLockBitsMask;
bool TryLockWaiterQueueExplicit(std::atomic<StateT>* state, StateT& expected);
};
} // namespace internal
......
......@@ -2,12 +2,17 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
extern class JSAtomicsMutex extends JSObject {
@abstract
extern class JSSynchronizationPrimitive extends JSObject {
@if(TAGGED_SIZE_8_BYTES) state: uintptr;
@ifnot(TAGGED_SIZE_8_BYTES) state: uint32;
}
extern class JSAtomicsMutex extends JSSynchronizationPrimitive {
owner_thread_id: int32;
@if(TAGGED_SIZE_8_BYTES) optional_padding: uint32;
@ifnot(TAGGED_SIZE_8_BYTES) optional_padding: void;
}
extern class JSAtomicsCondition extends JSSynchronizationPrimitive {}
......@@ -2474,6 +2474,8 @@ int JSObject::GetHeaderSize(InstanceType type,
return JSSharedStruct::kHeaderSize;
case JS_ATOMICS_MUTEX_TYPE:
return JSAtomicsMutex::kHeaderSize;
case JS_ATOMICS_CONDITION_TYPE:
return JSAtomicsCondition::kHeaderSize;
case JS_TEMPORAL_CALENDAR_TYPE:
return JSTemporalCalendar::kHeaderSize;
case JS_TEMPORAL_DURATION_TYPE:
......
......@@ -324,6 +324,7 @@ VisitorId Map::GetVisitorId(Map map) {
#endif // V8_ENABLE_WEBASSEMBLY
case JS_BOUND_FUNCTION_TYPE:
case JS_WRAPPED_FUNCTION_TYPE: {
// Is GetEmbedderFieldCount(map) > 0 for Atomics.Mutex?
const bool has_raw_data_fields =
COMPRESS_POINTERS_BOOL && JSObject::GetEmbedderFieldCount(map) > 0;
return has_raw_data_fields ? kVisitJSObject : kVisitJSObjectFast;
......@@ -337,15 +338,16 @@ VisitorId Map::GetVisitorId(Map map) {
case JS_WEAK_REF_TYPE:
return kVisitJSWeakRef;
case JS_ATOMICS_MUTEX_TYPE:
return kVisitJSAtomicsMutex;
case WEAK_CELL_TYPE:
return kVisitWeakCell;
case JS_FINALIZATION_REGISTRY_TYPE:
return kVisitJSFinalizationRegistry;
case JS_ATOMICS_MUTEX_TYPE:
case JS_ATOMICS_CONDITION_TYPE:
return kVisitJSSynchronizationPrimitive;
case FILLER_TYPE:
case FOREIGN_TYPE:
case HEAP_NUMBER_TYPE:
......
......@@ -47,13 +47,13 @@ enum InstanceType : uint16_t;
V(FreeSpace) \
V(JSApiObject) \
V(JSArrayBuffer) \
V(JSAtomicsMutex) \
V(JSDataView) \
V(JSExternalObject) \
V(JSFinalizationRegistry) \
V(JSFunction) \
V(JSObject) \
V(JSObjectFast) \
V(JSSynchronizationPrimitive) \
V(JSTypedArray) \
V(JSWeakRef) \
V(JSWeakCollection) \
......
......@@ -132,6 +132,7 @@ class ZoneForwardList;
V(JSAsyncFromSyncIterator) \
V(JSAsyncFunctionObject) \
V(JSAsyncGeneratorObject) \
V(JSAtomicsCondition) \
V(JSAtomicsMutex) \
V(JSBoundFunction) \
V(JSCollection) \
......@@ -167,6 +168,7 @@ class ZoneForwardList;
V(JSSharedStruct) \
V(JSSpecialObject) \
V(JSStringIterator) \
V(JSSynchronizationPrimitive) \
V(JSTemporalCalendar) \
V(JSTemporalDuration) \
V(JSTemporalInstant) \
......
......@@ -668,7 +668,8 @@ class JSWeakCollection::BodyDescriptorImpl final : public BodyDescriptorBase {
}
};
class JSAtomicsMutex::BodyDescriptor final : public BodyDescriptorBase {
class JSSynchronizationPrimitive::BodyDescriptor final
: public BodyDescriptorBase {
public:
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
if (offset < kEndOfTaggedFieldsOffset) return true;
......@@ -680,7 +681,6 @@ class JSAtomicsMutex::BodyDescriptor final : public BodyDescriptorBase {
static inline void IterateBody(Map map, HeapObject obj, int object_size,
ObjectVisitor* v) {
IteratePointers(obj, kPropertiesOrHashOffset, kEndOfTaggedFieldsOffset, v);
IterateJSObjectBodyImpl(map, obj, kHeaderSize, object_size, v);
}
static inline int SizeOf(Map map, HeapObject object) {
......@@ -1309,7 +1309,8 @@ auto BodyDescriptorApply(InstanceType type, Args&&... args) {
case JS_PROXY_TYPE:
return CALL_APPLY(JSProxy);
case JS_ATOMICS_MUTEX_TYPE:
return CALL_APPLY(JSAtomicsMutex);
case JS_ATOMICS_CONDITION_TYPE:
return CALL_APPLY(JSSynchronizationPrimitive);
case FOREIGN_TYPE:
return CALL_APPLY(Foreign);
case MAP_TYPE:
......
......@@ -1188,6 +1188,7 @@ bool Object::IsShared() const {
case JS_SHARED_ARRAY_TYPE:
case JS_SHARED_STRUCT_TYPE:
case JS_ATOMICS_MUTEX_TYPE:
case JS_ATOMICS_CONDITION_TYPE:
DCHECK(object.InSharedHeap());
return true;
case INTERNALIZED_STRING_TYPE:
......
......@@ -615,6 +615,7 @@ Maybe<bool> ValueSerializer::WriteJSReceiver(Handle<JSReceiver> receiver) {
case JS_SHARED_STRUCT_TYPE:
return WriteJSSharedStruct(Handle<JSSharedStruct>::cast(receiver));
case JS_ATOMICS_MUTEX_TYPE:
case JS_ATOMICS_CONDITION_TYPE:
return WriteSharedObject(receiver);
#if V8_ENABLE_WEBASSEMBLY
case WASM_MODULE_OBJECT_TYPE:
......
......@@ -23,6 +23,7 @@
#include "src/heap/heap-inl.h" // For ToBoolean. TODO(jkummerow): Drop.
#include "src/heap/heap-write-barrier-inl.h"
#include "src/ic/stub-cache.h"
#include "src/objects/js-atomics-synchronization-inl.h"
#include "src/objects/js-function-inl.h"
#include "src/objects/js-regexp-inl.h"
#include "src/objects/smi.h"
......@@ -1709,5 +1710,12 @@ RUNTIME_FUNCTION(Runtime_SharedGC) {
return ReadOnlyRoots(isolate).undefined_value();
}
RUNTIME_FUNCTION(Runtime_AtomicsConditionNumWaitersForTesting) {
HandleScope scope(isolate);
DCHECK_EQ(1, args.length());
Handle<JSAtomicsCondition> cv = args.at<JSAtomicsCondition>(0);
return cv->NumWaitersForTesting(isolate);
}
} // namespace internal
} // namespace v8
......@@ -106,8 +106,10 @@ class V8_NODISCARD ClearThreadInWasmScope {
Isolate* isolate_;
};
Object ThrowWasmError(Isolate* isolate, MessageTemplate message) {
Handle<JSObject> error_obj = isolate->factory()->NewWasmRuntimeError(message);
Object ThrowWasmError(Isolate* isolate, MessageTemplate message,
Handle<Object> arg0 = Handle<Object>()) {
Handle<JSObject> error_obj =
isolate->factory()->NewWasmRuntimeError(message, arg0);
JSObject::AddProperty(isolate, error_obj,
isolate->factory()->wasm_uncatchable_symbol(),
isolate->factory()->true_value(), NONE);
......@@ -370,7 +372,9 @@ RUNTIME_FUNCTION(Runtime_WasmI32AtomicWait) {
// Trap if memory is not shared, or wait is not allowed on the isolate
if (!array_buffer->is_shared() || !isolate->allow_atomics_wait()) {
return ThrowWasmError(isolate, MessageTemplate::kAtomicsWaitNotAllowed);
return ThrowWasmError(
isolate, MessageTemplate::kAtomicsOperationNotAllowed,
isolate->factory()->NewStringFromAsciiChecked("Atomics.wait"));
}
return FutexEmulation::WaitWasm32(isolate, array_buffer, offset,
expected_value, timeout_ns.AsInt64());
......@@ -393,7 +397,9 @@ RUNTIME_FUNCTION(Runtime_WasmI64AtomicWait) {
// Trap if memory is not shared, or if wait is not allowed on the isolate
if (!array_buffer->is_shared() || !isolate->allow_atomics_wait()) {
return ThrowWasmError(isolate, MessageTemplate::kAtomicsWaitNotAllowed);
return ThrowWasmError(
isolate, MessageTemplate::kAtomicsOperationNotAllowed,
isolate->factory()->NewStringFromAsciiChecked("Atomics.wait"));
}
return FutexEmulation::WaitWasm64(isolate, array_buffer, offset,
expected_value.AsInt64(),
......
......@@ -65,7 +65,8 @@ namespace internal {
F(SetAllowAtomicsWait, 1, 1) \
F(AtomicsLoadSharedStructOrArray, 2, 1) \
F(AtomicsStoreSharedStructOrArray, 3, 1) \
F(AtomicsExchangeSharedStructOrArray, 3, 1)
F(AtomicsExchangeSharedStructOrArray, 3, 1) \
F(AtomicsConditionNumWaitersForTesting, 1, 1)
#define FOR_EACH_INTRINSIC_BIGINT(F, I) \
F(BigIntBinaryOp, 3, 1) \
......
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Flags: --harmony-struct --allow-natives-syntax
"use strict";
if (this.Worker) {
(function TestWait() {
let workerScript =
`onmessage = function(msg) {
let mutex = msg.mutex;
let cv = msg.cv;
let res = Atomics.Mutex.lock(mutex, function() {
return Atomics.Condition.wait(cv, mutex);
});
postMessage(res);
};`;
let mutex = new Atomics.Mutex;
let cv = new Atomics.Condition;
let msg = {mutex, cv};
let worker1 = new Worker(workerScript, { type: 'string' });
let worker2 = new Worker(workerScript, { type: 'string' });
worker1.postMessage(msg);
worker2.postMessage(msg);
// Spin until both workers are waiting.
while (%AtomicsConditionNumWaitersForTesting(cv) != 2) {}
assertEquals(2, Atomics.Condition.notify(cv, 2));
assertEquals(true, worker1.getMessage());
assertEquals(true, worker2.getMessage());
worker1.terminate();
worker2.terminate();
})();
}
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Flags: --harmony-struct --allow-natives-syntax
let mutex = new Atomics.Mutex;
let cv = new Atomics.Condition;
(function TestConditionWaitNotAllowed() {
assertThrows(() => {
Atomics.Mutex.lock(mutex, () => {
%SetAllowAtomicsWait(false);
Atomics.Condition.wait(cv, mutex);
});
});
%SetAllowAtomicsWait(true);
})();
(function TestConditionMutexNotHeld() {
// Cannot wait on a mutex not owned by the current thread.
assertThrows(() => {
Atomics.Condition.wait(cv, mutex);
});
})();
(function TestConditionNoWaiters() {
// Notify returns number of threads woken up.
assertEquals(0, Atomics.Condition.notify(cv));
})();
(function TestConditionWaitTimeout() {
Atomics.Mutex.lock(mutex, () => {
assertEquals(false, Atomics.Condition.wait(cv, mutex, 100));
});
})();
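
A minimal sketch of the count handling in AtomicsConditionNotify (see the builtin near the top of this diff), complementing the tests above: an omitted count means "all waiters"; otherwise the count is converted with ToInteger and clamped to the range [0, 2**32 - 1]. The calls below are illustrative and, with no waiters registered, each returns 0.

let cv2 = new Atomics.Condition;
Atomics.Condition.notify(cv2);          // count omitted: notify all waiters
Atomics.Condition.notify(cv2, -5);      // negative counts clamp to 0
Atomics.Condition.notify(cv2, 2 ** 53); // counts above 2**32 - 1 clamp to "all"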
......@@ -416,7 +416,7 @@ v8_source_set("unittests_sources") {
"interpreter/source-position-matcher.cc",
"interpreter/source-position-matcher.h",
"interpreter/source-positions-unittest.cc",
"js-atomics/js-atomics-mutex-unittest.cc",
"js-atomics/js-atomics-synchronization-primitive-unittest.cc",
"libplatform/default-job-unittest.cc",
"libplatform/default-platform-unittest.cc",
"libplatform/default-worker-threads-task-runner-unittest.cc",
......
......@@ -13,6 +13,7 @@ namespace v8 {
namespace internal {
using JSAtomicsMutexTest = TestWithSharedIsolate;
using JSAtomicsConditionTest = TestWithSharedIsolate;
namespace {
......@@ -36,13 +37,26 @@ class ClientIsolateWithContextWrapper final {
v8::Context::Scope context_scope_;
};
class LockingThread final : public v8::base::Thread {
class ParkingThread : public v8::base::Thread {
public:
explicit ParkingThread(const Options& options) : v8::base::Thread(options) {}
void ParkedJoin(const ParkedScope& scope) {
USE(scope);
Join();
}
private:
using base::Thread::Join;
};
class LockingThread final : public ParkingThread {
public:
LockingThread(v8::Isolate* shared_isolate, Handle<JSAtomicsMutex> mutex,
ParkingSemaphore* sema_ready,
ParkingSemaphore* sema_execute_start,
ParkingSemaphore* sema_execute_complete)
: Thread(Options("ThreadWithAtomicsMutex")),
: ParkingThread(Options("LockingThread")),
shared_isolate_(shared_isolate),
mutex_(mutex),
sema_ready_(sema_ready),
......@@ -66,14 +80,7 @@ class LockingThread final : public v8::base::Thread {
sema_execute_complete_->Signal();
}
void ParkedJoin(const ParkedScope& scope) {
USE(scope);
Join();
}
protected:
using base::Thread::Join;
private:
v8::Isolate* shared_isolate_;
Handle<JSAtomicsMutex> mutex_;
ParkingSemaphore* sema_ready_;
......@@ -125,5 +132,112 @@ TEST_F(JSAtomicsMutexTest, Contention) {
EXPECT_FALSE(contended_mutex->IsHeld());
}
namespace {
class WaitOnConditionThread final : public ParkingThread {
public:
WaitOnConditionThread(v8::Isolate* shared_isolate,
Handle<JSAtomicsMutex> mutex,
Handle<JSAtomicsCondition> condition,
uint32_t* waiting_threads_count,
ParkingSemaphore* sema_ready,
ParkingSemaphore* sema_execute_complete)
: ParkingThread(Options("WaitOnConditionThread")),
shared_isolate_(shared_isolate),
mutex_(mutex),
condition_(condition),
waiting_threads_count_(waiting_threads_count),
sema_ready_(sema_ready),
sema_execute_complete_(sema_execute_complete) {}
void Run() override {
ClientIsolateWithContextWrapper client_isolate_wrapper(shared_isolate_);
Isolate* isolate = client_isolate_wrapper.isolate();
sema_ready_->Signal();
HandleScope scope(isolate);
JSAtomicsMutex::Lock(isolate, mutex_);
while (keep_waiting) {
(*waiting_threads_count_)++;
EXPECT_TRUE(JSAtomicsCondition::WaitFor(isolate, condition_, mutex_,
base::nullopt));
(*waiting_threads_count_)--;
}
mutex_->Unlock(isolate);
sema_execute_complete_->Signal();
}
bool keep_waiting = true;
private:
v8::Isolate* shared_isolate_;
Handle<JSAtomicsMutex> mutex_;
Handle<JSAtomicsCondition> condition_;
uint32_t* waiting_threads_count_;
ParkingSemaphore* sema_ready_;
ParkingSemaphore* sema_execute_complete_;
};
} // namespace
TEST_F(JSAtomicsConditionTest, NotifyAll) {
if (!IsJSSharedMemorySupported()) return;
FLAG_harmony_struct = true;
v8::Isolate* shared_isolate = v8_isolate();
ClientIsolateWithContextWrapper client_isolate_wrapper(shared_isolate);
Isolate* client_isolate = client_isolate_wrapper.isolate();
constexpr uint32_t kThreads = 32;
Handle<JSAtomicsMutex> mutex = JSAtomicsMutex::Create(client_isolate);
Handle<JSAtomicsCondition> condition =
JSAtomicsCondition::Create(client_isolate);
uint32_t waiting_threads_count = 0;
ParkingSemaphore sema_ready(0);
ParkingSemaphore sema_execute_complete(0);
std::vector<std::unique_ptr<WaitOnConditionThread>> threads;
for (uint32_t i = 0; i < kThreads; i++) {
auto thread = std::make_unique<WaitOnConditionThread>(
shared_isolate, mutex, condition, &waiting_threads_count, &sema_ready,
&sema_execute_complete);
CHECK(thread->Start());
threads.push_back(std::move(thread));
}
LocalIsolate* local_isolate = client_isolate->main_thread_local_isolate();
for (uint32_t i = 0; i < kThreads; i++) {
sema_ready.ParkedWait(local_isolate);
}
// Wait until all threads are waiting on the condition.
for (;;) {
JSAtomicsMutex::LockGuard lock_guard(client_isolate, mutex);
uint32_t count = waiting_threads_count;
if (count == kThreads) break;
}
// Wake all the threads up.
for (uint32_t i = 0; i < kThreads; i++) {
threads[i]->keep_waiting = false;
}
EXPECT_EQ(kThreads,
condition->Notify(client_isolate, JSAtomicsCondition::kAllWaiters));
for (uint32_t i = 0; i < kThreads; i++) {
sema_execute_complete.ParkedWait(local_isolate);
}
ParkedScope parked(local_isolate);
for (auto& thread : threads) {
thread->ParkedJoin(parked);
}
EXPECT_EQ(0U, waiting_threads_count);
EXPECT_FALSE(mutex->IsHeld());
}
} // namespace internal
} // namespace v8
......@@ -224,57 +224,58 @@ INSTANCE_TYPES = {
2098: "JS_ASYNC_GENERATOR_OBJECT_TYPE",
2099: "JS_MAP_TYPE",
2100: "JS_SET_TYPE",
2101: "JS_WEAK_MAP_TYPE",
2102: "JS_WEAK_SET_TYPE",
2103: "JS_ARGUMENTS_OBJECT_TYPE",
2104: "JS_ARRAY_TYPE",
2105: "JS_ARRAY_ITERATOR_TYPE",
2106: "JS_ASYNC_FROM_SYNC_ITERATOR_TYPE",
2107: "JS_ATOMICS_MUTEX_TYPE",
2108: "JS_COLLATOR_TYPE",
2109: "JS_CONTEXT_EXTENSION_OBJECT_TYPE",
2110: "JS_DATE_TYPE",
2111: "JS_DATE_TIME_FORMAT_TYPE",
2112: "JS_DISPLAY_NAMES_TYPE",
2113: "JS_ERROR_TYPE",
2114: "JS_EXTERNAL_OBJECT_TYPE",
2115: "JS_FINALIZATION_REGISTRY_TYPE",
2116: "JS_LIST_FORMAT_TYPE",
2117: "JS_LOCALE_TYPE",
2118: "JS_MESSAGE_OBJECT_TYPE",
2119: "JS_NUMBER_FORMAT_TYPE",
2120: "JS_PLURAL_RULES_TYPE",
2121: "JS_REG_EXP_TYPE",
2122: "JS_REG_EXP_STRING_ITERATOR_TYPE",
2123: "JS_RELATIVE_TIME_FORMAT_TYPE",
2124: "JS_SEGMENT_ITERATOR_TYPE",
2125: "JS_SEGMENTER_TYPE",
2126: "JS_SEGMENTS_TYPE",
2127: "JS_SHADOW_REALM_TYPE",
2128: "JS_SHARED_ARRAY_TYPE",
2129: "JS_SHARED_STRUCT_TYPE",
2130: "JS_STRING_ITERATOR_TYPE",
2131: "JS_TEMPORAL_CALENDAR_TYPE",
2132: "JS_TEMPORAL_DURATION_TYPE",
2133: "JS_TEMPORAL_INSTANT_TYPE",
2134: "JS_TEMPORAL_PLAIN_DATE_TYPE",
2135: "JS_TEMPORAL_PLAIN_DATE_TIME_TYPE",
2136: "JS_TEMPORAL_PLAIN_MONTH_DAY_TYPE",
2137: "JS_TEMPORAL_PLAIN_TIME_TYPE",
2138: "JS_TEMPORAL_PLAIN_YEAR_MONTH_TYPE",
2139: "JS_TEMPORAL_TIME_ZONE_TYPE",
2140: "JS_TEMPORAL_ZONED_DATE_TIME_TYPE",
2141: "JS_V8_BREAK_ITERATOR_TYPE",
2142: "JS_WEAK_REF_TYPE",
2143: "WASM_EXCEPTION_PACKAGE_TYPE",
2144: "WASM_GLOBAL_OBJECT_TYPE",
2145: "WASM_INSTANCE_OBJECT_TYPE",
2146: "WASM_MEMORY_OBJECT_TYPE",
2147: "WASM_MODULE_OBJECT_TYPE",
2148: "WASM_SUSPENDER_OBJECT_TYPE",
2149: "WASM_TABLE_OBJECT_TYPE",
2150: "WASM_TAG_OBJECT_TYPE",
2151: "WASM_VALUE_OBJECT_TYPE",
2101: "JS_ATOMICS_CONDITION_TYPE",
2102: "JS_ATOMICS_MUTEX_TYPE",
2103: "JS_WEAK_MAP_TYPE",
2104: "JS_WEAK_SET_TYPE",
2105: "JS_ARGUMENTS_OBJECT_TYPE",
2106: "JS_ARRAY_TYPE",
2107: "JS_ARRAY_ITERATOR_TYPE",
2108: "JS_ASYNC_FROM_SYNC_ITERATOR_TYPE",
2109: "JS_COLLATOR_TYPE",
2110: "JS_CONTEXT_EXTENSION_OBJECT_TYPE",
2111: "JS_DATE_TYPE",
2112: "JS_DATE_TIME_FORMAT_TYPE",
2113: "JS_DISPLAY_NAMES_TYPE",
2114: "JS_ERROR_TYPE",
2115: "JS_EXTERNAL_OBJECT_TYPE",
2116: "JS_FINALIZATION_REGISTRY_TYPE",
2117: "JS_LIST_FORMAT_TYPE",
2118: "JS_LOCALE_TYPE",
2119: "JS_MESSAGE_OBJECT_TYPE",
2120: "JS_NUMBER_FORMAT_TYPE",
2121: "JS_PLURAL_RULES_TYPE",
2122: "JS_REG_EXP_TYPE",
2123: "JS_REG_EXP_STRING_ITERATOR_TYPE",
2124: "JS_RELATIVE_TIME_FORMAT_TYPE",
2125: "JS_SEGMENT_ITERATOR_TYPE",
2126: "JS_SEGMENTER_TYPE",
2127: "JS_SEGMENTS_TYPE",
2128: "JS_SHADOW_REALM_TYPE",
2129: "JS_SHARED_ARRAY_TYPE",
2130: "JS_SHARED_STRUCT_TYPE",
2131: "JS_STRING_ITERATOR_TYPE",
2132: "JS_TEMPORAL_CALENDAR_TYPE",
2133: "JS_TEMPORAL_DURATION_TYPE",
2134: "JS_TEMPORAL_INSTANT_TYPE",
2135: "JS_TEMPORAL_PLAIN_DATE_TYPE",
2136: "JS_TEMPORAL_PLAIN_DATE_TIME_TYPE",
2137: "JS_TEMPORAL_PLAIN_MONTH_DAY_TYPE",
2138: "JS_TEMPORAL_PLAIN_TIME_TYPE",
2139: "JS_TEMPORAL_PLAIN_YEAR_MONTH_TYPE",
2140: "JS_TEMPORAL_TIME_ZONE_TYPE",
2141: "JS_TEMPORAL_ZONED_DATE_TIME_TYPE",
2142: "JS_V8_BREAK_ITERATOR_TYPE",
2143: "JS_WEAK_REF_TYPE",
2144: "WASM_EXCEPTION_PACKAGE_TYPE",
2145: "WASM_GLOBAL_OBJECT_TYPE",
2146: "WASM_INSTANCE_OBJECT_TYPE",
2147: "WASM_MEMORY_OBJECT_TYPE",
2148: "WASM_MODULE_OBJECT_TYPE",
2149: "WASM_SUSPENDER_OBJECT_TYPE",
2150: "WASM_TABLE_OBJECT_TYPE",
2151: "WASM_TAG_OBJECT_TYPE",
2152: "WASM_VALUE_OBJECT_TYPE",
}
# List of known V8 maps.
......@@ -454,8 +455,8 @@ KNOWN_MAPS = {
("read_only_space", 0x07e75): (138, "StoreHandler1Map"),
("read_only_space", 0x07e9d): (138, "StoreHandler2Map"),
("read_only_space", 0x07ec5): (138, "StoreHandler3Map"),
("map_space", 0x02139): (2114, "ExternalMap"),
("map_space", 0x02161): (2118, "JSMessageObjectMap"),
("map_space", 0x02139): (2115, "ExternalMap"),
("map_space", 0x02161): (2119, "JSMessageObjectMap"),
}
# List of known V8 objects.
......