Commit bfa18f9e authored by Marja Hölttä, committed by V8 LUCI CQ

[rab/gsab] Move OOB check to ValidateIntegerTypedArray

The OOB check belongs in ValidateIntegerTypedArray according to the
spec.

This also fixes the error type for Atomics operations on OOB TypedArrays:
they should throw a TypeError, not a RangeError.
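
For illustration, a minimal sketch of the user-visible change (not part of
this CL; assumes an engine with resizable ArrayBuffer support and d8-style
print):

  const rab = new ArrayBuffer(4 * Int32Array.BYTES_PER_ELEMENT,
                              {maxByteLength: 8 * Int32Array.BYTES_PER_ELEMENT});
  const fixedLength = new Int32Array(rab, 0, 4);
  rab.resize(2 * Int32Array.BYTES_PER_ELEMENT);  // fixedLength is now OOB.
  try {
    Atomics.load(fixedLength, 0);
  } catch (e) {
    // Before this change: RangeError. After: TypeError, matching the spec's
    // ValidateIntegerTypedArray.
    print(e instanceof TypeError);  // true
  }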

Bug: v8:11111
Change-Id: Ice2e5695d69d84b2c20a4cf8f06880673d901a91
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3676859
Commit-Queue: Marja Hölttä <marja@chromium.org>
Reviewed-by: Shu-yu Guo <syg@chromium.org>
Cr-Commit-Position: refs/heads/main@{#80898}
parent 8721f45b
@@ -68,9 +68,8 @@ void SharedArrayBufferBuiltinsAssembler::ValidateIntegerTypedArray(
   GotoIfNot(IsJSTypedArrayMap(map), &invalid);
   TNode<JSTypedArray> array = CAST(maybe_array);
-  // Fail if the array's JSArrayBuffer is detached.
-  TNode<JSArrayBuffer> array_buffer = GetTypedArrayBuffer(context, array);
-  GotoIf(IsDetachedBuffer(array_buffer), detached);
+  // Fail if the array's JSArrayBuffer is detached / out of bounds.
+  GotoIf(IsJSArrayBufferViewDetachedOrOutOfBoundsBoolean(array), detached);
   // Fail if the array's element type is float32, float64 or clamped.
   static_assert(INT8_ELEMENTS < FLOAT32_ELEMENTS);
@@ -97,6 +96,7 @@ void SharedArrayBufferBuiltinsAssembler::ValidateIntegerTypedArray(
   BIND(&not_float_or_clamped);
   *out_elements_kind = elements_kind;
+  TNode<JSArrayBuffer> array_buffer = GetTypedArrayBuffer(context, array);
   TNode<RawPtrT> backing_store = LoadJSArrayBufferBackingStorePtr(array_buffer);
   TNode<UintPtrT> byte_offset = LoadJSArrayBufferViewByteOffset(array);
   *out_backing_store = RawPtrAdd(backing_store, Signed(byte_offset));
@@ -106,13 +106,13 @@ void SharedArrayBufferBuiltinsAssembler::ValidateIntegerTypedArray(
 // ValidateAtomicAccess( typedArray, requestIndex )
 TNode<UintPtrT> SharedArrayBufferBuiltinsAssembler::ValidateAtomicAccess(
     TNode<JSTypedArray> array, TNode<Object> index, TNode<Context> context) {
-  Label done(this), range_error(this);
+  Label done(this), range_error(this), unreachable(this);
   // 1. Assert: typedArray is an Object that has a [[ViewedArrayBuffer]]
   // internal slot.
   // 2. Let length be IntegerIndexedObjectLength(typedArray);
   TNode<UintPtrT> array_length =
-      LoadJSTypedArrayLengthAndCheckDetached(array, &range_error);
+      LoadJSTypedArrayLengthAndCheckDetached(array, &unreachable);
   // 3. Let accessIndex be ? ToIndex(requestIndex).
   TNode<UintPtrT> index_uintptr = ToIndex(context, index, &range_error);
@@ -121,6 +121,10 @@ TNode<UintPtrT> SharedArrayBufferBuiltinsAssembler::ValidateAtomicAccess(
   // 5. If accessIndex ≥ length, throw a RangeError exception.
   Branch(UintPtrLessThan(index_uintptr, array_length), &done, &range_error);
+  BIND(&unreachable);
+  // This should not happen, since we've just called ValidateIntegerTypedArray.
+  Unreachable();
   BIND(&range_error);
   ThrowRangeError(context, MessageTemplate::kInvalidAtomicAccessIndex);
@@ -55,7 +55,7 @@ V8_WARN_UNUSED_RESULT MaybeHandle<JSTypedArray> ValidateIntegerTypedArray(
   if (object->IsJSTypedArray()) {
     Handle<JSTypedArray> typed_array = Handle<JSTypedArray>::cast(object);
-    if (typed_array->WasDetached()) {
+    if (typed_array->IsDetachedOrOutOfBounds()) {
       THROW_NEW_ERROR(
           isolate,
           NewTypeError(
@@ -95,26 +95,26 @@ const notSharedErrorMessage =
   // Shrink so that fixed length TAs go out of bounds.
   rab.resize(3 * ctor.BYTES_PER_ELEMENT);
-  AssertAtomicsOperationsThrow(fixedLength, 0, RangeError);
-  AssertAtomicsOperationsThrow(fixedLengthWithOffset, 0, RangeError);
+  AssertAtomicsOperationsThrow(fixedLength, 0, TypeError);
+  AssertAtomicsOperationsThrow(fixedLengthWithOffset, 0, TypeError);
   TestAtomicsOperations(lengthTracking, 0);
   TestAtomicsOperations(lengthTrackingWithOffset, 0);
   // Shrink so that the TAs with offset go out of bounds.
   rab.resize(1 * ctor.BYTES_PER_ELEMENT);
-  AssertAtomicsOperationsThrow(fixedLength, 0, RangeError);
-  AssertAtomicsOperationsThrow(fixedLengthWithOffset, 0, RangeError);
-  AssertAtomicsOperationsThrow(lengthTrackingWithOffset, 0, RangeError);
+  AssertAtomicsOperationsThrow(fixedLength, 0, TypeError);
+  AssertAtomicsOperationsThrow(fixedLengthWithOffset, 0, TypeError);
+  AssertAtomicsOperationsThrow(lengthTrackingWithOffset, 0, TypeError);
   TestAtomicsOperations(lengthTracking, 0);
   // Shrink to zero.
   rab.resize(0);
-  AssertAtomicsOperationsThrow(fixedLength, 0, RangeError);
-  AssertAtomicsOperationsThrow(fixedLengthWithOffset, 0, RangeError);
+  AssertAtomicsOperationsThrow(fixedLength, 0, TypeError);
+  AssertAtomicsOperationsThrow(fixedLengthWithOffset, 0, TypeError);
   AssertAtomicsOperationsThrow(lengthTracking, 0, RangeError);
-  AssertAtomicsOperationsThrow(lengthTrackingWithOffset, 0, RangeError);
+  AssertAtomicsOperationsThrow(lengthTrackingWithOffset, 0, TypeError);
   // Grow so that all TAs are back in-bounds.
   rab.resize(6 * ctor.BYTES_PER_ELEMENT);
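
Note on the test expectations above (a sketch with hypothetical setup, not
taken from the test file; the variable names mirror the test's): after
rab.resize(0), a length-tracking view with no offset stays in-bounds with
length 0, so ValidateAtomicAccess throws a RangeError for index 0, while any
fixed-length view or view with a non-zero offset is out of bounds and now
gets a TypeError from ValidateIntegerTypedArray.

  const rab = new ArrayBuffer(4 * Int32Array.BYTES_PER_ELEMENT,
                              {maxByteLength: 8 * Int32Array.BYTES_PER_ELEMENT});
  const lengthTracking = new Int32Array(rab);  // no offset, tracks rab's length
  const lengthTrackingWithOffset =
      new Int32Array(rab, 2 * Int32Array.BYTES_PER_ELEMENT);
  rab.resize(0);
  // Atomics.load(lengthTracking, 0)           -> RangeError (0 >= length 0)
  // Atomics.load(lengthTrackingWithOffset, 0) -> TypeError (view is OOB)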