Commit 65200967 authored by binji, committed by Commit bot

Reland moving Atomics builtins to C++

This reverts the previous revert, commit
5a04f4fd.

Previously reverted changes:

>    Revert "[SAB] Move Atomics builtins to C++"
>
>     This reverts commit 2b9840d8.
>
>     Revert "[SAB] Remove unreachable Uint8Clamped atomics paths"
>
>     This reverts commit d1160fb1.
>
>     Revert "Remove tiny unit test for MinSimple/MaxSimple"
>
>     This reverts commit 837760ec.
>
>     Revert "Remove infrastructure for experimental JS natives"
>
>     This reverts commit 8cfe45b6.

These changes were reverted to address a perf regression on a Chrome
bot. Since then, the regression has reappeared and then disappeared
again, all from seemingly unrelated changes.

BUG=v8:6033
TBR=adamk@chromium.org,hpayer@chromium.org,yangguo@chromium.org

Review-Url: https://codereview.chromium.org/2732213005
Cr-Commit-Position: refs/heads/master@{#43758}
parent aba151b9
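
For orientation before the diff: the JavaScript-visible surface this commit moves from experimental JS natives into C++ builtins is the Atomics namespace installed in bootstrapper.cc below. A minimal usage sketch (plain spec-level JavaScript, illustrative only, not code from this change):

// Shared memory that can be mapped into several workers.
var sab = new SharedArrayBuffer(16);
var i32 = new Int32Array(sab);

Atomics.store(i32, 0, 5);               // writes 5
Atomics.add(i32, 0, 2);                 // returns the old value 5; the cell now holds 7
Atomics.compareExchange(i32, 0, 7, 42); // returns 7; the cell now holds 42
Atomics.isLockFree(4);                  // true; sizes 1, 2 and 4 are lock-free in this port
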
......@@ -523,43 +523,6 @@ action("js2c") {
}
}
action("js2c_experimental") {
visibility = [ ":*" ] # Only targets in this file can depend on this.
script = "tools/js2c.py"
# The script depends on this other script, this rule causes a rebuild if it
# changes.
inputs = [
"tools/jsmin.py",
]
# NOSORT
sources = [
"src/js/macros.py",
"src/messages.h",
"src/js/harmony-atomics.js",
]
outputs = [
"$target_gen_dir/experimental-libraries.cc",
]
args = [
rebase_path("$target_gen_dir/experimental-libraries.cc",
root_build_dir),
"EXPERIMENTAL",
] + rebase_path(sources, root_build_dir)
if (v8_use_external_startup_data) {
outputs += [ "$target_gen_dir/libraries_experimental.bin" ]
args += [
"--startup_blob",
rebase_path("$target_gen_dir/libraries_experimental.bin", root_build_dir),
]
}
}
action("js2c_extras") {
visibility = [ ":*" ] # Only targets in this file can depend on this.
......@@ -670,7 +633,6 @@ if (v8_use_external_startup_data) {
deps = [
":js2c",
":js2c_experimental",
":js2c_experimental_extras",
":js2c_extras",
]
......@@ -678,7 +640,6 @@ if (v8_use_external_startup_data) {
# NOSORT
sources = [
"$target_gen_dir/libraries.bin",
"$target_gen_dir/libraries_experimental.bin",
"$target_gen_dir/libraries_extras.bin",
"$target_gen_dir/libraries_experimental_extras.bin",
]
......@@ -841,7 +802,6 @@ v8_source_set("v8_nosnapshot") {
deps = [
":js2c",
":js2c_experimental",
":js2c_experimental_extras",
":js2c_extras",
":v8_base",
......@@ -849,7 +809,6 @@ v8_source_set("v8_nosnapshot") {
sources = [
"$target_gen_dir/experimental-extras-libraries.cc",
"$target_gen_dir/experimental-libraries.cc",
"$target_gen_dir/extras-libraries.cc",
"$target_gen_dir/libraries.cc",
"src/snapshot/snapshot-empty.cc",
......@@ -868,7 +827,6 @@ v8_source_set("v8_snapshot") {
deps = [
":js2c",
":js2c_experimental",
":js2c_experimental_extras",
":js2c_extras",
":v8_base",
......@@ -881,7 +839,6 @@ v8_source_set("v8_snapshot") {
sources = [
"$target_gen_dir/experimental-extras-libraries.cc",
"$target_gen_dir/experimental-libraries.cc",
"$target_gen_dir/extras-libraries.cc",
"$target_gen_dir/libraries.cc",
"$target_gen_dir/snapshot.cc",
......@@ -896,7 +853,6 @@ if (v8_use_external_startup_data) {
deps = [
":js2c",
":js2c_experimental",
":js2c_experimental_extras",
":js2c_extras",
":v8_base",
......
......@@ -57,8 +57,6 @@ Handle<String> Bootstrapper::SourceLookup(int index) {
template Handle<String> Bootstrapper::SourceLookup<Natives>(int index);
template Handle<String> Bootstrapper::SourceLookup<ExperimentalNatives>(
int index);
template Handle<String> Bootstrapper::SourceLookup<ExperimentalExtraNatives>(
int index);
template Handle<String> Bootstrapper::SourceLookup<ExtraNatives>(int index);
......@@ -133,7 +131,6 @@ void DeleteNativeSources(Object* maybe_array) {
void Bootstrapper::TearDown() {
DeleteNativeSources(Natives::GetSourceCache(isolate_->heap()));
DeleteNativeSources(ExperimentalNatives::GetSourceCache(isolate_->heap()));
DeleteNativeSources(ExtraNatives::GetSourceCache(isolate_->heap()));
DeleteNativeSources(
ExperimentalExtraNatives::GetSourceCache(isolate_->heap()));
......@@ -231,7 +228,6 @@ class Genesis BASE_EMBEDDED {
void InstallTypedArray(const char* name, ElementsKind elements_kind,
Handle<JSFunction>* fun);
bool InstallExperimentalNatives();
bool InstallExtraNatives();
bool InstallExperimentalExtraNatives();
bool InstallDebuggerNatives();
......@@ -3096,19 +3092,6 @@ bool Bootstrapper::CompileBuiltin(Isolate* isolate, int index) {
}
bool Bootstrapper::CompileExperimentalBuiltin(Isolate* isolate, int index) {
HandleScope scope(isolate);
Vector<const char> name = ExperimentalNatives::GetScriptName(index);
Handle<String> source_code =
isolate->bootstrapper()->SourceLookup<ExperimentalNatives>(index);
Handle<Object> global = isolate->global_object();
Handle<Object> utils = isolate->natives_utils_object();
Handle<Object> args[] = {global, utils};
return Bootstrapper::CompileNative(isolate, name, source_code,
arraysize(args), args, NATIVES_CODE);
}
bool Bootstrapper::CompileExtraBuiltin(Isolate* isolate, int index) {
HandleScope scope(isolate);
Vector<const char> name = ExtraNatives::GetScriptName(index);
......@@ -3760,14 +3743,36 @@ void Genesis::InitializeGlobal_harmony_sharedarraybuffer() {
Handle<JSObject> atomics_object = factory->NewJSObject(cons, TENURED);
DCHECK(atomics_object->IsJSObject());
JSObject::AddProperty(global, name, atomics_object, DONT_ENUM);
JSObject::AddProperty(atomics_object, factory->to_string_tag_symbol(), name,
static_cast<PropertyAttributes>(DONT_ENUM | READ_ONLY));
SimpleInstallFunction(atomics_object, factory->InternalizeUtf8String("load"),
Builtins::kAtomicsLoad, 2, true);
SimpleInstallFunction(atomics_object, factory->InternalizeUtf8String("store"),
Builtins::kAtomicsStore, 3, true);
SimpleInstallFunction(atomics_object, factory->InternalizeUtf8String("add"),
Builtins::kAtomicsAdd, 3, true);
SimpleInstallFunction(atomics_object, factory->InternalizeUtf8String("sub"),
Builtins::kAtomicsSub, 3, true);
SimpleInstallFunction(atomics_object, factory->InternalizeUtf8String("and"),
Builtins::kAtomicsAnd, 3, true);
SimpleInstallFunction(atomics_object, factory->InternalizeUtf8String("or"),
Builtins::kAtomicsOr, 3, true);
SimpleInstallFunction(atomics_object, factory->InternalizeUtf8String("xor"),
Builtins::kAtomicsXor, 3, true);
SimpleInstallFunction(atomics_object,
factory->InternalizeUtf8String("exchange"),
Builtins::kAtomicsExchange, 3, true);
SimpleInstallFunction(atomics_object,
factory->InternalizeUtf8String("compareExchange"),
Builtins::kAtomicsCompareExchange, 4, true);
SimpleInstallFunction(atomics_object,
factory->InternalizeUtf8String("isLockFree"),
Builtins::kAtomicsIsLockFree, 1, true);
SimpleInstallFunction(atomics_object, factory->InternalizeUtf8String("wait"),
Builtins::kAtomicsWait, 4, true);
SimpleInstallFunction(atomics_object, factory->InternalizeUtf8String("wake"),
Builtins::kAtomicsWake, 3, true);
}
void Genesis::InitializeGlobal_harmony_array_prototype_values() {
......@@ -4378,58 +4383,6 @@ bool Genesis::InstallNatives(GlobalContextType context_type) {
return true;
}
bool Genesis::InstallExperimentalNatives() {
static const char* harmony_tailcalls_natives[] = {nullptr};
static const char* harmony_sharedarraybuffer_natives[] = {
"native harmony-atomics.js", NULL};
static const char* harmony_do_expressions_natives[] = {nullptr};
static const char* harmony_regexp_lookbehind_natives[] = {nullptr};
static const char* harmony_regexp_named_captures_natives[] = {nullptr};
static const char* harmony_regexp_property_natives[] = {nullptr};
static const char* harmony_function_sent_natives[] = {nullptr};
static const char* harmony_array_prototype_values_natives[] = {nullptr};
#ifdef V8_I18N_SUPPORT
static const char* icu_case_mapping_natives[] = {nullptr};
static const char* datetime_format_to_parts_natives[] = {nullptr};
#endif
static const char* harmony_restrictive_generators_natives[] = {nullptr};
static const char* harmony_trailing_commas_natives[] = {nullptr};
static const char* harmony_function_tostring_natives[] = {nullptr};
static const char* harmony_class_fields_natives[] = {nullptr};
static const char* harmony_object_rest_spread_natives[] = {nullptr};
static const char* harmony_async_iteration_natives[] = {nullptr};
static const char* harmony_dynamic_import_natives[] = {nullptr};
static const char* harmony_promise_finally_natives[] = {nullptr};
static const char* harmony_template_escapes_natives[] = {nullptr};
for (int i = ExperimentalNatives::GetDebuggerCount();
i < ExperimentalNatives::GetBuiltinsCount(); i++) {
#define INSTALL_EXPERIMENTAL_NATIVES(id, desc) \
if (FLAG_##id) { \
for (size_t j = 0; id##_natives[j] != NULL; j++) { \
Vector<const char> script_name = ExperimentalNatives::GetScriptName(i); \
if (strncmp(script_name.start(), id##_natives[j], \
script_name.length()) == 0) { \
if (!Bootstrapper::CompileExperimentalBuiltin(isolate(), i)) { \
return false; \
} \
} \
} \
}
HARMONY_INPROGRESS(INSTALL_EXPERIMENTAL_NATIVES);
HARMONY_STAGED(INSTALL_EXPERIMENTAL_NATIVES);
HARMONY_SHIPPING(INSTALL_EXPERIMENTAL_NATIVES);
#undef INSTALL_EXPERIMENTAL_NATIVES
}
if (!CallUtilsFunction(isolate(), "PostExperimentals")) return false;
InstallExperimentalBuiltinFunctionIds();
return true;
}
bool Genesis::InstallExtraNatives() {
HandleScope scope(isolate());
......@@ -4461,7 +4414,7 @@ bool Genesis::InstallDebuggerNatives() {
for (int i = 0; i < Natives::GetDebuggerCount(); ++i) {
if (!Bootstrapper::CompileBuiltin(isolate(), i)) return false;
}
return CallUtilsFunction(isolate(), "PostDebug");
return true;
}
......@@ -4499,26 +4452,6 @@ void Genesis::InstallBuiltinFunctionIds() {
}
}
void Genesis::InstallExperimentalBuiltinFunctionIds() {
if (FLAG_harmony_sharedarraybuffer) {
struct BuiltinFunctionIds {
const char* holder_expr;
const char* fun_name;
BuiltinFunctionId id;
};
const BuiltinFunctionIds atomic_builtins[] = {
ATOMIC_FUNCTIONS_WITH_ID_LIST(INSTALL_BUILTIN_ID)};
for (const BuiltinFunctionIds& builtin : atomic_builtins) {
Handle<JSObject> holder =
ResolveBuiltinIdHolder(native_context(), builtin.holder_expr);
InstallBuiltinFunctionId(holder, builtin.fun_name, builtin.id);
}
}
}
#undef INSTALL_BUILTIN_ID
......@@ -5029,7 +4962,6 @@ Genesis::Genesis(
if (context_type == FULL_CONTEXT) {
if (!isolate->serializer_enabled()) {
InitializeExperimentalGlobal();
if (!InstallExperimentalNatives()) return;
if (FLAG_experimental_extras) {
if (!InstallExperimentalExtraNatives()) return;
......@@ -5044,8 +4976,6 @@ Genesis::Genesis(
native_context()->set_string_function_prototype_map(
string_function_prototype->map());
}
// The serializer cannot serialize typed arrays. Reset those typed arrays
// for each new context.
} else if (context_type == DEBUG_CONTEXT) {
DCHECK(!isolate->serializer_enabled());
InitializeExperimentalGlobal();
......
......@@ -116,7 +116,6 @@ class Bootstrapper final {
Handle<String> source, int argc,
Handle<Object> argv[], NativesFlag natives_flag);
static bool CompileBuiltin(Isolate* isolate, int index);
static bool CompileExperimentalBuiltin(Isolate* isolate, int index);
static bool CompileExtraBuiltin(Isolate* isolate, int index);
static bool CompileExperimentalExtraBuiltin(Isolate* isolate, int index);
......
......@@ -2,10 +2,19 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/base/macros.h"
#include "src/base/platform/mutex.h"
#include "src/base/platform/time.h"
#include "src/builtins/builtins-utils.h"
#include "src/builtins/builtins.h"
#include "src/code-factory.h"
#include "src/code-stub-assembler.h"
#include "src/conversions-inl.h"
#include "src/counters.h"
#include "src/factory.h"
#include "src/futex-emulation.h"
#include "src/globals.h"
#include "src/objects-inl.h"
namespace v8 {
namespace internal {
......@@ -337,5 +346,601 @@ TF_BUILTIN(AtomicsExchange, SharedArrayBufferBuiltinsAssembler) {
// || V8_TARGET_ARCH_PPC || V8_TARGET_ARCH_S390 || V8_TARGET_ARCH_S390X
}
inline bool AtomicIsLockFree(uint32_t size) {
return size == 1 || size == 2 || size == 4;
}
// ES #sec-atomics.islockfree
BUILTIN(AtomicsIsLockFree) {
HandleScope scope(isolate);
Handle<Object> size = args.atOrUndefined(isolate, 1);
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, size, Object::ToNumber(size));
return *isolate->factory()->ToBoolean(AtomicIsLockFree(size->Number()));
}
// ES #sec-validatesharedintegertypedarray
MUST_USE_RESULT MaybeHandle<JSTypedArray> ValidateSharedIntegerTypedArray(
Isolate* isolate, Handle<Object> object, bool only_int32 = false) {
if (object->IsJSTypedArray()) {
Handle<JSTypedArray> typed_array = Handle<JSTypedArray>::cast(object);
if (typed_array->GetBuffer()->is_shared()) {
if (only_int32) {
if (typed_array->type() == kExternalInt32Array) return typed_array;
} else {
if (typed_array->type() != kExternalFloat32Array &&
typed_array->type() != kExternalFloat64Array &&
typed_array->type() != kExternalUint8ClampedArray)
return typed_array;
}
}
}
THROW_NEW_ERROR(
isolate,
NewTypeError(only_int32 ? MessageTemplate::kNotInt32SharedTypedArray
: MessageTemplate::kNotIntegerSharedTypedArray,
object),
JSTypedArray);
}
// ES #sec-validateatomicaccess
// ValidateAtomicAccess( typedArray, requestIndex )
MUST_USE_RESULT Maybe<size_t> ValidateAtomicAccess(
Isolate* isolate, Handle<JSTypedArray> typed_array,
Handle<Object> request_index) {
// TODO(v8:5961): Use ToIndex for indexes
ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, request_index,
Object::ToNumber(request_index),
Nothing<size_t>());
Handle<Object> offset;
ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, offset,
Object::ToInteger(isolate, request_index),
Nothing<size_t>());
if (!request_index->SameValue(*offset)) {
isolate->Throw(*isolate->factory()->NewRangeError(
MessageTemplate::kInvalidAtomicAccessIndex));
return Nothing<size_t>();
}
size_t access_index;
uint32_t length = typed_array->length_value();
if (!TryNumberToSize(*request_index, &access_index) ||
access_index >= length) {
isolate->Throw(*isolate->factory()->NewRangeError(
MessageTemplate::kInvalidAtomicAccessIndex));
return Nothing<size_t>();
}
return Just<size_t>(access_index);
}
// ES #sec-atomics.wake
// Atomics.wake( typedArray, index, count )
BUILTIN(AtomicsWake) {
HandleScope scope(isolate);
Handle<Object> array = args.atOrUndefined(isolate, 1);
Handle<Object> index = args.atOrUndefined(isolate, 2);
Handle<Object> count = args.atOrUndefined(isolate, 3);
Handle<JSTypedArray> sta;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, sta, ValidateSharedIntegerTypedArray(isolate, array, true));
Maybe<size_t> maybe_index = ValidateAtomicAccess(isolate, sta, index);
if (maybe_index.IsNothing()) return isolate->heap()->exception();
size_t i = maybe_index.FromJust();
uint32_t c;
if (count->IsUndefined(isolate)) {
c = kMaxUInt32;
} else {
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, count,
Object::ToInteger(isolate, count));
double count_double = count->Number();
if (count_double < 0)
count_double = 0;
else if (count_double > kMaxUInt32)
count_double = kMaxUInt32;
c = static_cast<uint32_t>(count_double);
}
Handle<JSArrayBuffer> array_buffer = sta->GetBuffer();
size_t addr = (i << 2) + NumberToSize(sta->byte_offset());
return FutexEmulation::Wake(isolate, array_buffer, addr, c);
}
// ES #sec-atomics.wait
// Atomics.wait( typedArray, index, value, timeout )
BUILTIN(AtomicsWait) {
HandleScope scope(isolate);
Handle<Object> array = args.atOrUndefined(isolate, 1);
Handle<Object> index = args.atOrUndefined(isolate, 2);
Handle<Object> value = args.atOrUndefined(isolate, 3);
Handle<Object> timeout = args.atOrUndefined(isolate, 4);
Handle<JSTypedArray> sta;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, sta, ValidateSharedIntegerTypedArray(isolate, array, true));
Maybe<size_t> maybe_index = ValidateAtomicAccess(isolate, sta, index);
if (maybe_index.IsNothing()) return isolate->heap()->exception();
size_t i = maybe_index.FromJust();
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, value,
Object::ToInt32(isolate, value));
int32_t value_int32 = NumberToInt32(*value);
double timeout_number;
if (timeout->IsUndefined(isolate)) {
timeout_number = isolate->heap()->infinity_value()->Number();
} else {
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, timeout,
Object::ToNumber(timeout));
timeout_number = timeout->Number();
if (std::isnan(timeout_number))
timeout_number = isolate->heap()->infinity_value()->Number();
else if (timeout_number < 0)
timeout_number = 0;
}
if (!isolate->allow_atomics_wait()) {
THROW_NEW_ERROR_RETURN_FAILURE(
isolate, NewTypeError(MessageTemplate::kAtomicsWaitNotAllowed));
}
Handle<JSArrayBuffer> array_buffer = sta->GetBuffer();
size_t addr = (i << 2) + NumberToSize(sta->byte_offset());
return FutexEmulation::Wait(isolate, array_buffer, addr, value_int32,
timeout_number);
}
namespace {
#if V8_CC_GNU
template <typename T>
inline T CompareExchangeSeqCst(T* p, T oldval, T newval) {
(void)__atomic_compare_exchange_n(p, &oldval, newval, 0, __ATOMIC_SEQ_CST,
__ATOMIC_SEQ_CST);
return oldval;
}
template <typename T>
inline T AddSeqCst(T* p, T value) {
return __atomic_fetch_add(p, value, __ATOMIC_SEQ_CST);
}
template <typename T>
inline T SubSeqCst(T* p, T value) {
return __atomic_fetch_sub(p, value, __ATOMIC_SEQ_CST);
}
template <typename T>
inline T AndSeqCst(T* p, T value) {
return __atomic_fetch_and(p, value, __ATOMIC_SEQ_CST);
}
template <typename T>
inline T OrSeqCst(T* p, T value) {
return __atomic_fetch_or(p, value, __ATOMIC_SEQ_CST);
}
template <typename T>
inline T XorSeqCst(T* p, T value) {
return __atomic_fetch_xor(p, value, __ATOMIC_SEQ_CST);
}
#elif V8_CC_MSVC
#define InterlockedCompareExchange32 _InterlockedCompareExchange
#define InterlockedExchange32 _InterlockedExchange
#define InterlockedExchangeAdd32 _InterlockedExchangeAdd
#define InterlockedAnd32 _InterlockedAnd
#define InterlockedOr32 _InterlockedOr
#define InterlockedXor32 _InterlockedXor
#define InterlockedExchangeAdd16 _InterlockedExchangeAdd16
#define InterlockedCompareExchange8 _InterlockedCompareExchange8
#define InterlockedExchangeAdd8 _InterlockedExchangeAdd8
#define ATOMIC_OPS(type, suffix, vctype) \
inline type AddSeqCst(type* p, type value) { \
return InterlockedExchangeAdd##suffix(reinterpret_cast<vctype*>(p), \
bit_cast<vctype>(value)); \
} \
inline type SubSeqCst(type* p, type value) { \
return InterlockedExchangeAdd##suffix(reinterpret_cast<vctype*>(p), \
-bit_cast<vctype>(value)); \
} \
inline type AndSeqCst(type* p, type value) { \
return InterlockedAnd##suffix(reinterpret_cast<vctype*>(p), \
bit_cast<vctype>(value)); \
} \
inline type OrSeqCst(type* p, type value) { \
return InterlockedOr##suffix(reinterpret_cast<vctype*>(p), \
bit_cast<vctype>(value)); \
} \
inline type XorSeqCst(type* p, type value) { \
return InterlockedXor##suffix(reinterpret_cast<vctype*>(p), \
bit_cast<vctype>(value)); \
} \
inline type CompareExchangeSeqCst(type* p, type oldval, type newval) { \
return InterlockedCompareExchange##suffix(reinterpret_cast<vctype*>(p), \
bit_cast<vctype>(newval), \
bit_cast<vctype>(oldval)); \
}
ATOMIC_OPS(int8_t, 8, char)
ATOMIC_OPS(uint8_t, 8, char)
ATOMIC_OPS(int16_t, 16, short) /* NOLINT(runtime/int) */
ATOMIC_OPS(uint16_t, 16, short) /* NOLINT(runtime/int) */
ATOMIC_OPS(int32_t, 32, long) /* NOLINT(runtime/int) */
ATOMIC_OPS(uint32_t, 32, long) /* NOLINT(runtime/int) */
#undef ATOMIC_OPS_INTEGER
#undef ATOMIC_OPS
#undef InterlockedCompareExchange32
#undef InterlockedExchange32
#undef InterlockedExchangeAdd32
#undef InterlockedAnd32
#undef InterlockedOr32
#undef InterlockedXor32
#undef InterlockedExchangeAdd16
#undef InterlockedCompareExchange8
#undef InterlockedExchangeAdd8
#else
#error Unsupported platform!
#endif
template <typename T>
T FromObject(Handle<Object> number);
template <>
inline uint8_t FromObject<uint8_t>(Handle<Object> number) {
return NumberToUint32(*number);
}
template <>
inline int8_t FromObject<int8_t>(Handle<Object> number) {
return NumberToInt32(*number);
}
template <>
inline uint16_t FromObject<uint16_t>(Handle<Object> number) {
return NumberToUint32(*number);
}
template <>
inline int16_t FromObject<int16_t>(Handle<Object> number) {
return NumberToInt32(*number);
}
template <>
inline uint32_t FromObject<uint32_t>(Handle<Object> number) {
return NumberToUint32(*number);
}
template <>
inline int32_t FromObject<int32_t>(Handle<Object> number) {
return NumberToInt32(*number);
}
inline Object* ToObject(Isolate* isolate, int8_t t) { return Smi::FromInt(t); }
inline Object* ToObject(Isolate* isolate, uint8_t t) { return Smi::FromInt(t); }
inline Object* ToObject(Isolate* isolate, int16_t t) { return Smi::FromInt(t); }
inline Object* ToObject(Isolate* isolate, uint16_t t) {
return Smi::FromInt(t);
}
inline Object* ToObject(Isolate* isolate, int32_t t) {
return *isolate->factory()->NewNumber(t);
}
inline Object* ToObject(Isolate* isolate, uint32_t t) {
return *isolate->factory()->NewNumber(t);
}
template <typename T>
inline Object* DoCompareExchange(Isolate* isolate, void* buffer, size_t index,
Handle<Object> oldobj, Handle<Object> newobj) {
T oldval = FromObject<T>(oldobj);
T newval = FromObject<T>(newobj);
T result =
CompareExchangeSeqCst(static_cast<T*>(buffer) + index, oldval, newval);
return ToObject(isolate, result);
}
template <typename T>
inline Object* DoAdd(Isolate* isolate, void* buffer, size_t index,
Handle<Object> obj) {
T value = FromObject<T>(obj);
T result = AddSeqCst(static_cast<T*>(buffer) + index, value);
return ToObject(isolate, result);
}
template <typename T>
inline Object* DoSub(Isolate* isolate, void* buffer, size_t index,
Handle<Object> obj) {
T value = FromObject<T>(obj);
T result = SubSeqCst(static_cast<T*>(buffer) + index, value);
return ToObject(isolate, result);
}
template <typename T>
inline Object* DoAnd(Isolate* isolate, void* buffer, size_t index,
Handle<Object> obj) {
T value = FromObject<T>(obj);
T result = AndSeqCst(static_cast<T*>(buffer) + index, value);
return ToObject(isolate, result);
}
template <typename T>
inline Object* DoOr(Isolate* isolate, void* buffer, size_t index,
Handle<Object> obj) {
T value = FromObject<T>(obj);
T result = OrSeqCst(static_cast<T*>(buffer) + index, value);
return ToObject(isolate, result);
}
template <typename T>
inline Object* DoXor(Isolate* isolate, void* buffer, size_t index,
Handle<Object> obj) {
T value = FromObject<T>(obj);
T result = XorSeqCst(static_cast<T*>(buffer) + index, value);
return ToObject(isolate, result);
}
} // anonymous namespace
// Duplicated from objects.h
// V has parameters (Type, type, TYPE, C type, element_size)
#define INTEGER_TYPED_ARRAYS(V) \
V(Uint8, uint8, UINT8, uint8_t, 1) \
V(Int8, int8, INT8, int8_t, 1) \
V(Uint16, uint16, UINT16, uint16_t, 2) \
V(Int16, int16, INT16, int16_t, 2) \
V(Uint32, uint32, UINT32, uint32_t, 4) \
V(Int32, int32, INT32, int32_t, 4)
// ES #sec-atomics.compareexchange
// Atomics.compareExchange( typedArray, index, expectedValue, replacementValue )
BUILTIN(AtomicsCompareExchange) {
HandleScope scope(isolate);
Handle<Object> array = args.atOrUndefined(isolate, 1);
Handle<Object> index = args.atOrUndefined(isolate, 2);
Handle<Object> expected_value = args.atOrUndefined(isolate, 3);
Handle<Object> replacement_value = args.atOrUndefined(isolate, 4);
Handle<JSTypedArray> sta;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, sta, ValidateSharedIntegerTypedArray(isolate, array));
Maybe<size_t> maybe_index = ValidateAtomicAccess(isolate, sta, index);
if (maybe_index.IsNothing()) return isolate->heap()->exception();
size_t i = maybe_index.FromJust();
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, expected_value, Object::ToInteger(isolate, expected_value));
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, replacement_value,
Object::ToInteger(isolate, replacement_value));
uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
NumberToSize(sta->byte_offset());
switch (sta->type()) {
#define TYPED_ARRAY_CASE(Type, typeName, TYPE, ctype, size) \
case kExternal##Type##Array: \
return DoCompareExchange<ctype>(isolate, source, i, expected_value, \
replacement_value);
INTEGER_TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
default:
break;
}
UNREACHABLE();
return isolate->heap()->undefined_value();
}
// ES #sec-atomics.add
// Atomics.add( typedArray, index, value )
BUILTIN(AtomicsAdd) {
HandleScope scope(isolate);
Handle<Object> array = args.atOrUndefined(isolate, 1);
Handle<Object> index = args.atOrUndefined(isolate, 2);
Handle<Object> value = args.atOrUndefined(isolate, 3);
Handle<JSTypedArray> sta;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, sta, ValidateSharedIntegerTypedArray(isolate, array));
Maybe<size_t> maybe_index = ValidateAtomicAccess(isolate, sta, index);
if (maybe_index.IsNothing()) return isolate->heap()->exception();
size_t i = maybe_index.FromJust();
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, value,
Object::ToInteger(isolate, value));
uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
NumberToSize(sta->byte_offset());
switch (sta->type()) {
#define TYPED_ARRAY_CASE(Type, typeName, TYPE, ctype, size) \
case kExternal##Type##Array: \
return DoAdd<ctype>(isolate, source, i, value);
INTEGER_TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
default:
break;
}
UNREACHABLE();
return isolate->heap()->undefined_value();
}
// ES #sec-atomics.sub
// Atomics.sub( typedArray, index, value )
BUILTIN(AtomicsSub) {
HandleScope scope(isolate);
Handle<Object> array = args.atOrUndefined(isolate, 1);
Handle<Object> index = args.atOrUndefined(isolate, 2);
Handle<Object> value = args.atOrUndefined(isolate, 3);
Handle<JSTypedArray> sta;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, sta, ValidateSharedIntegerTypedArray(isolate, array));
Maybe<size_t> maybe_index = ValidateAtomicAccess(isolate, sta, index);
if (maybe_index.IsNothing()) return isolate->heap()->exception();
size_t i = maybe_index.FromJust();
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, value,
Object::ToInteger(isolate, value));
uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
NumberToSize(sta->byte_offset());
switch (sta->type()) {
#define TYPED_ARRAY_CASE(Type, typeName, TYPE, ctype, size) \
case kExternal##Type##Array: \
return DoSub<ctype>(isolate, source, i, value);
INTEGER_TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
default:
break;
}
UNREACHABLE();
return isolate->heap()->undefined_value();
}
// ES #sec-atomics.and
// Atomics.and( typedArray, index, value )
BUILTIN(AtomicsAnd) {
HandleScope scope(isolate);
Handle<Object> array = args.atOrUndefined(isolate, 1);
Handle<Object> index = args.atOrUndefined(isolate, 2);
Handle<Object> value = args.atOrUndefined(isolate, 3);
Handle<JSTypedArray> sta;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, sta, ValidateSharedIntegerTypedArray(isolate, array));
Maybe<size_t> maybe_index = ValidateAtomicAccess(isolate, sta, index);
if (maybe_index.IsNothing()) return isolate->heap()->exception();
size_t i = maybe_index.FromJust();
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, value,
Object::ToInteger(isolate, value));
uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
NumberToSize(sta->byte_offset());
switch (sta->type()) {
#define TYPED_ARRAY_CASE(Type, typeName, TYPE, ctype, size) \
case kExternal##Type##Array: \
return DoAnd<ctype>(isolate, source, i, value);
INTEGER_TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
default:
break;
}
UNREACHABLE();
return isolate->heap()->undefined_value();
}
// ES #sec-atomics.or
// Atomics.or( typedArray, index, value )
BUILTIN(AtomicsOr) {
HandleScope scope(isolate);
Handle<Object> array = args.atOrUndefined(isolate, 1);
Handle<Object> index = args.atOrUndefined(isolate, 2);
Handle<Object> value = args.atOrUndefined(isolate, 3);
Handle<JSTypedArray> sta;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, sta, ValidateSharedIntegerTypedArray(isolate, array));
Maybe<size_t> maybe_index = ValidateAtomicAccess(isolate, sta, index);
if (maybe_index.IsNothing()) return isolate->heap()->exception();
size_t i = maybe_index.FromJust();
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, value,
Object::ToInteger(isolate, value));
uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
NumberToSize(sta->byte_offset());
switch (sta->type()) {
#define TYPED_ARRAY_CASE(Type, typeName, TYPE, ctype, size) \
case kExternal##Type##Array: \
return DoOr<ctype>(isolate, source, i, value);
INTEGER_TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
default:
break;
}
UNREACHABLE();
return isolate->heap()->undefined_value();
}
// ES #sec-atomics.xor
// Atomics.xor( typedArray, index, value )
BUILTIN(AtomicsXor) {
HandleScope scope(isolate);
Handle<Object> array = args.atOrUndefined(isolate, 1);
Handle<Object> index = args.atOrUndefined(isolate, 2);
Handle<Object> value = args.atOrUndefined(isolate, 3);
Handle<JSTypedArray> sta;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, sta, ValidateSharedIntegerTypedArray(isolate, array));
Maybe<size_t> maybe_index = ValidateAtomicAccess(isolate, sta, index);
if (maybe_index.IsNothing()) return isolate->heap()->exception();
size_t i = maybe_index.FromJust();
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, value,
Object::ToInteger(isolate, value));
uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
NumberToSize(sta->byte_offset());
switch (sta->type()) {
#define TYPED_ARRAY_CASE(Type, typeName, TYPE, ctype, size) \
case kExternal##Type##Array: \
return DoXor<ctype>(isolate, source, i, value);
INTEGER_TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
default:
break;
}
UNREACHABLE();
return isolate->heap()->undefined_value();
}
} // namespace internal
} // namespace v8
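
Each BUILTIN body above follows the same shape: ValidateSharedIntegerTypedArray rejects anything that is not an integer-element view over a SharedArrayBuffer, ValidateAtomicAccess range-checks the index, the value is coerced with Object::ToInteger, and the operation dispatches on sta->type() through the INTEGER_TYPED_ARRAYS macro. A hedged JavaScript sketch of the resulting error behavior in this version (exact messages come from the MessageTemplate entries referenced above):

var sab = new SharedArrayBuffer(16);
var i32 = new Int32Array(sab);

Atomics.add(i32, 0, 1);                   // ok: returns the previous value
Atomics.add(new Int32Array(4), 0, 1);     // TypeError: the backing buffer is not shared
Atomics.add(new Float64Array(sab), 0, 1); // TypeError: float views are rejected even on shared memory
Atomics.add(i32, 100, 1);                 // RangeError: index out of bounds (kInvalidAtomicAccessIndex)
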
......@@ -734,6 +734,15 @@ class Isolate;
TFJ(AtomicsLoad, 2) \
TFJ(AtomicsStore, 3) \
TFJ(AtomicsExchange, 3) \
CPP(AtomicsCompareExchange) \
CPP(AtomicsAdd) \
CPP(AtomicsSub) \
CPP(AtomicsAnd) \
CPP(AtomicsOr) \
CPP(AtomicsXor) \
CPP(AtomicsIsLockFree) \
CPP(AtomicsWait) \
CPP(AtomicsWake) \
\
/* String */ \
ASM(StringConstructor) \
......
......@@ -11,17 +11,10 @@
var GlobalArray = global.Array;
var IsNaN = global.isNaN;
var JSONStringify = global.JSON.stringify;
var MapEntries;
var MapIteratorNext;
var SetIteratorNext;
var SetValues;
utils.Import(function(from) {
MapEntries = from.MapEntries;
MapIteratorNext = from.MapIteratorNext;
SetIteratorNext = from.SetIteratorNext;
SetValues = from.SetValues;
});
var MapEntries = global.Map.prototype.entries;
var MapIteratorNext = (new global.Map).entries().next;
var SetIteratorNext = (new global.Set).values().next;
var SetValues = global.Set.prototype.values;
// ----------------------------------------------------------------------------
......
......@@ -2766,9 +2766,6 @@ void Heap::CreateInitialObjects() {
set_natives_source_cache(
*factory->NewFixedArray(Natives::GetBuiltinsCount()));
set_experimental_natives_source_cache(
*factory->NewFixedArray(ExperimentalNatives::GetBuiltinsCount()));
set_extra_natives_source_cache(
*factory->NewFixedArray(ExtraNatives::GetBuiltinsCount()));
......
......@@ -178,8 +178,6 @@ using v8::MemoryPressureLevel;
V(Object, instanceof_cache_map, InstanceofCacheMap) \
V(Object, instanceof_cache_answer, InstanceofCacheAnswer) \
V(FixedArray, natives_source_cache, NativesSourceCache) \
V(FixedArray, experimental_natives_source_cache, \
ExperimentalNativesSourceCache) \
V(FixedArray, extra_natives_source_cache, ExtraNativesSourceCache) \
V(FixedArray, experimental_extra_natives_source_cache, \
ExperimentalExtraNativesSourceCache) \
......
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
(function(global, utils) {
"use strict";
%CheckIsBootstrapping();
// -------------------------------------------------------------------
// Imports
var GlobalObject = global.Object;
var MaxSimple;
var MinSimple;
var toStringTagSymbol = utils.ImportNow("to_string_tag_symbol");
utils.Import(function(from) {
MaxSimple = from.MaxSimple;
MinSimple = from.MinSimple;
});
// -------------------------------------------------------------------
function CheckSharedIntegerTypedArray(ia) {
if (!%IsSharedIntegerTypedArray(ia)) {
throw %make_type_error(kNotIntegerSharedTypedArray, ia);
}
}
function CheckSharedInteger32TypedArray(ia) {
CheckSharedIntegerTypedArray(ia);
if (!%IsSharedInteger32TypedArray(ia)) {
throw %make_type_error(kNotInt32SharedTypedArray, ia);
}
}
// https://tc39.github.io/ecmascript_sharedmem/shmem.html#Atomics.ValidateAtomicAccess
function ValidateIndex(index, length) {
var numberIndex = TO_NUMBER(index);
var accessIndex = TO_INTEGER(numberIndex);
if (numberIndex !== accessIndex) {
throw %make_range_error(kInvalidAtomicAccessIndex);
}
if (accessIndex < 0 || accessIndex >= length) {
throw %make_range_error(kInvalidAtomicAccessIndex);
}
return accessIndex;
}
//-------------------------------------------------------------------
function AtomicsCompareExchangeJS(sta, index, oldValue, newValue) {
CheckSharedIntegerTypedArray(sta);
index = ValidateIndex(index, %_TypedArrayGetLength(sta));
oldValue = TO_NUMBER(oldValue);
newValue = TO_NUMBER(newValue);
return %_AtomicsCompareExchange(sta, index, oldValue, newValue);
}
function AtomicsAddJS(ia, index, value) {
CheckSharedIntegerTypedArray(ia);
index = ValidateIndex(index, %_TypedArrayGetLength(ia));
value = TO_NUMBER(value);
return %_AtomicsAdd(ia, index, value);
}
function AtomicsSubJS(ia, index, value) {
CheckSharedIntegerTypedArray(ia);
index = ValidateIndex(index, %_TypedArrayGetLength(ia));
value = TO_NUMBER(value);
return %_AtomicsSub(ia, index, value);
}
function AtomicsAndJS(ia, index, value) {
CheckSharedIntegerTypedArray(ia);
index = ValidateIndex(index, %_TypedArrayGetLength(ia));
value = TO_NUMBER(value);
return %_AtomicsAnd(ia, index, value);
}
function AtomicsOrJS(ia, index, value) {
CheckSharedIntegerTypedArray(ia);
index = ValidateIndex(index, %_TypedArrayGetLength(ia));
value = TO_NUMBER(value);
return %_AtomicsOr(ia, index, value);
}
function AtomicsXorJS(ia, index, value) {
CheckSharedIntegerTypedArray(ia);
index = ValidateIndex(index, %_TypedArrayGetLength(ia));
value = TO_NUMBER(value);
return %_AtomicsXor(ia, index, value);
}
function AtomicsIsLockFreeJS(size) {
return %_AtomicsIsLockFree(TO_INTEGER(size));
}
function AtomicsWaitJS(ia, index, value, timeout) {
CheckSharedInteger32TypedArray(ia);
index = ValidateIndex(index, %_TypedArrayGetLength(ia));
if (IS_UNDEFINED(timeout)) {
timeout = INFINITY;
} else {
timeout = TO_NUMBER(timeout);
if (NUMBER_IS_NAN(timeout)) {
timeout = INFINITY;
} else {
timeout = MaxSimple(0, timeout);
}
}
return %AtomicsWait(ia, index, value, timeout);
}
function AtomicsWakeJS(ia, index, count) {
CheckSharedInteger32TypedArray(ia);
index = ValidateIndex(index, %_TypedArrayGetLength(ia));
if (IS_UNDEFINED(count)) {
count = kMaxUint32;
} else {
// Clamp to [0, kMaxUint32].
count = MinSimple(MaxSimple(0, TO_INTEGER(count)), kMaxUint32);
}
return %AtomicsWake(ia, index, count);
}
// -------------------------------------------------------------------
var Atomics = global.Atomics;
// The Atomics global is defined by the bootstrapper.
%AddNamedProperty(Atomics, toStringTagSymbol, "Atomics", READ_ONLY | DONT_ENUM);
utils.InstallFunctions(Atomics, DONT_ENUM, [
// TODO(binji): remove the rest of the (non futex) Atomics functions as they
// become builtins.
"compareExchange", AtomicsCompareExchangeJS,
"add", AtomicsAddJS,
"sub", AtomicsSubJS,
"and", AtomicsAndJS,
"or", AtomicsOrJS,
"xor", AtomicsXorJS,
"isLockFree", AtomicsIsLockFreeJS,
"wait", AtomicsWaitJS,
"wake", AtomicsWakeJS,
]);
})
......@@ -12,33 +12,18 @@
// Utils
var imports = UNDEFINED;
var imports_from_experimental = UNDEFINED;
var exports_container = %ExportFromRuntime({});
var typed_array_setup = UNDEFINED;
// Register context value to be initialized with a typed array in
// Genesis::InitializeBuiltinTypedArrays.
function SetupTypedArray(f) {
f.next = typed_array_setup;
typed_array_setup = f;
}
// Export to other scripts.
// In normal natives, this exports functions to other normal natives.
// In experimental natives, this exports to other experimental natives and
// to normal natives that import using utils.ImportFromExperimental.
function Export(f) {
f(exports_container);
}
// Import from other scripts. The actual importing happens in PostNatives and
// PostExperimental so that we can import from scripts executed later. However,
// that means that the import is not available until the very end. If the
// import needs to be available immediate, use ImportNow.
// In normal natives, this imports from other normal natives.
// In experimental natives, this imports from other experimental natives and
// whitelisted exports from normal natives.
// Import from other scripts. The actual importing happens in PostNatives so
// that we can import from scripts executed later. However, that means that
// the import is not available until the very end. If the import needs to be
// available immediately, use ImportNow.
function Import(f) {
f.next = imports;
imports = f;
......@@ -53,14 +38,6 @@ function ImportNow(name) {
}
// In normal natives, import from experimental natives.
// Not callable from experimental natives.
function ImportFromExperimental(f) {
f.next = imports_from_experimental;
imports_from_experimental = f;
}
function SetFunctionName(f, name, prefix) {
if (IS_SYMBOL(name)) {
name = "[" + %SymbolDescription(name) + "]";
......@@ -165,82 +142,13 @@ function PostNatives(utils) {
imports(exports_container);
}
// Whitelist of exports from normal natives to experimental natives and debug.
var expose_list = [
"FormatDateToParts",
"MapEntries",
"MapIteratorNext",
"MaxSimple",
"MinSimple",
"SetIteratorNext",
"SetValues",
"ToLocaleLowerCaseI18N",
"ToLocaleUpperCaseI18N",
"ToLowerCaseI18N",
"ToUpperCaseI18N",
// From runtime:
"promise_result_symbol",
"promise_state_symbol",
"reflect_apply",
"to_string_tag_symbol",
];
var filtered_exports = {};
%OptimizeObjectForAddingMultipleProperties(
filtered_exports, expose_list.length);
for (var key of expose_list) {
filtered_exports[key] = exports_container[key];
}
%ToFastProperties(filtered_exports);
exports_container = filtered_exports;
utils.PostNatives = UNDEFINED;
utils.ImportFromExperimental = UNDEFINED;
}
function PostExperimentals(utils) {
%CheckIsBootstrapping();
for ( ; !IS_UNDEFINED(imports); imports = imports.next) {
imports(exports_container);
}
for ( ; !IS_UNDEFINED(imports_from_experimental);
imports_from_experimental = imports_from_experimental.next) {
imports_from_experimental(exports_container);
}
utils.Export = UNDEFINED;
utils.PostDebug = UNDEFINED;
utils.PostExperimentals = UNDEFINED;
typed_array_setup = UNDEFINED;
}
function PostDebug(utils) {
for ( ; !IS_UNDEFINED(imports); imports = imports.next) {
imports(exports_container);
}
exports_container = UNDEFINED;
utils.Export = UNDEFINED;
utils.Import = UNDEFINED;
utils.ImportNow = UNDEFINED;
utils.PostDebug = UNDEFINED;
utils.PostExperimentals = UNDEFINED;
typed_array_setup = UNDEFINED;
}
function InitializeBuiltinTypedArrays(utils, rng_state, rempio2result) {
var setup_list = typed_array_setup;
for ( ; !IS_UNDEFINED(setup_list); setup_list = setup_list.next) {
setup_list(rng_state, rempio2result);
}
utils.PostNatives = UNDEFINED;
}
// -----------------------------------------------------------------------
%OptimizeObjectForAddingMultipleProperties(utils, 14);
......@@ -248,7 +156,6 @@ function InitializeBuiltinTypedArrays(utils, rng_state, rempio2result) {
utils.Import = Import;
utils.ImportNow = ImportNow;
utils.Export = Export;
utils.ImportFromExperimental = ImportFromExperimental;
utils.SetFunctionName = SetFunctionName;
utils.InstallConstants = InstallConstants;
utils.InstallFunctions = InstallFunctions;
......@@ -256,8 +163,6 @@ utils.InstallGetter = InstallGetter;
utils.OverrideFunction = OverrideFunction;
utils.SetUpLockedPrototype = SetUpLockedPrototype;
utils.PostNatives = PostNatives;
utils.PostExperimentals = PostExperimentals;
utils.PostDebug = PostDebug;
%ToFastProperties(utils);
......
......@@ -19,44 +19,8 @@ namespace internal {
namespace {
inline bool AtomicIsLockFree(uint32_t size) {
return size == 1 || size == 2 || size == 4;
}
#if V8_CC_GNU
template <typename T>
inline T CompareExchangeSeqCst(T* p, T oldval, T newval) {
(void)__atomic_compare_exchange_n(p, &oldval, newval, 0, __ATOMIC_SEQ_CST,
__ATOMIC_SEQ_CST);
return oldval;
}
template <typename T>
inline T AddSeqCst(T* p, T value) {
return __atomic_fetch_add(p, value, __ATOMIC_SEQ_CST);
}
template <typename T>
inline T SubSeqCst(T* p, T value) {
return __atomic_fetch_sub(p, value, __ATOMIC_SEQ_CST);
}
template <typename T>
inline T AndSeqCst(T* p, T value) {
return __atomic_fetch_and(p, value, __ATOMIC_SEQ_CST);
}
template <typename T>
inline T OrSeqCst(T* p, T value) {
return __atomic_fetch_or(p, value, __ATOMIC_SEQ_CST);
}
template <typename T>
inline T XorSeqCst(T* p, T value) {
return __atomic_fetch_xor(p, value, __ATOMIC_SEQ_CST);
}
template <typename T>
inline T ExchangeSeqCst(T* p, T value) {
return __atomic_exchange_n(p, value, __ATOMIC_SEQ_CST);
......@@ -64,47 +28,13 @@ inline T ExchangeSeqCst(T* p, T value) {
#elif V8_CC_MSVC
#define InterlockedCompareExchange32 _InterlockedCompareExchange
#define InterlockedExchange32 _InterlockedExchange
#define InterlockedExchangeAdd32 _InterlockedExchangeAdd
#define InterlockedAnd32 _InterlockedAnd
#define InterlockedOr32 _InterlockedOr
#define InterlockedXor32 _InterlockedXor
#define InterlockedExchangeAdd16 _InterlockedExchangeAdd16
#define InterlockedCompareExchange8 _InterlockedCompareExchange8
#define InterlockedExchangeAdd8 _InterlockedExchangeAdd8
#define ATOMIC_OPS(type, suffix, vctype) \
inline type AddSeqCst(type* p, type value) { \
return InterlockedExchangeAdd##suffix(reinterpret_cast<vctype*>(p), \
bit_cast<vctype>(value)); \
} \
inline type SubSeqCst(type* p, type value) { \
return InterlockedExchangeAdd##suffix(reinterpret_cast<vctype*>(p), \
-bit_cast<vctype>(value)); \
} \
inline type AndSeqCst(type* p, type value) { \
return InterlockedAnd##suffix(reinterpret_cast<vctype*>(p), \
bit_cast<vctype>(value)); \
} \
inline type OrSeqCst(type* p, type value) { \
return InterlockedOr##suffix(reinterpret_cast<vctype*>(p), \
bit_cast<vctype>(value)); \
} \
inline type XorSeqCst(type* p, type value) { \
return InterlockedXor##suffix(reinterpret_cast<vctype*>(p), \
bit_cast<vctype>(value)); \
} \
inline type ExchangeSeqCst(type* p, type value) { \
return InterlockedExchange##suffix(reinterpret_cast<vctype*>(p), \
bit_cast<vctype>(value)); \
} \
\
inline type CompareExchangeSeqCst(type* p, type oldval, type newval) { \
return InterlockedCompareExchange##suffix(reinterpret_cast<vctype*>(p), \
bit_cast<vctype>(newval), \
bit_cast<vctype>(oldval)); \
}
ATOMIC_OPS(int8_t, 8, char)
ATOMIC_OPS(uint8_t, 8, char)
......@@ -116,15 +46,7 @@ ATOMIC_OPS(uint32_t, 32, long) /* NOLINT(runtime/int) */
#undef ATOMIC_OPS_INTEGER
#undef ATOMIC_OPS
#undef InterlockedCompareExchange32
#undef InterlockedExchange32
#undef InterlockedExchangeAdd32
#undef InterlockedAnd32
#undef InterlockedOr32
#undef InterlockedXor32
#undef InterlockedExchangeAdd16
#undef InterlockedCompareExchange8
#undef InterlockedExchangeAdd8
#else
......@@ -176,73 +98,14 @@ inline Object* ToObject(Isolate* isolate, uint16_t t) {
return Smi::FromInt(t);
}
inline Object* ToObject(Isolate* isolate, int32_t t) {
return *isolate->factory()->NewNumber(t);
}
inline Object* ToObject(Isolate* isolate, uint32_t t) {
return *isolate->factory()->NewNumber(t);
}
template <typename T>
inline Object* DoCompareExchange(Isolate* isolate, void* buffer, size_t index,
Handle<Object> oldobj, Handle<Object> newobj) {
T oldval = FromObject<T>(oldobj);
T newval = FromObject<T>(newobj);
T result =
CompareExchangeSeqCst(static_cast<T*>(buffer) + index, oldval, newval);
return ToObject(isolate, result);
}
template <typename T>
inline Object* DoAdd(Isolate* isolate, void* buffer, size_t index,
Handle<Object> obj) {
T value = FromObject<T>(obj);
T result = AddSeqCst(static_cast<T*>(buffer) + index, value);
return ToObject(isolate, result);
}
template <typename T>
inline Object* DoSub(Isolate* isolate, void* buffer, size_t index,
Handle<Object> obj) {
T value = FromObject<T>(obj);
T result = SubSeqCst(static_cast<T*>(buffer) + index, value);
return ToObject(isolate, result);
}
template <typename T>
inline Object* DoAnd(Isolate* isolate, void* buffer, size_t index,
Handle<Object> obj) {
T value = FromObject<T>(obj);
T result = AndSeqCst(static_cast<T*>(buffer) + index, value);
return ToObject(isolate, result);
}
template <typename T>
inline Object* DoOr(Isolate* isolate, void* buffer, size_t index,
Handle<Object> obj) {
T value = FromObject<T>(obj);
T result = OrSeqCst(static_cast<T*>(buffer) + index, value);
return ToObject(isolate, result);
}
template <typename T>
inline Object* DoXor(Isolate* isolate, void* buffer, size_t index,
Handle<Object> obj) {
T value = FromObject<T>(obj);
T result = XorSeqCst(static_cast<T*>(buffer) + index, value);
return ToObject(isolate, result);
}
template <typename T>
inline Object* DoExchange(Isolate* isolate, void* buffer, size_t index,
Handle<Object> obj) {
......@@ -251,66 +114,6 @@ inline Object* DoExchange(Isolate* isolate, void* buffer, size_t index,
return ToObject(isolate, result);
}
// Uint8Clamped functions
uint8_t ClampToUint8(int32_t value) {
if (value < 0) return 0;
if (value > 255) return 255;
return value;
}
inline Object* DoCompareExchangeUint8Clamped(Isolate* isolate, void* buffer,
size_t index,
Handle<Object> oldobj,
Handle<Object> newobj) {
typedef int32_t convert_type;
uint8_t oldval = ClampToUint8(FromObject<convert_type>(oldobj));
uint8_t newval = ClampToUint8(FromObject<convert_type>(newobj));
uint8_t result = CompareExchangeSeqCst(static_cast<uint8_t*>(buffer) + index,
oldval, newval);
return ToObject(isolate, result);
}
#define DO_UINT8_CLAMPED_OP(name, op) \
inline Object* Do##name##Uint8Clamped(Isolate* isolate, void* buffer, \
size_t index, Handle<Object> obj) { \
typedef int32_t convert_type; \
uint8_t* p = static_cast<uint8_t*>(buffer) + index; \
convert_type operand = FromObject<convert_type>(obj); \
uint8_t expected; \
uint8_t result; \
do { \
expected = *p; \
result = ClampToUint8(static_cast<convert_type>(expected) op operand); \
} while (CompareExchangeSeqCst(p, expected, result) != expected); \
return ToObject(isolate, expected); \
}
DO_UINT8_CLAMPED_OP(Add, +)
DO_UINT8_CLAMPED_OP(Sub, -)
DO_UINT8_CLAMPED_OP(And, &)
DO_UINT8_CLAMPED_OP(Or, | )
DO_UINT8_CLAMPED_OP(Xor, ^)
#undef DO_UINT8_CLAMPED_OP
inline Object* DoExchangeUint8Clamped(Isolate* isolate, void* buffer,
size_t index, Handle<Object> obj) {
typedef int32_t convert_type;
uint8_t* p = static_cast<uint8_t*>(buffer) + index;
uint8_t result = ClampToUint8(FromObject<convert_type>(obj));
uint8_t expected;
do {
expected = *p;
} while (CompareExchangeSeqCst(p, expected, result) != expected);
return ToObject(isolate, expected);
}
} // anonymous namespace
// Duplicated from objects.h
......@@ -347,200 +150,6 @@ RUNTIME_FUNCTION(Runtime_ThrowInvalidAtomicAccessIndexError) {
isolate, NewRangeError(MessageTemplate::kInvalidAtomicAccessIndex));
}
RUNTIME_FUNCTION(Runtime_AtomicsCompareExchange) {
HandleScope scope(isolate);
DCHECK_EQ(4, args.length());
CONVERT_ARG_HANDLE_CHECKED(JSTypedArray, sta, 0);
CONVERT_SIZE_ARG_CHECKED(index, 1);
CONVERT_NUMBER_ARG_HANDLE_CHECKED(oldobj, 2);
CONVERT_NUMBER_ARG_HANDLE_CHECKED(newobj, 3);
CHECK(sta->GetBuffer()->is_shared());
CHECK_LT(index, NumberToSize(sta->length()));
uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
NumberToSize(sta->byte_offset());
switch (sta->type()) {
#define TYPED_ARRAY_CASE(Type, typeName, TYPE, ctype, size) \
case kExternal##Type##Array: \
return DoCompareExchange<ctype>(isolate, source, index, oldobj, newobj);
INTEGER_TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
case kExternalUint8ClampedArray:
return DoCompareExchangeUint8Clamped(isolate, source, index, oldobj,
newobj);
default:
break;
}
UNREACHABLE();
return isolate->heap()->undefined_value();
}
RUNTIME_FUNCTION(Runtime_AtomicsAdd) {
HandleScope scope(isolate);
DCHECK_EQ(3, args.length());
CONVERT_ARG_HANDLE_CHECKED(JSTypedArray, sta, 0);
CONVERT_SIZE_ARG_CHECKED(index, 1);
CONVERT_NUMBER_ARG_HANDLE_CHECKED(value, 2);
CHECK(sta->GetBuffer()->is_shared());
CHECK_LT(index, NumberToSize(sta->length()));
uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
NumberToSize(sta->byte_offset());
switch (sta->type()) {
#define TYPED_ARRAY_CASE(Type, typeName, TYPE, ctype, size) \
case kExternal##Type##Array: \
return DoAdd<ctype>(isolate, source, index, value);
INTEGER_TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
case kExternalUint8ClampedArray:
return DoAddUint8Clamped(isolate, source, index, value);
default:
break;
}
UNREACHABLE();
return isolate->heap()->undefined_value();
}
RUNTIME_FUNCTION(Runtime_AtomicsSub) {
HandleScope scope(isolate);
DCHECK_EQ(3, args.length());
CONVERT_ARG_HANDLE_CHECKED(JSTypedArray, sta, 0);
CONVERT_SIZE_ARG_CHECKED(index, 1);
CONVERT_NUMBER_ARG_HANDLE_CHECKED(value, 2);
CHECK(sta->GetBuffer()->is_shared());
CHECK_LT(index, NumberToSize(sta->length()));
uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
NumberToSize(sta->byte_offset());
switch (sta->type()) {
#define TYPED_ARRAY_CASE(Type, typeName, TYPE, ctype, size) \
case kExternal##Type##Array: \
return DoSub<ctype>(isolate, source, index, value);
INTEGER_TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
case kExternalUint8ClampedArray:
return DoSubUint8Clamped(isolate, source, index, value);
default:
break;
}
UNREACHABLE();
return isolate->heap()->undefined_value();
}
RUNTIME_FUNCTION(Runtime_AtomicsAnd) {
HandleScope scope(isolate);
DCHECK_EQ(3, args.length());
CONVERT_ARG_HANDLE_CHECKED(JSTypedArray, sta, 0);
CONVERT_SIZE_ARG_CHECKED(index, 1);
CONVERT_NUMBER_ARG_HANDLE_CHECKED(value, 2);
CHECK(sta->GetBuffer()->is_shared());
CHECK_LT(index, NumberToSize(sta->length()));
uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
NumberToSize(sta->byte_offset());
switch (sta->type()) {
#define TYPED_ARRAY_CASE(Type, typeName, TYPE, ctype, size) \
case kExternal##Type##Array: \
return DoAnd<ctype>(isolate, source, index, value);
INTEGER_TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
case kExternalUint8ClampedArray:
return DoAndUint8Clamped(isolate, source, index, value);
default:
break;
}
UNREACHABLE();
return isolate->heap()->undefined_value();
}
RUNTIME_FUNCTION(Runtime_AtomicsOr) {
HandleScope scope(isolate);
DCHECK_EQ(3, args.length());
CONVERT_ARG_HANDLE_CHECKED(JSTypedArray, sta, 0);
CONVERT_SIZE_ARG_CHECKED(index, 1);
CONVERT_NUMBER_ARG_HANDLE_CHECKED(value, 2);
CHECK(sta->GetBuffer()->is_shared());
CHECK_LT(index, NumberToSize(sta->length()));
uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
NumberToSize(sta->byte_offset());
switch (sta->type()) {
#define TYPED_ARRAY_CASE(Type, typeName, TYPE, ctype, size) \
case kExternal##Type##Array: \
return DoOr<ctype>(isolate, source, index, value);
INTEGER_TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
case kExternalUint8ClampedArray:
return DoOrUint8Clamped(isolate, source, index, value);
default:
break;
}
UNREACHABLE();
return isolate->heap()->undefined_value();
}
RUNTIME_FUNCTION(Runtime_AtomicsXor) {
HandleScope scope(isolate);
DCHECK_EQ(3, args.length());
CONVERT_ARG_HANDLE_CHECKED(JSTypedArray, sta, 0);
CONVERT_SIZE_ARG_CHECKED(index, 1);
CONVERT_NUMBER_ARG_HANDLE_CHECKED(value, 2);
CHECK(sta->GetBuffer()->is_shared());
CHECK_LT(index, NumberToSize(sta->length()));
uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
NumberToSize(sta->byte_offset());
switch (sta->type()) {
#define TYPED_ARRAY_CASE(Type, typeName, TYPE, ctype, size) \
case kExternal##Type##Array: \
return DoXor<ctype>(isolate, source, index, value);
INTEGER_TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
case kExternalUint8ClampedArray:
return DoXorUint8Clamped(isolate, source, index, value);
default:
break;
}
UNREACHABLE();
return isolate->heap()->undefined_value();
}
RUNTIME_FUNCTION(Runtime_AtomicsExchange) {
HandleScope scope(isolate);
DCHECK_EQ(3, args.length());
......@@ -561,9 +170,6 @@ RUNTIME_FUNCTION(Runtime_AtomicsExchange) {
INTEGER_TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
case kExternalUint8ClampedArray:
return DoExchangeUint8Clamped(isolate, source, index, value);
default:
break;
}
......@@ -573,12 +179,5 @@ RUNTIME_FUNCTION(Runtime_AtomicsExchange) {
}
RUNTIME_FUNCTION(Runtime_AtomicsIsLockFree) {
HandleScope scope(isolate);
DCHECK_EQ(1, args.length());
CONVERT_NUMBER_ARG_HANDLE_CHECKED(size, 0);
uint32_t usize = NumberToUint32(*size);
return isolate->heap()->ToBoolean(AtomicIsLockFree(usize));
}
} // namespace internal
} // namespace v8
......@@ -17,44 +17,6 @@
namespace v8 {
namespace internal {
RUNTIME_FUNCTION(Runtime_AtomicsWait) {
HandleScope scope(isolate);
DCHECK_EQ(4, args.length());
CONVERT_ARG_HANDLE_CHECKED(JSTypedArray, sta, 0);
CONVERT_SIZE_ARG_CHECKED(index, 1);
CONVERT_INT32_ARG_CHECKED(value, 2);
CONVERT_DOUBLE_ARG_CHECKED(timeout, 3);
CHECK(sta->GetBuffer()->is_shared());
CHECK_LT(index, NumberToSize(sta->length()));
CHECK_EQ(sta->type(), kExternalInt32Array);
CHECK(timeout == V8_INFINITY || !std::isnan(timeout));
if (!isolate->allow_atomics_wait()) {
THROW_NEW_ERROR_RETURN_FAILURE(
isolate, NewTypeError(MessageTemplate::kAtomicsWaitNotAllowed));
}
Handle<JSArrayBuffer> array_buffer = sta->GetBuffer();
size_t addr = (index << 2) + NumberToSize(sta->byte_offset());
return FutexEmulation::Wait(isolate, array_buffer, addr, value, timeout);
}
RUNTIME_FUNCTION(Runtime_AtomicsWake) {
HandleScope scope(isolate);
DCHECK_EQ(3, args.length());
CONVERT_ARG_HANDLE_CHECKED(JSTypedArray, sta, 0);
CONVERT_SIZE_ARG_CHECKED(index, 1);
CONVERT_UINT32_ARG_CHECKED(count, 2);
CHECK(sta->GetBuffer()->is_shared());
CHECK_LT(index, NumberToSize(sta->length()));
CHECK_EQ(sta->type(), kExternalInt32Array);
Handle<JSArrayBuffer> array_buffer = sta->GetBuffer();
size_t addr = (index << 2) + NumberToSize(sta->byte_offset());
return FutexEmulation::Wake(isolate, array_buffer, addr, count);
}
RUNTIME_FUNCTION(Runtime_AtomicsNumWaitersForTesting) {
HandleScope scope(isolate);
......
......@@ -63,16 +63,7 @@ namespace internal {
F(ThrowNotIntegerSharedTypedArrayError, 1, 1) \
F(ThrowNotInt32SharedTypedArrayError, 1, 1) \
F(ThrowInvalidAtomicAccessIndexError, 0, 1) \
F(AtomicsCompareExchange, 4, 1) \
F(AtomicsAdd, 3, 1) \
F(AtomicsSub, 3, 1) \
F(AtomicsAnd, 3, 1) \
F(AtomicsOr, 3, 1) \
F(AtomicsXor, 3, 1) \
F(AtomicsExchange, 3, 1) \
F(AtomicsIsLockFree, 1, 1) \
F(AtomicsWait, 4, 1) \
F(AtomicsWake, 3, 1) \
F(AtomicsNumWaitersForTesting, 2, 1) \
F(SetAllowAtomicsWait, 1, 1)
......
......@@ -17,12 +17,6 @@ FixedArray* NativesCollection<CORE>::GetSourceCache(Heap* heap) {
}
template <>
FixedArray* NativesCollection<EXPERIMENTAL>::GetSourceCache(Heap* heap) {
return heap->experimental_natives_source_cache();
}
template <>
FixedArray* NativesCollection<EXTRAS>::GetSourceCache(Heap* heap) {
return heap->extra_natives_source_cache();
......
......@@ -151,8 +151,6 @@ void ReadNatives() {
if (natives_blob_ && NativesHolder<CORE>::empty()) {
SnapshotByteSource bytes(natives_blob_->data, natives_blob_->raw_size);
NativesHolder<CORE>::set(NativesStore::MakeFromScriptsSource(&bytes));
NativesHolder<EXPERIMENTAL>::set(
NativesStore::MakeFromScriptsSource(&bytes));
NativesHolder<EXTRAS>::set(NativesStore::MakeFromScriptsSource(&bytes));
NativesHolder<EXPERIMENTAL_EXTRAS>::set(
NativesStore::MakeFromScriptsSource(&bytes));
......@@ -181,7 +179,6 @@ void SetNativesFromFile(StartupData* natives_blob) {
*/
void DisposeNatives() {
NativesHolder<CORE>::Dispose();
NativesHolder<EXPERIMENTAL>::Dispose();
NativesHolder<EXTRAS>::Dispose();
NativesHolder<EXPERIMENTAL_EXTRAS>::Dispose();
}
......@@ -233,7 +230,6 @@ Vector<const char> NativesCollection<type>::GetScriptsSource() {
template Vector<const char> NativesCollection<T>::GetScriptName(int i); \
template Vector<const char> NativesCollection<T>::GetScriptsSource();
INSTANTIATE_TEMPLATES(CORE)
INSTANTIATE_TEMPLATES(EXPERIMENTAL)
INSTANTIATE_TEMPLATES(EXTRAS)
INSTANTIATE_TEMPLATES(EXPERIMENTAL_EXTRAS)
#undef INSTANTIATE_TEMPLATES
......
......@@ -15,7 +15,6 @@ namespace internal {
enum NativeType {
CORE,
EXPERIMENTAL,
EXTRAS,
EXPERIMENTAL_EXTRAS,
D8,
......@@ -54,7 +53,6 @@ class V8_EXPORT_PRIVATE NativesCollection {
};
typedef NativesCollection<CORE> Natives;
typedef NativesCollection<EXPERIMENTAL> ExperimentalNatives;
typedef NativesCollection<EXTRAS> ExtraNatives;
typedef NativesCollection<EXPERIMENTAL_EXTRAS> ExperimentalExtraNatives;
......
......@@ -171,7 +171,6 @@
],
'sources': [
'<(SHARED_INTERMEDIATE_DIR)/libraries.cc',
'<(SHARED_INTERMEDIATE_DIR)/experimental-libraries.cc',
'<(SHARED_INTERMEDIATE_DIR)/extras-libraries.cc',
'<(SHARED_INTERMEDIATE_DIR)/experimental-extras-libraries.cc',
'<(INTERMEDIATE_DIR)/snapshot.cc',
......@@ -230,7 +229,6 @@
],
'sources': [
'<(SHARED_INTERMEDIATE_DIR)/libraries.cc',
'<(SHARED_INTERMEDIATE_DIR)/experimental-libraries.cc',
'<(SHARED_INTERMEDIATE_DIR)/extras-libraries.cc',
'<(SHARED_INTERMEDIATE_DIR)/experimental-extras-libraries.cc',
'snapshot/snapshot-empty.cc',
......@@ -2221,7 +2219,6 @@
'inputs': [
'../tools/concatenate-files.py',
'<(SHARED_INTERMEDIATE_DIR)/libraries.bin',
'<(SHARED_INTERMEDIATE_DIR)/libraries-experimental.bin',
'<(SHARED_INTERMEDIATE_DIR)/libraries-extras.bin',
'<(SHARED_INTERMEDIATE_DIR)/libraries-experimental-extras.bin',
],
......@@ -2295,13 +2292,7 @@
'debug/debug.js',
'debug/liveedit.js',
],
'experimental_library_files': [
'js/macros.py',
'messages.h',
'js/harmony-atomics.js',
],
'libraries_bin_file': '<(SHARED_INTERMEDIATE_DIR)/libraries.bin',
'libraries_experimental_bin_file': '<(SHARED_INTERMEDIATE_DIR)/libraries-experimental.bin',
'libraries_extras_bin_file': '<(SHARED_INTERMEDIATE_DIR)/libraries-extras.bin',
'libraries_experimental_extras_bin_file': '<(SHARED_INTERMEDIATE_DIR)/libraries-experimental-extras.bin',
'conditions': [
......@@ -2343,38 +2334,6 @@
'--nojs',
],
},
{
'action_name': 'js2c_experimental',
'inputs': [
'../tools/js2c.py',
'<@(experimental_library_files)',
],
'outputs': ['<(SHARED_INTERMEDIATE_DIR)/experimental-libraries.cc'],
'action': [
'python',
'../tools/js2c.py',
'<(SHARED_INTERMEDIATE_DIR)/experimental-libraries.cc',
'EXPERIMENTAL',
'<@(experimental_library_files)',
],
},
{
'action_name': 'js2c_experimental_bin',
'inputs': [
'../tools/js2c.py',
'<@(experimental_library_files)',
],
'outputs': ['<@(libraries_experimental_bin_file)'],
'action': [
'python',
'../tools/js2c.py',
'<(SHARED_INTERMEDIATE_DIR)/experimental-libraries.cc',
'EXPERIMENTAL',
'<@(experimental_library_files)',
'--startup_blob', '<@(libraries_experimental_bin_file)',
'--nojs',
],
},
{
'action_name': 'js2c_extras',
'inputs': [
......
......@@ -530,8 +530,8 @@ TEST(SizeOfInitialHeap) {
// requires no extra space.
CompileRun("/*empty*/");
for (int i = FIRST_PAGED_SPACE; i <= LAST_PAGED_SPACE; i++) {
// Debug code can be very large, so skip CODE_SPACE if we are generating it.
if (i == CODE_SPACE && i::FLAG_debug_code) continue;
// Skip CODE_SPACE, since we had to generate code even for an empty script.
if (i == CODE_SPACE) continue;
CHECK_EQ(page_count[i], isolate->heap()->paged_space(i)->CountTotalPages());
}
......
......@@ -109,6 +109,14 @@
Atomics.wake(i32a, 0, Number.POSITIVE_INFINITY);
})();
// In a previous version, this test caused a check failure
(function TestObjectWaitValue() {
var sab = new SharedArrayBuffer(16);
var i32a = new Int32Array(sab);
assertEquals("timed-out", Atomics.wait(i32a, 0, Math, 0));
})();
//// WORKER ONLY TESTS
if (this.Worker) {
......
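
The new TestObjectWaitValue case above exercises the conversion path in the C++ AtomicsWait builtin: the expected value goes through Object::ToInt32, so Math coerces to NaN and then to 0, which matches the freshly zeroed cell, and the zero timeout makes the call return "timed-out". A minimal sketch of the wait/wake return values, assuming an agent that is allowed to block (otherwise the kAtomicsWaitNotAllowed TypeError above fires):

var sab = new SharedArrayBuffer(16);
var i32 = new Int32Array(sab);

Atomics.wait(i32, 0, 1, 0);   // "not-equal": the cell holds 0, not 1
Atomics.wait(i32, 0, 0, 10);  // "timed-out": value matches, no one wakes us within 10 ms
// From another worker, Atomics.wake(i32, 0, 1) releases one waiter, which then returns "ok".
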
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --expose-natives-as natives
// Test the MaxSimple and MinSimple internal methods in runtime.js
var MaxSimple = natives.ImportNow("MaxSimple");
var MinSimple = natives.ImportNow("MinSimple");
function checkEvaluations(target) {
var evaluations = 0;
var observedNumber = {
valueOf: function() {
evaluations++;
return 0;
}
};
target(observedNumber, observedNumber);
return evaluations;
}
assertEquals(1, MaxSimple(-1, 1));
assertEquals(2, checkEvaluations(MaxSimple));
assertEquals(-1, MinSimple(-1, 1));
assertEquals(2, checkEvaluations(MinSimple));