Commit 9468be4a authored by Santiago Aboy Solanes, committed by V8 LUCI CQ

[compiler] Add TSAN support for generated code tagged loads

In the same vein as we did for tagged stores, we can do tagged loads.

As a drive-by, move GetTSANRelaxedStoreStub to CodeFactory.

Bug: v8:7790, v8:11600
Change-Id: Ic1ef3245623756538eab64c3358047e3797195c1
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2953162
Commit-Queue: Santiago Aboy Solanes <solanes@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Andreas Haas <ahaas@chromium.org>
Cr-Commit-Position: refs/heads/master@{#75145}
parent 3a6ace55
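
For orientation, a minimal sketch of the mechanism (the helper name here is illustrative; the real helpers are the ones added in external-reference.cc below): TSAN cannot see memory accesses performed by generated code, so each instrumented tagged load branches to an out-of-line stub that calls into C++, and the C++ side repeats the access as a relaxed atomic load, which TSAN does observe.

// Sketch only: mirrors the shape of tsan_relaxed_load_32_bits further down.
// TSAN records the relaxed 32-bit load performed here on behalf of the
// generated code that reached this helper through the builtin.
base::Atomic32 ObserveRelaxedLoad32(Address addr) {
  return base::Relaxed_Load(reinterpret_cast<base::Atomic32*>(addr));
}
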
......@@ -50,6 +50,12 @@ namespace internal {
IF_TSAN(TFC, TSANRelaxedStore64IgnoreFP, TSANRelaxedStore) \
IF_TSAN(TFC, TSANRelaxedStore64SaveFP, TSANRelaxedStore) \
\
/* TSAN support for loads in generated code.*/ \
IF_TSAN(TFC, TSANRelaxedLoad32IgnoreFP, TSANRelaxedLoad) \
IF_TSAN(TFC, TSANRelaxedLoad32SaveFP, TSANRelaxedLoad) \
IF_TSAN(TFC, TSANRelaxedLoad64IgnoreFP, TSANRelaxedLoad) \
IF_TSAN(TFC, TSANRelaxedLoad64SaveFP, TSANRelaxedLoad) \
\
/* Adaptor for CPP builtin */ \
TFC(AdaptorWithBuiltinExitFrame, CppBuiltinAdaptor) \
\
......
......@@ -482,6 +482,49 @@ TF_BUILTIN(TSANRelaxedStore64IgnoreFP, TSANRelaxedStoreCodeStubAssembler) {
TF_BUILTIN(TSANRelaxedStore64SaveFP, TSANRelaxedStoreCodeStubAssembler) {
GenerateTSANRelaxedStore(SaveFPRegsMode::kSave, kInt64Size);
}
class TSANRelaxedLoadCodeStubAssembler : public CodeStubAssembler {
public:
explicit TSANRelaxedLoadCodeStubAssembler(compiler::CodeAssemblerState* state)
: CodeStubAssembler(state) {}
TNode<ExternalReference> GetExternalReference(int size) {
if (size == kInt32Size) {
return ExternalConstant(
ExternalReference::tsan_relaxed_load_function_32_bits());
} else {
CHECK_EQ(size, kInt64Size);
return ExternalConstant(
ExternalReference::tsan_relaxed_load_function_64_bits());
}
}
void GenerateTSANRelaxedLoad(SaveFPRegsMode fp_mode, int size) {
TNode<ExternalReference> function = GetExternalReference(size);
auto address =
UncheckedParameter<IntPtrT>(TSANRelaxedLoadDescriptor::kAddress);
CallCFunctionWithCallerSavedRegisters(
function, MachineType::Int32(), fp_mode,
std::make_pair(MachineType::IntPtr(), address));
Return(UndefinedConstant());
}
};
TF_BUILTIN(TSANRelaxedLoad32IgnoreFP, TSANRelaxedLoadCodeStubAssembler) {
GenerateTSANRelaxedLoad(SaveFPRegsMode::kIgnore, kInt32Size);
}
TF_BUILTIN(TSANRelaxedLoad32SaveFP, TSANRelaxedLoadCodeStubAssembler) {
GenerateTSANRelaxedLoad(SaveFPRegsMode::kSave, kInt32Size);
}
TF_BUILTIN(TSANRelaxedLoad64IgnoreFP, TSANRelaxedLoadCodeStubAssembler) {
GenerateTSANRelaxedLoad(SaveFPRegsMode::kIgnore, kInt64Size);
}
TF_BUILTIN(TSANRelaxedLoad64SaveFP, TSANRelaxedLoadCodeStubAssembler) {
GenerateTSANRelaxedLoad(SaveFPRegsMode::kSave, kInt64Size);
}
#endif // V8_IS_TSAN
class DeletePropertyBaseAssembler : public AccessorAssembler {
......
......@@ -137,29 +137,6 @@ class Builtins {
}
}
#ifdef V8_IS_TSAN
static Builtin GetTSANRelaxedStoreStub(SaveFPRegsMode fp_mode, int size) {
if (size == kInt8Size) {
return fp_mode == SaveFPRegsMode::kIgnore
? Builtin::kTSANRelaxedStore8IgnoreFP
: Builtin::kTSANRelaxedStore8SaveFP;
} else if (size == kInt16Size) {
return fp_mode == SaveFPRegsMode::kIgnore
? Builtin::kTSANRelaxedStore16IgnoreFP
: Builtin::kTSANRelaxedStore16SaveFP;
} else if (size == kInt32Size) {
return fp_mode == SaveFPRegsMode::kIgnore
? Builtin::kTSANRelaxedStore32IgnoreFP
: Builtin::kTSANRelaxedStore32SaveFP;
} else {
CHECK_EQ(size, kInt64Size);
return fp_mode == SaveFPRegsMode::kIgnore
? Builtin::kTSANRelaxedStore64IgnoreFP
: Builtin::kTSANRelaxedStore64SaveFP;
}
}
#endif // V8_IS_TSAN
// Convenience wrappers.
Handle<Code> CallFunction(ConvertReceiverMode = ConvertReceiverMode::kAny);
Handle<Code> Call(ConvertReceiverMode = ConvertReceiverMode::kAny);
......
......@@ -376,5 +376,43 @@ Callable CodeFactory::ArraySingleArgumentConstructor(
#undef CASE
}
#ifdef V8_IS_TSAN
// static
Builtin CodeFactory::GetTSANRelaxedStoreStub(SaveFPRegsMode fp_mode, int size) {
if (size == kInt8Size) {
return fp_mode == SaveFPRegsMode::kIgnore
? Builtin::kTSANRelaxedStore8IgnoreFP
: Builtin::kTSANRelaxedStore8SaveFP;
} else if (size == kInt16Size) {
return fp_mode == SaveFPRegsMode::kIgnore
? Builtin::kTSANRelaxedStore16IgnoreFP
: Builtin::kTSANRelaxedStore16SaveFP;
} else if (size == kInt32Size) {
return fp_mode == SaveFPRegsMode::kIgnore
? Builtin::kTSANRelaxedStore32IgnoreFP
: Builtin::kTSANRelaxedStore32SaveFP;
} else {
CHECK_EQ(size, kInt64Size);
return fp_mode == SaveFPRegsMode::kIgnore
? Builtin::kTSANRelaxedStore64IgnoreFP
: Builtin::kTSANRelaxedStore64SaveFP;
}
}
// static
Builtin CodeFactory::GetTSANRelaxedLoadStub(SaveFPRegsMode fp_mode, int size) {
if (size == kInt32Size) {
return fp_mode == SaveFPRegsMode::kIgnore
? Builtin::kTSANRelaxedLoad32IgnoreFP
: Builtin::kTSANRelaxedLoad32SaveFP;
} else {
CHECK_EQ(size, kInt64Size);
return fp_mode == SaveFPRegsMode::kIgnore
? Builtin::kTSANRelaxedLoad64IgnoreFP
: Builtin::kTSANRelaxedLoad64SaveFP;
}
}
#endif // V8_IS_TSAN
} // namespace internal
} // namespace v8
......@@ -88,6 +88,11 @@ class V8_EXPORT_PRIVATE CodeFactory final {
static Callable ArraySingleArgumentConstructor(
Isolate* isolate, ElementsKind kind,
AllocationSiteOverrideMode override_mode);
#ifdef V8_IS_TSAN
static Builtin GetTSANRelaxedStoreStub(SaveFPRegsMode fp_mode, int size);
static Builtin GetTSANRelaxedLoadStub(SaveFPRegsMode fp_mode, int size);
#endif // V8_IS_TSAN
};
} // namespace internal
......
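
A hedged usage sketch of the new CodeFactory entry point, mirroring the call site added to TurboAssembler::CallTSANRelaxedLoadStub further down (the SaveFPRegsMode and size arguments are illustrative):

// Pick the builtin matching the FP-save mode and access size, then call it
// like any other builtin (as the x64 macro assembler does below).
Builtin builtin =
    CodeFactory::GetTSANRelaxedLoadStub(SaveFPRegsMode::kIgnore, kInt64Size);
Handle<Code> code_target = isolate()->builtins()->code_handle(builtin);
Call(code_target, RelocInfo::CODE_TARGET);
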
......@@ -1204,6 +1204,23 @@ void tsan_relaxed_store_64_bits(Address addr, int64_t value) {
UNREACHABLE();
#endif // V8_TARGET_ARCH_X64
}
base::Atomic32 tsan_relaxed_load_32_bits(Address addr, int64_t value) {
#if V8_TARGET_ARCH_X64
return base::Relaxed_Load(reinterpret_cast<base::Atomic32*>(addr));
#else
UNREACHABLE();
#endif // V8_TARGET_ARCH_X64
}
base::Atomic64 tsan_relaxed_load_64_bits(Address addr, int64_t value) {
#if V8_TARGET_ARCH_X64
return base::Relaxed_Load(reinterpret_cast<base::Atomic64*>(addr));
#else
UNREACHABLE();
#endif // V8_TARGET_ARCH_X64
}
} // namespace
#endif // V8_IS_TSAN
......@@ -1215,6 +1232,10 @@ IF_TSAN(FUNCTION_REFERENCE, tsan_relaxed_store_function_32_bits,
tsan_relaxed_store_32_bits)
IF_TSAN(FUNCTION_REFERENCE, tsan_relaxed_store_function_64_bits,
tsan_relaxed_store_64_bits)
IF_TSAN(FUNCTION_REFERENCE, tsan_relaxed_load_function_32_bits,
tsan_relaxed_load_32_bits)
IF_TSAN(FUNCTION_REFERENCE, tsan_relaxed_load_function_64_bits,
tsan_relaxed_load_64_bits)
static int EnterMicrotaskContextWrapper(HandleScopeImplementer* hsi,
Address raw_context) {
......
......@@ -274,6 +274,10 @@ class StatsCounter;
"tsan_relaxed_store_function_32_bits") \
IF_TSAN(V, tsan_relaxed_store_function_64_bits, \
"tsan_relaxed_store_function_64_bits") \
IF_TSAN(V, tsan_relaxed_load_function_32_bits, \
"tsan_relaxed_load_function_32_bits") \
IF_TSAN(V, tsan_relaxed_load_function_64_bits, \
"tsan_relaxed_load_function_64_bits") \
V(js_finalization_registry_remove_cell_from_unregister_token_map, \
"JSFinalizationRegistry::RemoveCellFromUnregisterTokenMap") \
V(re_match_for_call_from_js, "IrregexpInterpreter::MatchForCallFromJs") \
......
......@@ -111,6 +111,7 @@ namespace internal {
V(StringAtAsString) \
V(StringSubstring) \
IF_TSAN(V, TSANRelaxedStore) \
IF_TSAN(V, TSANRelaxedLoad) \
V(TypeConversion) \
V(TypeConversionNoContext) \
V(TypeConversion_Baseline) \
......@@ -1047,6 +1048,19 @@ class TSANRelaxedStoreDescriptor final
static constexpr auto registers();
static constexpr bool kRestrictAllocatableRegisters = true;
};
class TSANRelaxedLoadDescriptor final
: public StaticCallInterfaceDescriptor<TSANRelaxedLoadDescriptor> {
public:
DEFINE_PARAMETERS_NO_CONTEXT(kAddress)
DEFINE_PARAMETER_TYPES(MachineType::Pointer()) // kAddress
DECLARE_DESCRIPTOR(TSANRelaxedLoadDescriptor)
static constexpr auto registers();
static constexpr bool kRestrictAllocatableRegisters = true;
};
#endif // V8_IS_TSAN
class TypeConversionDescriptor final
......
......@@ -46,6 +46,11 @@ constexpr auto WriteBarrierDescriptor::registers() {
constexpr auto TSANRelaxedStoreDescriptor::registers() {
return RegisterArray(arg_reg_1, arg_reg_2, kReturnRegister0);
}
// static
constexpr auto TSANRelaxedLoadDescriptor::registers() {
return RegisterArray(arg_reg_1, kReturnRegister0);
}
#endif // V8_IS_TSAN
// static
......
......@@ -492,13 +492,11 @@ void TurboAssembler::CallTSANRelaxedStoreStub(Register address, Register value,
Register value_parameter(
descriptor.GetRegisterParameter(TSANRelaxedStoreDescriptor::kValue));
// Prepare argument registers for calling RecordWrite
// address_parameter <= address
// value_parameter <= value
// Prepare argument registers for calling GetTSANRelaxedStoreStub.
MovePair(address_parameter, address, value_parameter, value);
if (isolate()) {
Builtin builtin = Builtins::GetTSANRelaxedStoreStub(fp_mode, size);
Builtin builtin = CodeFactory::GetTSANRelaxedStoreStub(fp_mode, size);
Handle<Code> code_target = isolate()->builtins()->code_handle(builtin);
Call(code_target, RelocInfo::CODE_TARGET);
}
......@@ -523,6 +521,47 @@ void TurboAssembler::CallTSANRelaxedStoreStub(Register address, Register value,
MaybeRestoreRegisters(registers);
}
void TurboAssembler::CallTSANRelaxedLoadStub(Register address,
SaveFPRegsMode fp_mode, int size,
StubCallMode mode) {
TSANRelaxedLoadDescriptor descriptor;
RegList registers = descriptor.allocatable_registers();
MaybeSaveRegisters(registers);
Register address_parameter(
descriptor.GetRegisterParameter(TSANRelaxedLoadDescriptor::kAddress));
// Prepare argument registers for calling TSANRelaxedLoad.
Move(address_parameter, address);
if (isolate()) {
Builtin builtin = CodeFactory::GetTSANRelaxedLoadStub(fp_mode, size);
Handle<Code> code_target = isolate()->builtins()->code_handle(builtin);
Call(code_target, RelocInfo::CODE_TARGET);
}
#if V8_ENABLE_WEBASSEMBLY
// There are two different kinds of wasm-to-js functions: one lives in the
// wasm code space, and another one lives on the heap. Both of them have the
// same CodeKind (WASM_TO_JS_FUNCTION), but depending on where they are they
// have to either use the wasm stub calls, or call the builtin using the
// isolate like JS does. In order to know which wasm-to-js function we are
// compiling right now, we check if the isolate is null.
// TODO(solanes, v8:11600): Split CodeKind::WASM_TO_JS_FUNCTION into two
// different CodeKinds and pass the CodeKind as a parameter so that we can use
// that instead of a nullptr check.
// NOLINTNEXTLINE(readability/braces)
else {
DCHECK_EQ(mode, StubCallMode::kCallWasmRuntimeStub);
// Use {near_call} for direct Wasm call within a module.
auto wasm_target = wasm::WasmCode::GetTSANRelaxedLoadStub(fp_mode, size);
near_call(wasm_target, RelocInfo::WASM_STUB_CALL);
}
#endif // V8_ENABLE_WEBASSEMBLY
MaybeRestoreRegisters(registers);
}
#endif // V8_IS_TSAN
void MacroAssembler::RecordWrite(Register object, Register slot_address,
......
......@@ -508,9 +508,11 @@ class V8_EXPORT_PRIVATE TurboAssembler : public SharedTurboAssembler {
StubCallMode mode = StubCallMode::kCallBuiltinPointer);
#ifdef V8_IS_TSAN
void CallTSANRelaxedStoreStub(
Register address, Register value, SaveFPRegsMode fp_mode, int size,
StubCallMode mode = StubCallMode::kCallBuiltinPointer);
void CallTSANRelaxedStoreStub(Register address, Register value,
SaveFPRegsMode fp_mode, int size,
StubCallMode mode);
void CallTSANRelaxedLoadStub(Register address, SaveFPRegsMode fp_mode,
int size, StubCallMode mode);
#endif // V8_IS_TSAN
void MoveNumber(Register dst, double value);
......
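
As a usage note, the x64 code generator's out-of-line path (OutOfLineTSANRelaxedLoad below) invokes the new hook roughly as in this sketch; the scratch register and tagged size are illustrative:

// Compute the effective address into a scratch register, then call the
// TSAN relaxed-load builtin for a tagged-sized access.
__ leaq(scratch0_, operand_);
__ CallTSANRelaxedLoadStub(scratch0_, save_fp_mode, kTaggedSize,
                           StubCallMode::kCallBuiltinPointer);
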
......@@ -359,7 +359,8 @@ class OutOfLineTSANRelaxedStore final : public OutOfLineCode {
}
#endif // V8_ENABLE_WEBASSEMBLY
__ CallTSANRelaxedStoreStub(scratch0_, value_, save_fp_mode, size_);
__ CallTSANRelaxedStoreStub(scratch0_, value_, save_fp_mode, size_,
StubCallMode::kCallBuiltinPointer);
}
private:
......@@ -408,6 +409,69 @@ void EmitTSANStoreOOLIfNeeded(Zone* zone, CodeGenerator* codegen,
size);
}
class OutOfLineTSANRelaxedLoad final : public OutOfLineCode {
public:
OutOfLineTSANRelaxedLoad(CodeGenerator* gen, Operand operand,
Register scratch0, StubCallMode stub_mode, int size)
: OutOfLineCode(gen),
operand_(operand),
scratch0_(scratch0),
#if V8_ENABLE_WEBASSEMBLY
stub_mode_(stub_mode),
#endif // V8_ENABLE_WEBASSEMBLY
size_(size),
zone_(gen->zone()) {
}
void Generate() final {
const SaveFPRegsMode save_fp_mode = frame()->DidAllocateDoubleRegisters()
? SaveFPRegsMode::kSave
: SaveFPRegsMode::kIgnore;
__ leaq(scratch0_, operand_);
#if V8_ENABLE_WEBASSEMBLY
if (stub_mode_ == StubCallMode::kCallWasmRuntimeStub) {
// A direct call to a wasm runtime stub defined in this module.
// Just encode the stub index. This will be patched when the code
// is added to the native module and copied into wasm code space.
__ CallTSANRelaxedLoadStub(scratch0_, save_fp_mode, size_,
StubCallMode::kCallWasmRuntimeStub);
return;
}
#endif // V8_ENABLE_WEBASSEMBLY
__ CallTSANRelaxedLoadStub(scratch0_, save_fp_mode, size_,
StubCallMode::kCallBuiltinPointer);
}
private:
Operand const operand_;
Register const scratch0_;
#if V8_ENABLE_WEBASSEMBLY
StubCallMode const stub_mode_;
#endif // V8_ENABLE_WEBASSEMBLY
int size_;
Zone* zone_;
};
void EmitTSANLoadOOLIfNeeded(Zone* zone, CodeGenerator* codegen,
TurboAssembler* tasm, Operand operand,
X64OperandConverter& i, StubCallMode mode,
int size) {
// The FOR_TESTING code doesn't initialize the root register. We can't call
// the TSAN builtin since we need to load the external reference through the
// root register.
// TODO(solanes, v8:7790, v8:11600): See if we can support the FOR_TESTING
// path. It is not crucial, but it would be nice to remove this if.
if (codegen->code_kind() == CodeKind::FOR_TESTING) return;
Register scratch0 = i.TempRegister(0);
auto tsan_ool = zone->New<OutOfLineTSANRelaxedLoad>(codegen, operand,
scratch0, mode, size);
tasm->jmp(tsan_ool->entry());
tasm->bind(tsan_ool->exit());
}
#else
void EmitTSANStoreOOLIfNeeded(Zone* zone, CodeGenerator* codegen,
TurboAssembler* tasm, Operand operand,
......@@ -418,6 +482,11 @@ void EmitTSANStoreOOLIfNeeded(Zone* zone, CodeGenerator* codegen,
TurboAssembler* tasm, Operand operand,
Immediate value, X64OperandConverter& i,
StubCallMode mode, int size) {}
void EmitTSANLoadOOLIfNeeded(Zone* zone, CodeGenerator* codegen,
TurboAssembler* tasm, Operand operand,
X64OperandConverter& i, StubCallMode mode,
int size) {}
#endif // V8_IS_TSAN
#if V8_ENABLE_WEBASSEMBLY
......@@ -2226,19 +2295,28 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break;
case kX64MovqDecompressTaggedSigned: {
CHECK(instr->HasOutput());
__ DecompressTaggedSigned(i.OutputRegister(), i.MemoryOperand());
Operand address(i.MemoryOperand());
__ DecompressTaggedSigned(i.OutputRegister(), address);
EmitTSANLoadOOLIfNeeded(zone(), this, tasm(), address, i,
DetermineStubCallMode(), kTaggedSize);
EmitWordLoadPoisoningIfNeeded(this, opcode, instr, i);
break;
}
case kX64MovqDecompressTaggedPointer: {
CHECK(instr->HasOutput());
__ DecompressTaggedPointer(i.OutputRegister(), i.MemoryOperand());
Operand address(i.MemoryOperand());
__ DecompressTaggedPointer(i.OutputRegister(), address);
EmitTSANLoadOOLIfNeeded(zone(), this, tasm(), address, i,
DetermineStubCallMode(), kTaggedSize);
EmitWordLoadPoisoningIfNeeded(this, opcode, instr, i);
break;
}
case kX64MovqDecompressAnyTagged: {
CHECK(instr->HasOutput());
__ DecompressAnyTagged(i.OutputRegister(), i.MemoryOperand());
Operand address(i.MemoryOperand());
__ DecompressAnyTagged(i.OutputRegister(), address);
EmitTSANLoadOOLIfNeeded(zone(), this, tasm(), address, i,
DetermineStubCallMode(), kTaggedSize);
EmitWordLoadPoisoningIfNeeded(this, opcode, instr, i);
break;
}
......
......@@ -451,11 +451,23 @@ void InstructionSelector::VisitLoadTransform(Node* node) {
void InstructionSelector::VisitLoad(Node* node, Node* value,
InstructionCode opcode) {
X64OperandGenerator g(this);
#ifdef V8_IS_TSAN
// On TSAN builds we require one scratch register. Because of this we also
// have to modify the inputs to take into account possible aliasing and use
// UseUniqueRegister which is not required for non-TSAN builds.
InstructionOperand temps[] = {g.TempRegister()};
size_t temp_count = arraysize(temps);
auto reg_kind = OperandGenerator::RegisterUseKind::kUseUniqueRegister;
#else
InstructionOperand* temps = nullptr;
size_t temp_count = 0;
auto reg_kind = OperandGenerator::RegisterUseKind::kUseRegister;
#endif // V8_IS_TSAN
InstructionOperand outputs[] = {g.DefineAsRegister(node)};
InstructionOperand inputs[3];
size_t input_count = 0;
AddressingMode mode =
g.GetEffectiveAddressMemoryOperand(value, inputs, &input_count);
g.GetEffectiveAddressMemoryOperand(value, inputs, &input_count, reg_kind);
InstructionCode code = opcode | AddressingModeField::encode(mode);
if (node->opcode() == IrOpcode::kProtectedLoad) {
code |= AccessModeField::encode(kMemoryAccessProtected);
......@@ -463,7 +475,7 @@ void InstructionSelector::VisitLoad(Node* node, Node* value,
CHECK_NE(poisoning_level_, PoisoningMitigationLevel::kDontPoison);
code |= AccessModeField::encode(kMemoryAccessPoisoned);
}
Emit(code, 1, outputs, input_count, inputs);
Emit(code, 1, outputs, input_count, inputs, temp_count, temps);
}
void InstructionSelector::VisitLoad(Node* node) {
......
......@@ -1061,6 +1061,10 @@ static bool TransitivelyCalledBuiltinHasNoSideEffect(Builtin caller,
case Builtin::kTSANRelaxedStore32SaveFP:
case Builtin::kTSANRelaxedStore64IgnoreFP:
case Builtin::kTSANRelaxedStore64SaveFP:
case Builtin::kTSANRelaxedLoad32IgnoreFP:
case Builtin::kTSANRelaxedLoad32SaveFP:
case Builtin::kTSANRelaxedLoad64IgnoreFP:
case Builtin::kTSANRelaxedLoad64SaveFP:
#endif // V8_IS_TSAN
case Builtin::kWeakMapLookupHashIndex:
return true;
......
......@@ -114,6 +114,10 @@ bool IsUnexpectedCodeObject(Isolate* isolate, HeapObject obj) {
case Builtin::kTSANRelaxedStore32SaveFP:
case Builtin::kTSANRelaxedStore64IgnoreFP:
case Builtin::kTSANRelaxedStore64SaveFP:
case Builtin::kTSANRelaxedLoad32IgnoreFP:
case Builtin::kTSANRelaxedLoad32SaveFP:
case Builtin::kTSANRelaxedLoad64IgnoreFP:
case Builtin::kTSANRelaxedLoad64SaveFP:
#endif // V8_IS_TSAN
return false;
default:
......
......@@ -103,6 +103,10 @@ struct WasmModule;
IF_TSAN(V, TSANRelaxedStore32SaveFP) \
IF_TSAN(V, TSANRelaxedStore64IgnoreFP) \
IF_TSAN(V, TSANRelaxedStore64SaveFP) \
IF_TSAN(V, TSANRelaxedLoad32IgnoreFP) \
IF_TSAN(V, TSANRelaxedLoad32SaveFP) \
IF_TSAN(V, TSANRelaxedLoad64IgnoreFP) \
IF_TSAN(V, TSANRelaxedLoad64SaveFP) \
V(WasmAllocateArrayWithRtt) \
V(WasmArrayCopy) \
V(WasmArrayCopyWithChecks) \
......@@ -204,6 +208,20 @@ class V8_EXPORT_PRIVATE WasmCode final {
: RuntimeStubId::kTSANRelaxedStore64SaveFP;
}
}
static RuntimeStubId GetTSANRelaxedLoadStub(SaveFPRegsMode fp_mode,
int size) {
if (size == kInt32Size) {
return fp_mode == SaveFPRegsMode::kIgnore
? RuntimeStubId::kTSANRelaxedLoad32IgnoreFP
: RuntimeStubId::kTSANRelaxedLoad32SaveFP;
} else {
CHECK_EQ(size, kInt64Size);
return fp_mode == SaveFPRegsMode::kIgnore
? RuntimeStubId::kTSANRelaxedLoad64IgnoreFP
: RuntimeStubId::kTSANRelaxedLoad64SaveFP;
}
}
#endif // V8_IS_TSAN
Vector<byte> instructions() const {
......
......@@ -137,7 +137,7 @@
'regress/regress-605470': [PASS, SLOW],
'regress/regress-655573': [PASS, SLOW],
'regress/regress-1200351': [PASS, SLOW],
'regress/regress-crbug-808192': [PASS, SLOW, NO_VARIANTS, ['arch not in (ia32, x64)', SKIP]],
'regress/regress-crbug-808192': [PASS, SLOW, NO_VARIANTS, ['arch not in (ia32, x64)', SKIP], ['tsan', SKIP]],
'regress/regress-crbug-918301': [PASS, SLOW, NO_VARIANTS, ['mode != release or dcheck_always_on', SKIP], ['(arch == arm or arch == arm64) and simulator_run', SKIP], ['tsan', SKIP]],
'regress/wasm/regress-810973': [PASS, SLOW],
'sealed-array-reduce': [PASS, SLOW],
......