Commit b048c16b authored by Jaroslav Sevcik, committed by Commit Bot

[turbofan] Remove branch_load_poisoning flag.

The goal of this CL is to remove the confusing flag implication that
forced full poisoning.

This is an alternative to
https://chromium-review.googlesource.com/c/chromium/src/+/1253341,
where Chrome has to work around our flag implication system.
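
(For context, a hedged sketch of what the removed implication did; the
helper name below is purely illustrative, the real definitions are the
DEFINE_BOOL / DEFINE_IMPLICATION lines deleted from src/flag-definitions.h
in this CL:

  // Illustrative only, not the actual flag-macro expansion.
  void ApplyOldImplication(bool untrusted_code_mitigations,
                           bool* branch_load_poisoning) {
    // DEFINE_IMPLICATION(untrusted_code_mitigations, branch_load_poisoning)
    // meant that asking for any untrusted-code mitigations silently turned
    // on full branch/load poisoning as well.
    if (untrusted_code_mitigations) *branch_load_poisoning = true;
  }

That forced upgrade to full poisoning is what this CL removes.)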

In the optimizing compiler, we already have a bottleneck for setting the
mitigation level in src/compiler/pipeline.cc, so it is easy to change back
to partial mitigations there.
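
As a hedged sketch of how partial mitigations could be restored at that
bottleneck (hypothetical variant, not part of this CL; the CL itself keeps
kPoisonAll there, see the pipeline.cc hunk below):

  PoisoningMitigationLevel load_poisoning =
      PoisoningMitigationLevel::kDontPoison;
  if (FLAG_untrusted_code_mitigations) {
    // Hypothetical partial-mitigation variant; the code landed in this CL
    // assigns PoisoningMitigationLevel::kPoisonAll here instead.
    load_poisoning = PoisoningMitigationLevel::kPoisonCriticalOnly;
  }
  compilation_info()->SetPoisoningMitigationLevel(load_poisoning);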

Bug: chromium:888892
Change-Id: I01de7ed7bb91e8b06f8f79cc2d90657a0600892a
Reviewed-on: https://chromium-review.googlesource.com/c/1252985
Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
Commit-Queue: Jaroslav Sevcik <jarin@chromium.org>
Cr-Commit-Position: refs/heads/master@{#56374}
parent 48fd454c
@@ -2373,9 +2373,9 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
   __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);

   // Reset the masking register. This is done independent of the underlying
-  // feature flag {FLAG_branch_load_poisoning} to make the snapshot work with
-  // both configurations. It is safe to always do this, because the underlying
-  // register is caller-saved and can be arbitrarily clobbered.
+  // feature flag {FLAG_untrusted_code_mitigations} to make the snapshot work
+  // with both configurations. It is safe to always do this, because the
+  // underlying register is caller-saved and can be arbitrarily clobbered.
   __ ResetSpeculationPoisonRegister();

   // Compute the handler entry address and jump to it.
@@ -2895,9 +2895,9 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
   __ Bind(&not_js_frame);

   // Reset the masking register. This is done independent of the underlying
-  // feature flag {FLAG_branch_load_poisoning} to make the snapshot work with
-  // both configurations. It is safe to always do this, because the underlying
-  // register is caller-saved and can be arbitrarily clobbered.
+  // feature flag {FLAG_untrusted_code_mitigations} to make the snapshot work
+  // with both configurations. It is safe to always do this, because the
+  // underlying register is caller-saved and can be arbitrarily clobbered.
   __ ResetSpeculationPoisonRegister();

   // Compute the handler entry address and jump to it.
@@ -2681,12 +2681,11 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
 #ifdef V8_EMBEDDED_BUILTINS
   STATIC_ASSERT(kRootRegister == kSpeculationPoisonRegister);
   CHECK(!FLAG_untrusted_code_mitigations);
-  CHECK(!FLAG_branch_load_poisoning);
 #else
   // Reset the masking register. This is done independent of the underlying
-  // feature flag {FLAG_branch_load_poisoning} to make the snapshot work with
-  // both configurations. It is safe to always do this, because the underlying
-  // register is caller-saved and can be arbitrarily clobbered.
+  // feature flag {FLAG_untrusted_code_mitigations} to make the snapshot work
+  // with both configurations. It is safe to always do this, because the
+  // underlying register is caller-saved and can be arbitrarily clobbered.
   __ ResetSpeculationPoisonRegister();
 #endif
@@ -2436,9 +2436,9 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
   __ bind(&zero);

   // Reset the masking register. This is done independent of the underlying
-  // feature flag {FLAG_branch_load_poisoning} to make the snapshot work with
-  // both configurations. It is safe to always do this, because the underlying
-  // register is caller-saved and can be arbitrarily clobbered.
+  // feature flag {FLAG_untrusted_code_mitigations} to make the snapshot work
+  // with both configurations. It is safe to always do this, because the
+  // underlying register is caller-saved and can be arbitrarily clobbered.
   __ ResetSpeculationPoisonRegister();

   // Compute the handler entry address and jump to it.
@@ -2454,9 +2454,9 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
   __ bind(&zero);

   // Reset the masking register. This is done independent of the underlying
-  // feature flag {FLAG_branch_load_poisoning} to make the snapshot work with
-  // both configurations. It is safe to always do this, because the underlying
-  // register is caller-saved and can be arbitrarily clobbered.
+  // feature flag {FLAG_untrusted_code_mitigations} to make the snapshot work
+  // with both configurations. It is safe to always do this, because the
+  // underlying register is caller-saved and can be arbitrarily clobbered.
   __ ResetSpeculationPoisonRegister();

   // Compute the handler entry address and jump to it.
@@ -2517,9 +2517,9 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
   __ bind(&skip);

   // Reset the masking register. This is done independent of the underlying
-  // feature flag {FLAG_branch_load_poisoning} to make the snapshot work with
-  // both configurations. It is safe to always do this, because the underlying
-  // register is caller-saved and can be arbitrarily clobbered.
+  // feature flag {FLAG_untrusted_code_mitigations} to make the snapshot work
+  // with both configurations. It is safe to always do this, because the
+  // underlying register is caller-saved and can be arbitrarily clobbered.
   __ ResetSpeculationPoisonRegister();

   // Compute the handler entry address and jump to it.
@@ -911,13 +911,14 @@ PipelineCompilationJob::Status PipelineCompilationJob::PrepareJobImpl(
     compilation_info()->MarkAsAccessorInliningEnabled();
   }

-  // Compute and set poisoning level.
+  // This is the bottleneck for computing and setting poisoning level in the
+  // optimizing compiler.
   PoisoningMitigationLevel load_poisoning =
       PoisoningMitigationLevel::kDontPoison;
-  if (FLAG_branch_load_poisoning) {
+  if (FLAG_untrusted_code_mitigations) {
+    // For partial mitigations, this can be changed to
+    // PoisoningMitigationLevel::kPoisonCriticalOnly.
     load_poisoning = PoisoningMitigationLevel::kPoisonAll;
-  } else if (FLAG_untrusted_code_mitigations) {
-    load_poisoning = PoisoningMitigationLevel::kPoisonCriticalOnly;
   }
   compilation_info()->SetPoisoningMitigationLevel(load_poisoning);
@@ -2458,7 +2459,6 @@ bool PipelineImpl::SelectInstructions(Linkage* linkage) {
     // it cooperates with restricted allocatable registers above.
     static_assert(kRootRegister == kSpeculationPoisonRegister,
                   "The following checks assume root equals poison register");
-    CHECK_IMPLIES(FLAG_embedded_builtins, !FLAG_branch_load_poisoning);
     CHECK_IMPLIES(FLAG_embedded_builtins, !FLAG_untrusted_code_mitigations);
     AllocateRegisters(RegisterConfiguration::PreserveRootIA32(),
                       call_descriptor, run_verifier);
@@ -79,7 +79,7 @@ std::ostream& operator<<(std::ostream& os, FieldAccess const& access) {
 #endif
   os << access.type << ", " << access.machine_type << ", "
      << access.write_barrier_kind;
-  if (FLAG_untrusted_code_mitigations || FLAG_branch_load_poisoning) {
+  if (FLAG_untrusted_code_mitigations) {
     os << ", " << access.load_sensitivity;
   }
   os << "]";
@@ -118,7 +118,7 @@ std::ostream& operator<<(std::ostream& os, ElementAccess const& access) {
   os << access.base_is_tagged << ", " << access.header_size << ", "
      << access.type << ", " << access.machine_type << ", "
      << access.write_barrier_kind;
-  if (FLAG_untrusted_code_mitigations || FLAG_branch_load_poisoning) {
+  if (FLAG_untrusted_code_mitigations) {
     os << ", " << access.load_sensitivity;
   }
   return os;
@@ -524,9 +524,6 @@ DEFINE_BOOL(untrusted_code_mitigations, V8_DEFAULT_UNTRUSTED_CODE_MITIGATIONS,
             "Enable mitigations for executing untrusted code")
 #undef V8_DEFAULT_UNTRUSTED_CODE_MITIGATIONS

-DEFINE_BOOL(branch_load_poisoning, false, "Mask loads with branch conditions.")
-DEFINE_IMPLICATION(untrusted_code_mitigations, branch_load_poisoning)
-
 // Flags to help platform porters
 DEFINE_BOOL(minimal, false,
             "simplifies execution model to make porting "