Commit 37d325a3 authored by Clemens Hammacher, committed by Commit Bot

[wasm] Add flag for untrusted code mitigations

Add a flag which controls our mitigations for executing untrusted code,
in particular mitigations against speculative side channel attacks.
The new flag (--untrusted-code-mitigations) is enabled by default, and
currently only controls the mitigations we do in wasm.
In the future, it should also replace the --extra-masking flag
controlling our JavaScript mitigations.

R=bradnelson@chromium.org, bmeurer@chromium.org
CC=hablich@chromium.org

Bug: chromium:798964
Change-Id: I94a008fdb95b6bce07d547a8b81ec9f23f392496
Reviewed-on: https://chromium-review.googlesource.com/850212
Reviewed-by: Brad Nelson <bradnelson@chromium.org>
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Cr-Commit-Position: refs/heads/master@{#50370}
parent 3828ce0c
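For context on what the new flag guards: the mitigation is conditional index masking. After the regular bounds check, the memory index is ANDed with a mask derived from the memory size, so that even a speculatively executed access that mispredicts the bounds check stays within (or very near) the wasm memory. The following standalone C++ sketch only illustrates the idea; the names (ReadByte, mem_mask, memory) are illustrative and not part of the V8 code in this diff.

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Minimal sketch of the masking mitigation (illustrative, not V8 code).
    // Assumes mem_mask is 2^k - 1 with 2^k >= memory.size(), so for any
    // in-bounds index the AND is a no-op; it only clamps indices on
    // speculatively executed out-of-bounds paths.
    uint8_t ReadByte(const std::vector<uint8_t>& memory, uint32_t index,
                     uint32_t mem_mask) {
      if (index >= memory.size()) {
        return 0;  // Architectural out-of-bounds: the real wasm code traps here.
      }
      // Fallthrough (in-bounds) path: condition the index with the mask so a
      // mispredicted bounds check cannot speculatively read far outside memory.
      uint32_t masked_index = index & mem_mask;
      return memory[masked_index];
    }

    int main() {
      std::vector<uint8_t> memory(4096, 0xAB);
      const uint32_t mem_mask = 4096 - 1;  // 4096 is already a power of two.
      std::printf("%u\n", static_cast<unsigned>(ReadByte(memory, 100, mem_mask)));
      return 0;
    }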
@@ -85,6 +85,7 @@ WasmGraphBuilder::WasmGraphBuilder(
       function_table_sizes_(zone),
       cur_buffer_(def_buffer_),
       cur_bufsize_(kDefaultBufferSize),
+      untrusted_code_mitigations_(FLAG_untrusted_code_mitigations),
       runtime_exception_support_(exception_support),
       sig_(sig),
       source_position_table_(source_position_table) {
@@ -3269,14 +3270,19 @@ void WasmGraphBuilder::InitContextCache(WasmContextCacheNodes* context_cache) {
   *effect_ = mem_size;
   context_cache->mem_size = mem_size;
 
-  // Load the memory mask.
-  Node* mem_mask = graph()->NewNode(
-      jsgraph()->machine()->Load(MachineType::Uint32()), wasm_context_,
-      jsgraph()->Int32Constant(
-          static_cast<int32_t>(offsetof(WasmContext, mem_mask))),
-      *effect_, *control_);
-  *effect_ = mem_mask;
-  context_cache->mem_mask = mem_mask;
+  if (untrusted_code_mitigations_) {
+    // Load the memory mask.
+    Node* mem_mask = graph()->NewNode(
+        jsgraph()->machine()->Load(MachineType::Uint32()), wasm_context_,
+        jsgraph()->Int32Constant(
+            static_cast<int32_t>(offsetof(WasmContext, mem_mask))),
+        *effect_, *control_);
+    *effect_ = mem_mask;
+    context_cache->mem_mask = mem_mask;
+  } else {
+    // Explicitly set to nullptr to ensure a SEGV when we try to use it.
+    context_cache->mem_mask = nullptr;
+  }
 }
 
 void WasmGraphBuilder::PrepareContextCacheForLoop(
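The else branch deliberately leaves the cached mask as nullptr so that any remaining code path that consumes it without checking the flag crashes immediately instead of silently miscompiling. The mask loaded in the other branch is expected to be a 2^k - 1 value that covers the current memory size; below is a hedged sketch of how such a mask could be computed. This is an assumption about how a value like WasmContext::mem_mask is populated elsewhere in V8, not something shown in this diff.

    #include <cstdint>
    #include <cstdio>

    // Sketch: smallest mask of the form 2^k - 1 covering mem_size bytes
    // (assumption for illustration; the code that fills WasmContext::mem_mask
    // is not part of this change).
    uint32_t ComputeMemMask(uint32_t mem_size) {
      if (mem_size == 0) return 0;
      uint32_t mask = 0;
      while (mask < mem_size - 1) {
        mask = (mask << 1) | 1;
      }
      return mask;
    }

    int main() {
      std::printf("0x%x\n", static_cast<unsigned>(ComputeMemMask(65536)));  // 0xffff
      std::printf("0x%x\n", static_cast<unsigned>(ComputeMemMask(65537)));  // 0x1ffff
      return 0;
    }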
@@ -3286,7 +3292,9 @@ void WasmGraphBuilder::PrepareContextCacheForLoop(
   INTRODUCE_PHI(mem_start, MachineType::PointerRepresentation());
   INTRODUCE_PHI(mem_size, MachineRepresentation::kWord32);
-  INTRODUCE_PHI(mem_mask, MachineRepresentation::kWord32);
+  if (untrusted_code_mitigations_) {
+    INTRODUCE_PHI(mem_mask, MachineRepresentation::kWord32);
+  }
 
 #undef INTRODUCE_PHI
 }
@@ -3302,7 +3310,9 @@ void WasmGraphBuilder::NewContextCacheMerge(WasmContextCacheNodes* to,
   INTRODUCE_PHI(mem_start, MachineType::PointerRepresentation());
   INTRODUCE_PHI(mem_size, MachineRepresentation::kWord32);
-  INTRODUCE_PHI(mem_mask, MachineRepresentation::kWord32);
+  if (untrusted_code_mitigations_) {
+    INTRODUCE_PHI(mem_mask, MachineRepresentation::kWord32);
+  }
 
 #undef INTRODUCE_PHI
 }
@@ -3314,8 +3324,10 @@ void WasmGraphBuilder::MergeContextCacheInto(WasmContextCacheNodes* to,
                                        to->mem_size, from->mem_size);
   to->mem_start = CreateOrMergeIntoPhi(MachineType::PointerRepresentation(),
                                        merge, to->mem_start, from->mem_start);
-  to->mem_mask = CreateOrMergeIntoPhi(MachineRepresentation::kWord32, merge,
-                                      to->mem_mask, from->mem_mask);
+  if (untrusted_code_mitigations_) {
+    to->mem_mask = CreateOrMergeIntoPhi(MachineRepresentation::kWord32, merge,
+                                        to->mem_mask, from->mem_mask);
+  }
 }
 
 Node* WasmGraphBuilder::CreateOrMergeIntoPhi(wasm::ValueType type, Node* merge,
@@ -3524,9 +3536,7 @@ Node* WasmGraphBuilder::BoundsCheckMem(uint8_t access_size, Node* index,
   if (FLAG_wasm_no_bounds_checks) return Uint32ToUintptr(index);
   DCHECK_NOT_NULL(context_cache_);
   Node* mem_size = context_cache_->mem_size;
-  Node* mem_mask = context_cache_->mem_mask;
   DCHECK_NOT_NULL(mem_size);
-  DCHECK_NOT_NULL(mem_mask);
 
   auto m = jsgraph()->machine();
   if (use_trap_handler() && enforce_check == kCanOmitBoundsCheck) {
@@ -3595,9 +3605,13 @@ Node* WasmGraphBuilder::BoundsCheckMem(uint8_t access_size, Node* index,
   Node* cond = graph()->NewNode(m->Uint32LessThan(), index, effective_size);
   TrapIfFalse(wasm::kTrapMemOutOfBounds, cond, position);
 
-  // In the fallthrough case, condition the index with the memory mask.
-  Node* masked_index = graph()->NewNode(m->Word32And(), index, mem_mask);
-  return Uint32ToUintptr(masked_index);
+  if (untrusted_code_mitigations_) {
+    // In the fallthrough case, condition the index with the memory mask.
+    Node* mem_mask = context_cache_->mem_mask;
+    DCHECK_NOT_NULL(mem_mask);
+    index = graph()->NewNode(m->Word32And(), index, mem_mask);
+  }
+  return Uint32ToUintptr(index);
 }
 
 const Operator* WasmGraphBuilder::GetSafeLoadOperator(int offset,
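The check above compares the index against an effective size that already accounts for the access width, so passing the check guarantees the whole access fits in memory; the mask is then applied only on the in-bounds fallthrough path. A small standalone illustration of that arithmetic follows; the names are illustrative and any static offset is ignored here.

    #include <cassert>
    #include <cstdint>

    // Illustration of the bounds-check arithmetic: an access of access_size
    // bytes at index fits into a memory of mem_size bytes iff
    //   index < mem_size - (access_size - 1)
    // assuming 1 <= access_size <= mem_size (offset handling omitted).
    bool AccessInBounds(uint32_t index, uint32_t access_size, uint32_t mem_size) {
      assert(access_size >= 1 && access_size <= mem_size);
      uint32_t effective_size = mem_size - (access_size - 1);
      return index < effective_size;  // Implies index + access_size <= mem_size.
    }

    int main() {
      // 4-byte access into 65536 bytes: the largest valid index is 65532.
      assert(AccessInBounds(65532, 4, 65536));
      assert(!AccessInBounds(65533, 4, 65536));
      return 0;
    }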
@@ -3766,10 +3780,8 @@ Node* WasmGraphBuilder::BuildAsmjsLoadMem(MachineType type, Node* index) {
   DCHECK_NOT_NULL(context_cache_);
   Node* mem_start = context_cache_->mem_start;
   Node* mem_size = context_cache_->mem_size;
-  Node* mem_mask = context_cache_->mem_mask;
   DCHECK_NOT_NULL(mem_start);
   DCHECK_NOT_NULL(mem_size);
-  DCHECK_NOT_NULL(mem_mask);
 
   // Asm.js semantics are defined along the lines of typed arrays, hence OOB
   // reads return {undefined} coerced to the result type (0 for integers, NaN
@@ -3783,8 +3795,13 @@ Node* WasmGraphBuilder::BuildAsmjsLoadMem(MachineType type, Node* index) {
                                   BranchHint::kTrue);
   bounds_check.Chain(*control_);
 
-  // Condition the index with the memory mask.
-  index = graph()->NewNode(jsgraph()->machine()->Word32And(), index, mem_mask);
+  if (untrusted_code_mitigations_) {
+    // Condition the index with the memory mask.
+    Node* mem_mask = context_cache_->mem_mask;
+    DCHECK_NOT_NULL(mem_mask);
+    index =
+        graph()->NewNode(jsgraph()->machine()->Word32And(), index, mem_mask);
+  }
   index = Uint32ToUintptr(index);
 
   Node* load = graph()->NewNode(jsgraph()->machine()->Load(type), mem_start,
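Unlike the wasm path, the asm.js lowering does not trap on an out-of-bounds index: mirroring typed-array semantics, an OOB read yields 0 (NaN for floats), and the masking is again applied only on the in-bounds path when mitigations are enabled. The sketch below illustrates those semantics with illustrative names; it is not the graph-building code and assumes mem_mask is a covering 2^k - 1 value, so the AND is architecturally a no-op for in-bounds indices.

    #include <cstdint>
    #include <cstdio>
    #include <cstring>
    #include <vector>

    // Sketch of asm.js-style load semantics: OOB reads do not trap, they
    // produce 0; in-bounds reads may additionally be masked as an
    // untrusted-code mitigation (illustrative, not V8 code).
    uint32_t AsmjsLoadU32(const std::vector<uint8_t>& memory, uint32_t index,
                          uint32_t mem_mask, bool untrusted_code_mitigations) {
      if (memory.size() < sizeof(uint32_t) ||
          index > memory.size() - sizeof(uint32_t)) {
        return 0;  // OOB read: typed-array-like semantics, no trap.
      }
      if (untrusted_code_mitigations) {
        index &= mem_mask;  // No-op in bounds; clamps speculative OOB paths.
      }
      uint32_t value;
      std::memcpy(&value, memory.data() + index, sizeof(value));
      return value;
    }

    int main() {
      std::vector<uint8_t> memory(4096, 0x01);
      const uint32_t mem_mask = 4096 - 1;
      std::printf("%u\n", AsmjsLoadU32(memory, 0, mem_mask, true));       // 16843009
      std::printf("%u\n", AsmjsLoadU32(memory, 1000000, mem_mask, true)); // 0
      return 0;
    }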
@@ -3809,10 +3826,8 @@ Node* WasmGraphBuilder::BuildAsmjsStoreMem(MachineType type, Node* index,
   DCHECK_NOT_NULL(context_cache_);
   Node* mem_start = context_cache_->mem_start;
   Node* mem_size = context_cache_->mem_size;
-  Node* mem_mask = context_cache_->mem_mask;
   DCHECK_NOT_NULL(mem_start);
   DCHECK_NOT_NULL(mem_size);
-  DCHECK_NOT_NULL(mem_mask);
 
   // Asm.js semantics are to ignore OOB writes.
   // Note that we check against the memory size ignoring the size of the
@@ -3824,8 +3839,13 @@ Node* WasmGraphBuilder::BuildAsmjsStoreMem(MachineType type, Node* index,
                                   BranchHint::kTrue);
   bounds_check.Chain(*control_);
 
-  // Condition the index with the memory mask.
-  index = graph()->NewNode(jsgraph()->machine()->Word32And(), index, mem_mask);
+  if (untrusted_code_mitigations_) {
+    // Condition the index with the memory mask.
+    Node* mem_mask = context_cache_->mem_mask;
+    DCHECK_NOT_NULL(mem_mask);
+    index =
+        graph()->NewNode(jsgraph()->machine()->Word32And(), index, mem_mask);
+  }
   index = Uint32ToUintptr(index);
 
   const Operator* store_op = jsgraph()->machine()->Store(StoreRepresentation(
......
@@ -444,6 +444,7 @@ class WasmGraphBuilder {
   Node* def_buffer_[kDefaultBufferSize];
   bool has_simd_ = false;
   bool needs_stack_check_ = false;
+  bool untrusted_code_mitigations_ = true;
   // If the runtime doesn't support exception propagation,
   // we won't generate stack checks, and trap handling will also
   // be generated differently.
......
@@ -465,10 +465,13 @@ DEFINE_BOOL(turbo_store_elimination, true,
 DEFINE_BOOL(trace_store_elimination, false, "trace store elimination")
 DEFINE_BOOL(turbo_rewrite_far_jumps, true,
             "rewrite far to near jumps (ia32,x64)")
+// TODO(clemensh): Remove extra_masking, use untrusted_code_mitigations instead.
 DEFINE_BOOL(extra_masking, false, "Extra mask for memory accesses")
 DEFINE_BOOL(mask_array_index, false, "Mask array index with bound")
 DEFINE_IMPLICATION(future, extra_masking)
 DEFINE_IMPLICATION(extra_masking, mask_array_index)
+DEFINE_BOOL(untrusted_code_mitigations, true,
+            "Enable mitigations for executing untrusted code")
 
 // Flags to help platform porters
 DEFINE_BOOL(minimal, false,
......
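The DEFINE_IMPLICATION lines mean that enabling --future also turns on --extra-masking, which in turn turns on --mask-array-index; the new flag defaults to true and, like other boolean V8 flags, can be negated (--no-untrusted-code-mitigations), for example to measure the cost of the wasm masking. The toy sketch below only illustrates how such implications propagate to a fixed point; it is not V8's flag-handling code, and the names are illustrative.

    #include <map>
    #include <string>
    #include <utility>
    #include <vector>

    // Toy illustration of DEFINE_IMPLICATION semantics: whenever the first
    // flag is enabled, the second one is forced on as well, transitively.
    void ApplyImplications(
        std::map<std::string, bool>* flags,
        const std::vector<std::pair<std::string, std::string>>& implications) {
      bool changed = true;
      while (changed) {  // Iterate until a fixed point so chains propagate.
        changed = false;
        for (const auto& [from, to] : implications) {
          if ((*flags)[from] && !(*flags)[to]) {
            (*flags)[to] = true;
            changed = true;
          }
        }
      }
    }

    int main() {
      std::map<std::string, bool> flags{{"future", true},
                                        {"extra_masking", false},
                                        {"mask_array_index", false}};
      ApplyImplications(&flags, {{"future", "extra_masking"},
                                 {"extra_masking", "mask_array_index"}});
      // Both extra_masking and mask_array_index are now true.
      return flags["mask_array_index"] ? 0 : 1;
    }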