Commit c9f69db9 authored by Nico Hartmann, committed by V8 LUCI CQ

[turbofan] No speculative BigInt operations on 32 bit architectures

Bug: chromium:1254191, v8:9407
Change-Id: Ieb22063dad1ea8dfde359662d0330e689b6b2e05
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3193547
Reviewed-by: Maya Lekova <mslekova@chromium.org>
Commit-Queue: Nico Hartmann <nicohartmann@chromium.org>
Cr-Commit-Position: refs/heads/main@{#77177}
parent 420228e4
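In short, this commit gates the speculative BigInt lowerings built by JSTypeHintLowering (SpeculativeBigIntAdd via TryBuildBigIntBinop, and SpeculativeBigIntNegate) behind jsgraph()->machine()->Is64(), so 32-bit builds keep the generic operators instead of speculating on BigInt feedback; accordingly, the mjsunit tests below only expect a deoptimization on 64-bit targets. A minimal JavaScript sketch of the pattern the updated binary-op tests exercise; this is an illustration, not part of the commit, and the function name add and the concrete calls are invented:

// Flags: --allow-natives-syntax --opt
// Sketch only; assumes the usual mjsunit helpers (assertEquals, assertOptimized, ...).
function add(x, y) {
  return x + y;  // sees only BigInt operands, so the feedback slot records BigInt
}

%PrepareFunctionForOptimization(add);
assertEquals(3n, add(1n, 2n));
assertEquals(7n, add(3n, 4n));
%OptimizeFunctionOnNextCall(add);
assertEquals(11n, add(5n, 6n));
assertOptimized(add);

// Mixing a BigInt with a non-BigInt throws. The updated tests assert the
// resulting deoptimization only on 64-bit, because 32-bit builds no longer
// take the speculative BigInt path.
assertThrows(() => add(3n, undefined), TypeError);
if (%Is64Bit()) {
  assertUnoptimized(add);
}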
@@ -153,6 +153,7 @@ class JSSpeculativeBinopBuilder final {
   }
 
   const Operator* SpeculativeBigIntOp(BigIntOperationHint hint) {
+    DCHECK(jsgraph()->machine()->Is64());
     switch (op_->opcode()) {
       case IrOpcode::kJSAdd:
         return simplified()->SpeculativeBigIntAdd(hint);
@@ -206,6 +207,7 @@ class JSSpeculativeBinopBuilder final {
   }
 
   Node* TryBuildBigIntBinop() {
+    DCHECK(jsgraph()->machine()->Is64());
     BigIntOperationHint hint;
     if (GetBinaryBigIntOperationHint(&hint)) {
       const Operator* op = SpeculativeBigIntOp(hint);
@@ -321,12 +323,15 @@ JSTypeHintLowering::LoweringResult JSTypeHintLowering::ReduceUnaryOperation(
           jsgraph()->SmiConstant(-1), effect, control, slot);
       node = b.TryBuildNumberBinop();
       if (!node) {
+        if (jsgraph()->machine()->Is64()) {
           if (GetBinaryOperationHint(slot) == BinaryOperationHint::kBigInt) {
-            const Operator* op = jsgraph()->simplified()->SpeculativeBigIntNegate(
+            const Operator* op =
+                jsgraph()->simplified()->SpeculativeBigIntNegate(
                 BigIntOperationHint::kBigInt);
             node = jsgraph()->graph()->NewNode(op, operand, effect, control);
           }
         }
+      }
       break;
     }
     default:
@@ -403,10 +408,12 @@ JSTypeHintLowering::LoweringResult JSTypeHintLowering::ReduceBinaryOperation(
       }
       if (op->opcode() == IrOpcode::kJSAdd ||
           op->opcode() == IrOpcode::kJSSubtract) {
+        if (jsgraph()->machine()->Is64()) {
           if (Node* node = b.TryBuildBigIntBinop()) {
             return LoweringResult::SideEffectFree(node, node, control);
           }
         }
+      }
       break;
     }
     default:
...
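For the unary case handled in the ReduceUnaryOperation hunk above, a similar illustrative sketch (the name neg is invented, not from this commit): negation with BigInt-only feedback is lowered to SpeculativeBigIntNegate only when jsgraph()->machine()->Is64() holds, and is left to the generic lowering otherwise.

// Flags: --allow-natives-syntax --opt
// Sketch only: unary minus trained exclusively on BigInts.
function neg(x) {
  return -x;
}

%PrepareFunctionForOptimization(neg);
assertEquals(-1n, neg(1n));
assertEquals(-2n, neg(2n));
%OptimizeFunctionOnNextCall(neg);
assertEquals(-3n, neg(3n));
assertOptimized(neg);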
@@ -24,7 +24,9 @@ assertEquals(testAdd(6n, 2n), 8n);
 assertOptimized(testAdd);
 assertThrows(() => testAdd(big, big), RangeError);
-assertUnoptimized(testAdd);
+if (%Is64Bit()) {
+  assertUnoptimized(testAdd);
+}
 testAdd(30n, -50n);
 testAdd(23n, 5n);
...
@@ -26,4 +26,6 @@ assertEquals(17n, f(2n));
 assertEquals(16n, f(1n));
 assertOptimized(f);
 assertEquals(15n, f(0));
-assertUnoptimized(f);
+if (%Is64Bit()) {
+  assertUnoptimized(f);
+}
@@ -25,7 +25,9 @@ assertEquals(foo(1), 0);
 assertOptimized(foo);
 %PrepareFunctionForOptimization(foo);
 assertEquals(foo(2), 1);
-assertUnoptimized(foo);
+if (%Is64Bit()) {
+  assertUnoptimized(foo);
+}
 // Check that we learned something and do not loop deoptimizations.
 %OptimizeFunctionOnNextCall(foo);
 assertEquals(foo(1), 0);
...
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Flags: --allow-natives-syntax --opt

function f(a) {
  let x = -1n;
  if (!a) {
    x = a;
  }
  // With a === true, x is still the BigInt -1n, so x|0 mixes a BigInt with a
  // Number and throws a TypeError.
  x|0;
}

// Collect feedback on the non-BigInt path, optimize, then hit the BigInt path.
%PrepareFunctionForOptimization(f);
f(false);
%OptimizeFunctionOnNextCall(f);
assertThrows(() => f(true), TypeError);
@@ -14,7 +14,9 @@ assertEquals(-1n, foo(1n, 2n));
 assertEquals(1n, foo(2n, 1n));
 assertOptimized(foo);
 assertThrows(() => foo(2n, undefined));
-assertUnoptimized(foo);
+if (%Is64Bit()) {
+  assertUnoptimized(foo);
+}
 %PrepareFunctionForOptimization(foo);
 %OptimizeFunctionOnNextCall(foo);
 assertEquals(-1n, foo(1n, 2n));
...