Commit 1d08ecaf authored by Daniel Clifford, committed by Commit Bot

[torque] Simplify and cleanup Cast and UnsafeCast

Change-Id: I57e21c5bc754ca07f52032f85ec8aeff96448dd0
Reviewed-on: https://chromium-review.googlesource.com/c/1342929
Commit-Queue: Daniel Clifford <danno@chromium.org>
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Cr-Commit-Position: refs/heads/master@{#57855}
parent 524ab4e1
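
The recurring pattern in this change: Torque macros that previously took a Context as an ordinary parameter (or relied on extern C++ helpers to avoid needing one) now declare it as an implicit parameter, so call sites stop threading it through by hand. A minimal before/after sketch, taken from the TestCatch2Wrapper macro in the test changes further down:

  // Before: the context is an explicit parameter and every caller passes it.
  macro TestCatch2Wrapper(context: Context): never {
    ThrowTypeError(context, kInvalidArrayLength);
  }
  // ... call site:
  TestCatch2Wrapper(context);

  // After: the context is implicit and is bound from the caller's own
  // implicit context, so the argument disappears from the call site.
  macro TestCatch2Wrapper(implicit context: Context)(): never {
    ThrowTypeError(context, kInvalidArrayLength);
  }
  // ... call site:
  TestCatch2Wrapper();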
......@@ -115,7 +115,7 @@ namespace array {
const a: JSArray = Cast<JSArray>(o) otherwise Slow;
const map: Map = a.map;
if (!IsPrototypeInitialArrayPrototype(context, map)) goto Slow;
if (!IsPrototypeInitialArrayPrototype(map)) goto Slow;
const elementsKind: ElementsKind = map.elements_kind;
if (!IsFastElementsKind(elementsKind)) goto Slow;
......
......@@ -323,7 +323,7 @@ namespace array {
try {
const array: JSArray = Cast<JSArray>(receiver) otherwise IfSlowPath;
if (array.length != lenNumber) goto IfSlowPath;
if (!IsPrototypeInitialArrayPrototype(context, map)) goto IfSlowPath;
if (!IsPrototypeInitialArrayPrototype(map)) goto IfSlowPath;
if (IsNoElementsProtectorCellInvalid()) goto IfSlowPath;
if (IsElementsKindLessThanOrEqual(kind, HOLEY_ELEMENTS)) {
......
......@@ -7,7 +7,8 @@ namespace array {
elements: FixedArrayBase, index: Smi): Object
labels IfHole;
LoadWithHoleCheck<FixedArray>(elements: FixedArrayBase, index: Smi): Object
LoadWithHoleCheck<FixedArray>(implicit context: Context)(
elements: FixedArrayBase, index: Smi): Object
labels IfHole {
const elements: FixedArray = UnsafeCast<FixedArray>(elements);
const element: Object = elements[index];
......@@ -15,8 +16,8 @@ namespace array {
return element;
}
LoadWithHoleCheck<FixedDoubleArray>(elements: FixedArrayBase, index: Smi):
Object
LoadWithHoleCheck<FixedDoubleArray>(implicit context: Context)(
elements: FixedArrayBase, index: Smi): Object
labels IfHole {
const elements: FixedDoubleArray = UnsafeCast<FixedDoubleArray>(elements);
const element: float64 = LoadDoubleWithHoleCheck(elements, index)
......@@ -44,7 +45,7 @@ namespace array {
const same: Boolean = StrictEqual(searchElement, element);
if (same == True) {
assert(IsFastJSArray(array, context));
assert(Is<FastJSArray>(array));
return k;
}
}
......@@ -53,7 +54,7 @@ namespace array {
--k;
}
assert(IsFastJSArray(array, context));
assert(Is<FastJSArray>(array));
return -1;
}
......
......@@ -6,19 +6,19 @@ namespace array {
macro LoadElement<ElementsAccessor: type, T: type>(
elements: FixedArrayBase, index: Smi): T;
LoadElement<FastPackedSmiElements, Smi>(elements: FixedArrayBase, index: Smi):
Smi {
LoadElement<FastPackedSmiElements, Smi>(implicit context: Context)(
elements: FixedArrayBase, index: Smi): Smi {
const elems: FixedArray = UnsafeCast<FixedArray>(elements);
return UnsafeCast<Smi>(elems[index]);
}
LoadElement<FastPackedObjectElements, Object>(
LoadElement<FastPackedObjectElements, Object>(implicit context: Context)(
elements: FixedArrayBase, index: Smi): Object {
const elems: FixedArray = UnsafeCast<FixedArray>(elements);
return elems[index];
}
LoadElement<FastPackedDoubleElements, float64>(
LoadElement<FastPackedDoubleElements, float64>(implicit context: Context)(
elements: FixedArrayBase, index: Smi): float64 {
try {
const elems: FixedDoubleArray = UnsafeCast<FixedDoubleArray>(elements);
......@@ -32,21 +32,22 @@ namespace array {
}
macro StoreElement<ElementsAccessor: type, T: type>(
elements: FixedArrayBase, index: Smi, value: T);
implicit context:
Context)(elements: FixedArrayBase, index: Smi, value: T);
StoreElement<FastPackedSmiElements, Smi>(
StoreElement<FastPackedSmiElements, Smi>(implicit context: Context)(
elements: FixedArrayBase, index: Smi, value: Smi) {
const elems: FixedArray = UnsafeCast<FixedArray>(elements);
StoreFixedArrayElementSmi(elems, index, value, SKIP_WRITE_BARRIER);
}
StoreElement<FastPackedObjectElements, Object>(
StoreElement<FastPackedObjectElements, Object>(implicit context: Context)(
elements: FixedArrayBase, index: Smi, value: Object) {
const elems: FixedArray = UnsafeCast<FixedArray>(elements);
elems[index] = value;
}
StoreElement<FastPackedDoubleElements, float64>(
StoreElement<FastPackedDoubleElements, float64>(implicit context: Context)(
elements: FixedArrayBase, index: Smi, value: float64) {
const elems: FixedDoubleArray = UnsafeCast<FixedDoubleArray>(elements);
StoreFixedDoubleArrayElementWithSmiIndex(elems, index, value);
......@@ -56,7 +57,7 @@ namespace array {
// whether a property is present, so we can simply swap them using fast
// FixedArray loads/stores.
macro FastPackedArrayReverse<Accessor: type, T: type>(
elements: FixedArrayBase, length: Smi) {
implicit context: Context)(elements: FixedArrayBase, length: Smi) {
let lower: Smi = 0;
let upper: Smi = length - 1;
......
......@@ -11,14 +11,14 @@ namespace array {
elements: FixedArrayBase, first: Smi, count: Smi,
capacity: Smi): FixedArrayType;
Extract<FixedArray>(
Extract<FixedArray>(implicit context: Context)(
elements: FixedArrayBase, first: Smi, count: Smi,
capacity: Smi): FixedArray {
return UnsafeCast<FixedArray>(
ExtractFixedArray(elements, first, count, capacity));
}
Extract<FixedDoubleArray>(
Extract<FixedDoubleArray>(implicit context: Context)(
elements: FixedArrayBase, first: Smi, count: Smi,
capacity: Smi): FixedDoubleArray {
if (elements == kEmptyFixedArray) {
......@@ -51,7 +51,8 @@ namespace array {
Convert<intptr>(srcIndex), Convert<intptr>(count));
}
macro FastSplice<FixedArrayType: type, ElementType: type>(
macro FastSplice<FixedArrayType: type, ElementType: type>(implicit context:
Context)(
args: constexpr Arguments, a: JSArray, length: Smi, newLength: Smi,
lengthDelta: Smi, actualStart: Smi, insertCount: Smi,
actualDeleteCount: Smi): void labels Bailout {
......@@ -121,7 +122,7 @@ namespace array {
const a: JSArray = Cast<JSArray>(o) otherwise Bailout;
const map: Map = a.map;
if (!IsPrototypeInitialArrayPrototype(context, map)) goto Bailout;
if (!IsPrototypeInitialArrayPrototype(map)) goto Bailout;
if (IsNoElementsProtectorCellInvalid()) goto Bailout;
if (IsArraySpeciesProtectorCellInvalid()) goto Bailout;
......
......@@ -14,7 +14,7 @@ namespace array {
type FastDoubleElements;
type DictionaryElements;
macro EnsureWriteableFastElements(array: JSArray) {
macro EnsureWriteableFastElements(implicit context: Context)(array: JSArray) {
assert(IsFastElementsKind(array.map.elements_kind));
const elements: FixedArrayBase = array.elements;
......@@ -30,7 +30,7 @@ namespace array {
assert(array.elements.map != kCOWMap);
}
macro IsJSArray(o: Object): bool {
macro IsJSArray(implicit context: Context)(o: Object): bool {
try {
const array: JSArray = Cast<JSArray>(o) otherwise NotArray;
return true;
......
......@@ -148,7 +148,7 @@ Node* ArrayBuiltinsAssembler::FindProcessor(Node* k_value, Node* k) {
Label fast(this);
Label runtime(this);
Label object_push_pre(this), object_push(this), double_push(this);
BranchIfFastJSArray(a(), context(), &fast, &runtime);
BranchIfFastJSArray(CAST(a()), context(), &fast, &runtime);
BIND(&fast);
{
......
......@@ -21,7 +21,7 @@ namespace object {
// Bail out if ToPropertyKey will attempt to load and call
// Symbol.toPrimitive, toString, and valueOf, which could
// invalidate assumptions about the iterable.
if (IsJSReceiver(pair.key)) goto IfSlow;
if (Is<JSReceiver>(pair.key)) goto IfSlow;
CreateDataProperty(result, pair.key, pair.value);
}
return result;
......
......@@ -16,6 +16,18 @@ namespace typed_array {
type LoadFn = builtin(Context, JSTypedArray, Smi) => Object;
type StoreFn = builtin(Context, JSTypedArray, Smi, Object) => Object;
// These UnsafeCast specializations are necessary because there is no
// way to definitively test whether an Object is a Torque function
// with a specific signature, and the default UnsafeCast implementation
// would try to check this through an assert(Is<>), so the test
// is bypassed in this specialization.
UnsafeCast<LoadFn>(implicit context: Context)(o: Object): LoadFn {
return %RawCast<LoadFn>(o);
}
UnsafeCast<StoreFn>(implicit context: Context)(o: Object): StoreFn {
return %RawCast<StoreFn>(o);
}
macro KindForArrayType<T: type>(): constexpr ElementsKind;
KindForArrayType<FixedUint8Array>(): constexpr ElementsKind {
return UINT8_ELEMENTS;
......
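These specializations are exercised by the sort-state accessors in the array-sort changes further down in this diff, for example:

  macro GetLoadFn(implicit context: Context)(sortState: FixedArray): LoadFn {
    // The function pointer is stored as a plain Object in the sort state;
    // %RawCast re-types it without the (untestable) Is<LoadFn> check.
    return UnsafeCast<LoadFn>(sortState[kLoadFnIdx]);
  }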
......@@ -1037,90 +1037,6 @@ void CodeStubAssembler::BranchIfJSReceiver(Node* object, Label* if_true,
Branch(IsJSReceiver(object), if_true, if_false);
}
TNode<BoolT> CodeStubAssembler::IsFastJSArray(SloppyTNode<Object> object,
SloppyTNode<Context> context) {
Label if_true(this), if_false(this, Label::kDeferred), exit(this);
BranchIfFastJSArray(object, context, &if_true, &if_false);
TVARIABLE(BoolT, var_result);
BIND(&if_true);
{
var_result = Int32TrueConstant();
Goto(&exit);
}
BIND(&if_false);
{
var_result = Int32FalseConstant();
Goto(&exit);
}
BIND(&exit);
return var_result.value();
}
void CodeStubAssembler::BranchIfFastJSArrayWithNoCustomIteration(
TNode<Context> context, TNode<Object> object, Label* if_true,
Label* if_false) {
Label if_fast(this);
BranchIfFastJSArray(object, context, &if_fast, if_false, true);
BIND(&if_fast);
{
// Check that the Array.prototype hasn't been modified in a way that would
// affect iteration.
Node* protector_cell = LoadRoot(RootIndex::kArrayIteratorProtector);
DCHECK(isolate()->heap()->array_iterator_protector()->IsPropertyCell());
Branch(
WordEqual(LoadObjectField(protector_cell, PropertyCell::kValueOffset),
SmiConstant(Isolate::kProtectorValid)),
if_true, if_false);
}
}
TNode<BoolT> CodeStubAssembler::IsFastJSArrayWithNoCustomIteration(
TNode<Context> context, TNode<Object> object) {
Label if_false(this, Label::kDeferred), exit(this);
TVARIABLE(BoolT, var_result, Int32TrueConstant());
BranchIfFastJSArrayWithNoCustomIteration(context, object, &exit, &if_false);
BIND(&if_false);
{
var_result = Int32FalseConstant();
Goto(&exit);
}
BIND(&exit);
return var_result.value();
}
void CodeStubAssembler::BranchIfFastJSArray(Node* object, Node* context,
Label* if_true, Label* if_false,
bool iteration_only) {
GotoIfForceSlowPath(if_false);
// Bailout if receiver is a Smi.
GotoIf(TaggedIsSmi(object), if_false);
Node* map = LoadMap(object);
GotoIfNot(IsJSArrayMap(map), if_false);
// Bailout if receiver has slow elements.
Node* elements_kind = LoadMapElementsKind(map);
GotoIfNot(IsFastElementsKind(elements_kind), if_false);
// Verify that our prototype is the initial array prototype.
GotoIfNot(IsPrototypeInitialArrayPrototype(context, map), if_false);
if (iteration_only) {
// If we are only iterating over the array, there is no need to check
// the NoElements protector if the array is not holey.
GotoIfNot(IsHoleyFastElementsKind(elements_kind), if_true);
}
Branch(IsNoElementsProtectorCellInvalid(), if_false, if_true);
}
void CodeStubAssembler::BranchIfFastJSArrayForCopy(Node* object, Node* context,
Label* if_true,
Label* if_false) {
GotoIf(IsArraySpeciesProtectorCellInvalid(), if_false);
BranchIfFastJSArray(object, context, if_true, if_false);
}
void CodeStubAssembler::GotoIfForceSlowPath(Label* if_true) {
#ifdef V8_ENABLE_FORCE_SLOW_PATH
Node* const force_slow_path_addr =
......@@ -5958,6 +5874,13 @@ TNode<BoolT> CodeStubAssembler::IsNoElementsProtectorCellInvalid() {
return WordEqual(cell_value, invalid);
}
TNode<BoolT> CodeStubAssembler::IsArrayIteratorProtectorCellInvalid() {
Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
Node* cell = LoadRoot(RootIndex::kArrayIteratorProtector);
Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
return WordEqual(cell_value, invalid);
}
TNode<BoolT> CodeStubAssembler::IsPromiseResolveProtectorCellInvalid() {
Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
Node* cell = LoadRoot(RootIndex::kPromiseResolveProtector);
......
......@@ -326,105 +326,6 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
return CAST(heap_object);
}
TNode<HeapNumber> UnsafeCastNumberToHeapNumber(TNode<Number> p_n) {
return CAST(p_n);
}
TNode<FixedArrayBase> UnsafeCastObjectToFixedArrayBase(TNode<Object> p_o) {
return CAST(p_o);
}
TNode<FixedArray> UnsafeCastObjectToFixedArray(TNode<Object> p_o) {
return CAST(p_o);
}
TNode<Context> UnsafeCastObjectToContext(TNode<Object> p_o) {
return CAST(p_o);
}
TNode<FixedDoubleArray> UnsafeCastObjectToFixedDoubleArray(
TNode<Object> p_o) {
return CAST(p_o);
}
TNode<HeapNumber> UnsafeCastObjectToHeapNumber(TNode<Object> p_o) {
return CAST(p_o);
}
TNode<HeapObject> UnsafeCastObjectToCallable(TNode<Object> p_o) {
return CAST(p_o);
}
TNode<Smi> UnsafeCastObjectToSmi(TNode<Object> p_o) { return CAST(p_o); }
TNode<Number> UnsafeCastObjectToNumber(TNode<Object> p_o) {
return CAST(p_o);
}
TNode<HeapObject> UnsafeCastObjectToHeapObject(TNode<Object> p_o) {
return CAST(p_o);
}
TNode<JSArray> UnsafeCastObjectToJSArray(TNode<Object> p_o) {
return CAST(p_o);
}
TNode<FixedTypedArrayBase> UnsafeCastObjectToFixedTypedArrayBase(
TNode<Object> p_o) {
return CAST(p_o);
}
TNode<Object> UnsafeCastObjectToCompareBuiltinFn(TNode<Object> p_o) {
return p_o;
}
TNode<Object> UnsafeCastObjectToLoadFn(TNode<Object> p_o) { return p_o; }
TNode<Object> UnsafeCastObjectToStoreFn(TNode<Object> p_o) { return p_o; }
TNode<Object> UnsafeCastObjectToCanUseSameAccessorFn(TNode<Object> p_o) {
return p_o;
}
TNode<NumberDictionary> UnsafeCastObjectToNumberDictionary(
TNode<Object> p_o) {
return CAST(p_o);
}
TNode<JSReceiver> UnsafeCastObjectToJSReceiver(TNode<Object> p_o) {
return CAST(p_o);
}
TNode<JSObject> UnsafeCastObjectToJSObject(TNode<Object> p_o) {
return CAST(p_o);
}
TNode<Map> UnsafeCastObjectToMap(TNode<Object> p_o) { return CAST(p_o); }
TNode<String> UnsafeCastObjectToString(TNode<Object> p_o) {
return CAST(p_o);
}
TNode<JSArgumentsObjectWithLength> RawCastObjectToJSArgumentsObjectWithLength(
TNode<Object> p_o) {
return TNode<JSArgumentsObjectWithLength>::UncheckedCast(p_o);
}
TNode<JSArray> RawCastObjectToFastJSArray(TNode<Object> p_o) {
return TNode<JSArray>::UncheckedCast(p_o);
}
TNode<JSArray> RawCastObjectToFastJSArrayForCopy(TNode<Object> p_o) {
return TNode<JSArray>::UncheckedCast(p_o);
}
TNode<JSArray> RawCastObjectToFastJSArrayWithNoCustomIteration(
TNode<Object> p_o) {
return TNode<JSArray>::UncheckedCast(p_o);
}
TNode<JSFunction> RawCastObjectToJSFunction(TNode<Object> p_o) {
return TNode<JSFunction>::UncheckedCast(p_o);
}
Node* MatchesParameterMode(Node* value, ParameterMode mode);
#define PARAMETER_BINOP(OpName, IntPtrOpName, SmiOpName) \
......@@ -804,19 +705,6 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
void BranchIfJSReceiver(Node* object, Label* if_true, Label* if_false);
void BranchIfFastJSArray(Node* object, Node* context, Label* if_true,
Label* if_false, bool iteration_only = false);
void BranchIfNotFastJSArray(Node* object, Node* context, Label* if_true,
Label* if_false) {
BranchIfFastJSArray(object, context, if_false, if_true);
}
void BranchIfFastJSArrayForCopy(Node* object, Node* context, Label* if_true,
Label* if_false);
void BranchIfFastJSArrayWithNoCustomIteration(TNode<Context> context,
TNode<Object> object,
Label* if_true,
Label* if_false);
// Branches to {if_true} when --force-slow-path flag has been passed.
// It's used for testing to ensure that slow path implementation behave
// equivalent to corresponding fast paths (where applicable).
......@@ -2022,6 +1910,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
TNode<BoolT> IsAllocationSite(SloppyTNode<HeapObject> object);
TNode<BoolT> IsAnyHeapNumber(SloppyTNode<HeapObject> object);
TNode<BoolT> IsNoElementsProtectorCellInvalid();
TNode<BoolT> IsArrayIteratorProtectorCellInvalid();
TNode<BoolT> IsBigIntInstanceType(SloppyTNode<Int32T> instance_type);
TNode<BoolT> IsBigInt(SloppyTNode<HeapObject> object);
TNode<BoolT> IsBoolean(SloppyTNode<HeapObject> object);
......@@ -2039,10 +1928,6 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
TNode<BoolT> IsExtensibleMap(SloppyTNode<Map> map);
TNode<BoolT> IsExtensibleNonPrototypeMap(TNode<Map> map);
TNode<BoolT> IsExternalStringInstanceType(SloppyTNode<Int32T> instance_type);
TNode<BoolT> IsFastJSArray(SloppyTNode<Object> object,
SloppyTNode<Context> context);
TNode<BoolT> IsFastJSArrayWithNoCustomIteration(TNode<Context> context,
TNode<Object> object);
TNode<BoolT> IsFeedbackCell(SloppyTNode<HeapObject> object);
TNode<BoolT> IsFeedbackVector(SloppyTNode<HeapObject> object);
TNode<BoolT> IsContext(SloppyTNode<HeapObject> object);
......
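The hand-written UnsafeCastObjectTo*/RawCastObjectTo* helpers deleted above are now expressed directly in Torque via the generic UnsafeCast. Per the comment in the typed-array changes earlier, its default shape is roughly the following; this is an illustrative sketch, not the literal base.tq definition:

  // Sketch: a debug-only type check via Is<A>, then a raw re-typing.
  macro UnsafeCast<A: type>(implicit context: Context)(o: Object): A {
    assert(Is<A>(o));
    return %RawCast<A>(o);
  }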
......@@ -244,11 +244,15 @@ TEST(TestForLoop) {
}
TEST(TestTypeswitch) {
Isolate* isolate(CcTest::InitIsolateOnce());
CcTest::InitializeVM();
Isolate* isolate(CcTest::i_isolate());
i::HandleScope scope(isolate);
Handle<Context> context =
Utils::OpenHandle(*v8::Isolate::GetCurrent()->GetCurrentContext());
CodeAssemblerTester asm_tester(isolate, 0);
TestTorqueAssembler m(asm_tester.state());
{
m.TestTypeswitch();
m.TestTypeswitch(m.UncheckedCast<Context>(m.HeapConstant(context)));
m.Return(m.UndefinedConstant());
}
FunctionTester ft(asm_tester.GenerateCode(), 0);
......@@ -256,11 +260,15 @@ TEST(TestTypeswitch) {
}
TEST(TestGenericOverload) {
Isolate* isolate(CcTest::InitIsolateOnce());
CcTest::InitializeVM();
Isolate* isolate(CcTest::i_isolate());
i::HandleScope scope(isolate);
Handle<Context> context =
Utils::OpenHandle(*v8::Isolate::GetCurrent()->GetCurrentContext());
CodeAssemblerTester asm_tester(isolate, 0);
TestTorqueAssembler m(asm_tester.state());
{
m.TestGenericOverload();
m.TestGenericOverload(m.UncheckedCast<Context>(m.HeapConstant(context)));
m.Return(m.UndefinedConstant());
}
FunctionTester ft(asm_tester.GenerateCode(), 0);
......@@ -352,11 +360,15 @@ TEST(TestCatch3) {
}
TEST(TestLookup) {
Isolate* isolate(CcTest::InitIsolateOnce());
CcTest::InitializeVM();
Isolate* isolate(CcTest::i_isolate());
i::HandleScope scope(isolate);
Handle<Context> context =
Utils::OpenHandle(*v8::Isolate::GetCurrent()->GetCurrentContext());
CodeAssemblerTester asm_tester(isolate, 0);
TestTorqueAssembler m(asm_tester.state());
{
m.TestQualifiedAccess();
m.TestQualifiedAccess(m.UncheckedCast<Context>(m.HeapConstant(context)));
m.Return(m.UndefinedConstant());
}
FunctionTester ft(asm_tester.GenerateCode(), 0);
......
......@@ -174,7 +174,7 @@ namespace test {
return x + 2;
}
macro TestFunctionPointers(context: Context): Boolean {
macro TestFunctionPointers(implicit context: Context)(): Boolean {
let fptr: builtin(Context, Smi) => Smi = TestHelperPlus1;
check(fptr(context, 42) == 43);
fptr = TestHelperPlus2;
......@@ -182,7 +182,7 @@ namespace test {
return True;
}
macro TestVariableRedeclaration(context: Context): Boolean {
macro TestVariableRedeclaration(implicit context: Context)(): Boolean {
let var1: int31 = FromConstexpr<bool>(42 == 0) ? 0 : 1;
let var2: int31 = FromConstexpr<bool>(42 == 0) ? 1 : 0;
return True;
......@@ -208,11 +208,11 @@ namespace test {
return x;
}
macro TestUnsafeCast(c: Context, n: Number): Boolean {
macro TestUnsafeCast(implicit context: Context)(n: Number): Boolean {
if (TaggedIsSmi(n)) {
let m: Smi = UnsafeCast<Smi>(n);
check(TestHelperPlus1(c, m) == 11);
check(TestHelperPlus1(context, m) == 11);
return True;
}
return False;
......@@ -223,7 +223,7 @@ namespace test {
check(Convert<intptr>(-0xffff) == -65535);
}
macro TestLargeIntegerLiterals(c: Context) {
macro TestLargeIntegerLiterals(implicit c: Context)() {
let x: int32 = 0x40000000;
let y: int32 = 0x7fffffff;
}
......@@ -278,11 +278,11 @@ namespace test {
return i.i;
}
macro TestStruct2(): TestStructA {
macro TestStruct2(implicit context: Context)(): TestStructA {
return TestStructA{UnsafeCast<FixedArray>(kEmptyFixedArray), 27, 31};
}
macro TestStruct3(): TestStructA {
macro TestStruct3(implicit context: Context)(): TestStructA {
let a: TestStructA =
TestStructA{UnsafeCast<FixedArray>(kEmptyFixedArray), 13, 5};
let b: TestStructA = a;
......@@ -304,7 +304,7 @@ namespace test {
y: TestStructA;
}
macro TestStruct4(): TestStructC {
macro TestStruct4(implicit context: Context)(): TestStructC {
return TestStructC{TestStruct2(), TestStruct2()};
}
......@@ -419,7 +419,9 @@ namespace test {
}
}
macro TypeswitchExample(x: Number | FixedArray): int32 {
type NumberOrFixedArray = Number | FixedArray;
macro TypeswitchExample(implicit context: Context)(x: NumberOrFixedArray):
int32 {
let result: int32 = 0;
typeswitch (IncrementIfSmi(x)) {
case (x: FixedArray): {
......@@ -447,7 +449,7 @@ namespace test {
return result;
}
macro TestTypeswitch() {
macro TestTypeswitch(implicit context: Context)() {
check(TypeswitchExample(FromConstexpr<Smi>(5)) == 26);
const a: FixedArray = AllocateZeroedFixedArray(3);
check(TypeswitchExample(a) == 13);
......@@ -461,7 +463,7 @@ namespace test {
return o + 1;
}
macro TestGenericOverload() {
macro TestGenericOverload(implicit context: Context)() {
const xSmi: Smi = 5;
const xObject: Object = xSmi;
check(ExampleGenericOverload<Smi>(xSmi) == 6);
......@@ -599,12 +601,12 @@ namespace test {
}
}
macro TestQualifiedAccess() {
macro TestQualifiedAccess(implicit context: Context)() {
let s: Smi = 0;
check(!array::IsJSArray(s));
}
macro TestCatch1(context: Context): Smi {
macro TestCatch1(implicit context: Context)(): Smi {
let r: Smi = 0;
try {
ThrowTypeError(context, kInvalidArrayLength);
......@@ -614,29 +616,29 @@ namespace test {
}
}
macro TestCatch2Wrapper(context: Context): never {
macro TestCatch2Wrapper(implicit context: Context)(): never {
ThrowTypeError(context, kInvalidArrayLength);
}
macro TestCatch2(context: Context): Smi {
macro TestCatch2(implicit context: Context)(): Smi {
let r: Smi = 0;
try {
TestCatch2Wrapper(context);
TestCatch2Wrapper();
} catch (e) {
r = 2;
return r;
}
}
macro TestCatch3WrapperWithLabel(context: Context): never
macro TestCatch3WrapperWithLabel(implicit context: Context)(): never
labels Abort {
ThrowTypeError(context, kInvalidArrayLength);
}
macro TestCatch3(context: Context): Smi {
macro TestCatch3(implicit context: Context)(): Smi {
let r: Smi = 0;
try {
TestCatch3WrapperWithLabel(context) otherwise Abort;
TestCatch3WrapperWithLabel() otherwise Abort;
}
label Abort {
return -1;
......
......@@ -257,25 +257,19 @@ namespace array {
return kSuccess;
}
extern macro UnsafeCastObjectToCompareBuiltinFn(Object): CompareBuiltinFn;
UnsafeCast<CompareBuiltinFn>(o: Object): CompareBuiltinFn {
return UnsafeCastObjectToCompareBuiltinFn(o);
UnsafeCast<CompareBuiltinFn>(implicit context: Context)(o: Object):
CompareBuiltinFn {
return %RawCast<CompareBuiltinFn>(o);
}
extern macro UnsafeCastObjectToLoadFn(Object): LoadFn;
UnsafeCast<LoadFn>(o: Object): LoadFn {
return UnsafeCastObjectToLoadFn(o);
UnsafeCast<LoadFn>(implicit context: Context)(o: Object): LoadFn {
return %RawCast<LoadFn>(o);
}
extern macro UnsafeCastObjectToStoreFn(Object): StoreFn;
UnsafeCast<StoreFn>(o: Object): StoreFn {
return UnsafeCastObjectToStoreFn(o);
UnsafeCast<StoreFn>(implicit context: Context)(o: Object): StoreFn {
return %RawCast<StoreFn>(o);
}
extern macro UnsafeCastObjectToCanUseSameAccessorFn(Object):
CanUseSameAccessorFn;
UnsafeCast<CanUseSameAccessorFn>(o: Object): CanUseSameAccessorFn {
return UnsafeCastObjectToCanUseSameAccessorFn(o);
UnsafeCast<CanUseSameAccessorFn>(implicit context: Context)(o: Object):
CanUseSameAccessorFn {
return %RawCast<CanUseSameAccessorFn>(o);
}
builtin SortCompareDefault(
......@@ -375,7 +369,8 @@ namespace array {
// might have occurred. This means we cannot leave any pointer to the elements
// backing store on the stack (since it would point to the filler object).
// TODO(v8:7995): Remove reloading once left-trimming is removed.
macro ReloadElements(sortState: FixedArray): HeapObject {
macro ReloadElements(implicit context: Context)(sortState: FixedArray):
HeapObject {
const receiver: JSReceiver = GetReceiver(sortState);
if (sortState[kAccessorIdx] == kGenericElementsAccessorId) return receiver;
......@@ -383,35 +378,40 @@ namespace array {
return object.elements;
}
macro GetInitialReceiverLength(sortState: FixedArray): Number {
macro GetInitialReceiverLength(implicit context:
Context)(sortState: FixedArray): Number {
return UnsafeCast<Number>(sortState[kInitialReceiverLengthIdx]);
}
macro GetLoadFn(sortState: FixedArray): LoadFn {
macro GetLoadFn(implicit context: Context)(sortState: FixedArray): LoadFn {
return UnsafeCast<LoadFn>(sortState[kLoadFnIdx]);
}
macro GetStoreFn(sortState: FixedArray): StoreFn {
macro GetStoreFn(implicit context: Context)(sortState: FixedArray): StoreFn {
return UnsafeCast<StoreFn>(sortState[kStoreFnIdx]);
}
macro GetCanUseSameAccessorFn(sortState: FixedArray): CanUseSameAccessorFn {
macro GetCanUseSameAccessorFn(implicit context: Context)(
sortState: FixedArray): CanUseSameAccessorFn {
return UnsafeCast<CanUseSameAccessorFn>(
sortState[kCanUseSameAccessorFnIdx]);
}
macro GetReceiver(sortState: FixedArray): JSReceiver {
macro GetReceiver(implicit context: Context)(sortState: FixedArray):
JSReceiver {
return UnsafeCast<JSReceiver>(sortState[kReceiverIdx]);
}
// Returns the temporary array without changing its size.
macro GetTempArray(sortState: FixedArray): FixedArray {
macro GetTempArray(implicit context: Context)(sortState: FixedArray):
FixedArray {
return UnsafeCast<FixedArray>(sortState[kTempArrayIdx]);
}
// Re-loading the stack-size is done in a few places. The small macro allows
// for easier invariant checks at all use sites.
macro GetPendingRunsSize(sortState: FixedArray): Smi {
macro GetPendingRunsSize(implicit context: Context)(sortState: FixedArray):
Smi {
assert(TaggedIsSmi(sortState[kPendingRunsSizeIdx]));
const stackSize: Smi = UnsafeCast<Smi>(sortState[kPendingRunsSizeIdx]);
......@@ -423,7 +423,8 @@ namespace array {
sortState[kPendingRunsSizeIdx] = value;
}
macro GetPendingRunBase(pendingRuns: FixedArray, run: Smi): Smi {
macro GetPendingRunBase(implicit context:
Context)(pendingRuns: FixedArray, run: Smi): Smi {
return UnsafeCast<Smi>(pendingRuns[run << 1]);
}
......@@ -431,7 +432,8 @@ namespace array {
pendingRuns[run << 1] = value;
}
macro GetPendingRunLength(pendingRuns: FixedArray, run: Smi): Smi {
macro GetPendingRunLength(implicit context: Context)(
pendingRuns: FixedArray, run: Smi): Smi {
return UnsafeCast<Smi>(pendingRuns[(run << 1) + 1]);
}
......@@ -439,7 +441,8 @@ namespace array {
pendingRuns[(run << 1) + 1] = value;
}
macro PushRun(sortState: FixedArray, base: Smi, length: Smi) {
macro PushRun(implicit context:
Context)(sortState: FixedArray, base: Smi, length: Smi) {
assert(GetPendingRunsSize(sortState) < kMaxMergePending);
const stackSize: Smi = GetPendingRunsSize(sortState);
......@@ -454,7 +457,8 @@ namespace array {
// Returns the temporary array and makes sure that it is big enough.
// TODO(szuend): Implement a better re-size strategy.
macro GetTempArray(sortState: FixedArray, requestedSize: Smi): FixedArray {
macro GetTempArray(implicit context: Context)(
sortState: FixedArray, requestedSize: Smi): FixedArray {
const minSize: Smi = SmiMax(kSortStateTempSize, requestedSize);
const currentSize: Smi = UnsafeCast<Smi>(sortState[kTempArraySizeIdx]);
......@@ -471,7 +475,8 @@ namespace array {
}
// This macro jumps to the Bailout label iff kBailoutStatus is kFailure.
macro EnsureSuccess(sortState: FixedArray) labels Bailout {
macro EnsureSuccess(implicit context:
Context)(sortState: FixedArray) labels Bailout {
const status: Smi = UnsafeCast<Smi>(sortState[kBailoutStatusIdx]);
if (status == kFailure) goto Bailout;
}
......@@ -882,8 +887,8 @@ namespace array {
}
}
macro LoadElementsOrTempArray(useTempArray: Boolean, sortState: FixedArray):
HeapObject {
macro LoadElementsOrTempArray(implicit context: Context)(
useTempArray: Boolean, sortState: FixedArray): HeapObject {
return useTempArray == True ? GetTempArray(sortState) :
ReloadElements(sortState);
}
......@@ -1535,7 +1540,8 @@ namespace array {
}
// Returns true iff run_length(n - 2) > run_length(n - 1) + run_length(n).
macro RunInvariantEstablished(pendingRuns: FixedArray, n: Smi): bool {
macro RunInvariantEstablished(implicit context: Context)(
pendingRuns: FixedArray, n: Smi): bool {
if (n < 2) return true;
const runLengthN: Smi = GetPendingRunLength(pendingRuns, n);
......