Commit 08b7872a authored by Daniel Clifford, committed by Commit Bot

Modify BranchIfFastJSArray to use array/prototype validation cells

Change-Id: Idd8b5c30607575e3cb5e54177308e68183aa78bb
Reviewed-on: https://chromium-review.googlesource.com/700676
Reviewed-by: Benedikt Meurer <bmeurer@chromium.org>
Commit-Queue: Daniel Clifford <danno@chromium.org>
Cr-Commit-Position: refs/heads/master@{#48304}
parent ba44981a
......@@ -112,8 +112,7 @@ class ArrayBuiltinCodeStubAssembler : public CodeStubAssembler {
Label fast(this);
Label runtime(this);
Label object_push_pre(this), object_push(this), double_push(this);
BranchIfFastJSArray(a(), context(), FastJSArrayAccessMode::ANY_ACCESS,
&fast, &runtime);
BranchIfFastJSArray(a(), context(), &fast, &runtime);
BIND(&fast);
{
......@@ -705,7 +704,6 @@ class ArrayBuiltinCodeStubAssembler : public CodeStubAssembler {
GotoIf(TaggedIsNotSmi(len()), slow);
BranchIfFastJSArray(o(), context(),
CodeStubAssembler::FastJSArrayAccessMode::INBOUNDS_READ,
&switch_on_elements_kind, slow);
BIND(&switch_on_elements_kind);
......@@ -827,8 +825,7 @@ TF_BUILTIN(FastArrayPop, CodeStubAssembler) {
// 4) we aren't supposed to shrink the backing store.
// 1) Check that the array has fast elements.
BranchIfFastJSArray(receiver, context, FastJSArrayAccessMode::INBOUNDS_READ,
&fast, &runtime);
BranchIfFastJSArray(receiver, context, &fast, &runtime);
BIND(&fast);
{
......@@ -931,8 +928,7 @@ TF_BUILTIN(FastArrayPush, CodeStubAssembler) {
Node* kind = nullptr;
Label fast(this);
BranchIfFastJSArray(receiver, context, FastJSArrayAccessMode::ANY_ACCESS,
&fast, &runtime);
BranchIfFastJSArray(receiver, context, &fast, &runtime);
BIND(&fast);
{
......@@ -1060,8 +1056,7 @@ TF_BUILTIN(FastArrayShift, CodeStubAssembler) {
// 5) we aren't supposed to left-trim the backing store.
// 1) Check that the array has fast elements.
BranchIfFastJSArray(receiver, context, FastJSArrayAccessMode::INBOUNDS_READ,
&fast, &runtime);
BranchIfFastJSArray(receiver, context, &fast, &runtime);
BIND(&fast);
{
......@@ -1741,8 +1736,7 @@ void ArrayIncludesIndexofAssembler::Generate(SearchVariant variant) {
// Take slow path if not a JSArray, if retrieving elements requires
// traversing prototype, or if access checks are required.
BranchIfFastJSArray(receiver, context, FastJSArrayAccessMode::INBOUNDS_READ,
&init_index, &call_runtime);
BranchIfFastJSArray(receiver, context, &init_index, &call_runtime);
BIND(&init_index);
VARIABLE(index_var, MachineType::PointerRepresentation(), intptr_zero);
......
......@@ -788,28 +788,29 @@ void CodeStubAssembler::BranchIfJSObject(Node* object, Label* if_true,
if_true, if_false);
}
// NOTE(review): this region is a unified diff rendered with its +/- markers
// stripped, so the pre-change and post-change versions of
// BranchIfFastJSArray are interleaved below. It is not compilable as-is;
// the annotations mark which lines the commit removed vs. added.
// Old signature (removed by this commit): took a FastJSArrayAccessMode.
void CodeStubAssembler::BranchIfFastJSArray(
Node* object, Node* context, CodeStubAssembler::FastJSArrayAccessMode mode,
Label* if_true, Label* if_false) {
// New signature (added): the mode parameter is dropped entirely.
void CodeStubAssembler::BranchIfFastJSArray(Node* object, Node* context,
Label* if_true, Label* if_false) {
// Bailout if receiver is a Smi.
GotoIf(TaggedIsSmi(object), if_false);
Node* map = LoadMap(object);
// Bailout if instance type is not JS_ARRAY_TYPE.
// Old check (removed): compared the instance type against JS_ARRAY_TYPE
// directly.
GotoIf(Word32NotEqual(LoadMapInstanceType(map), Int32Constant(JS_ARRAY_TYPE)),
if_false);
// Old placement of the elements-kind load (removed; re-added further down).
Node* elements_kind = LoadMapElementsKind(map);
// New check (added): same bailout expressed via the IsJSArrayMap helper.
GotoIfNot(IsJSArrayMap(map), if_false);
// Bailout if receiver has slow elements.
Node* elements_kind = LoadMapElementsKind(map);
GotoIfNot(IsFastElementsKind(elements_kind), if_false);
// Check prototype chain if receiver does not have packed elements
// Old path (removed): for INBOUNDS_READ mode, holey arrays triggered an
// explicit walk over the prototype chain's elements.
if (mode == FastJSArrayAccessMode::INBOUNDS_READ) {
GotoIfNot(IsHoleyFastElementsKind(elements_kind), if_true);
}
BranchIfPrototypesHaveNoElements(map, if_true, if_false);
// New path (added): per the commit message, validity now comes from the
// array/prototype validation cells — require the map's prototype to be the
// initial Array.prototype and the array protector cell to still be valid,
// instead of walking the prototype chain on every call.
// Check prototype chain if receiver does not have packed elements
GotoIfNot(IsPrototypeInitialArrayPrototype(context, map), if_false);
Branch(IsArrayProtectorCellInvalid(), if_false, if_true);
}
// Branches to |if_true| only if |object| is a fast JSArray that is also safe
// for the fast copying paths: in addition to the ordinary fast-array checks,
// the species protector cell must still be valid (presumably guarding
// against a tampered @@species — confirm against the protector's definition).
void CodeStubAssembler::BranchIfFastJSArrayForCopy(Node* object, Node* context,
                                                   Label* if_true,
                                                   Label* if_false) {
  // An invalidated species protector disqualifies the fast copy path
  // outright, regardless of the receiver's shape.
  GotoIf(IsSpeciesProtectorCellInvalid(), if_false);
  // Otherwise the ordinary fast-JSArray check makes the final decision.
  BranchIfFastJSArray(object, context, if_true, if_false);
}
Node* CodeStubAssembler::AllocateRaw(Node* size_in_bytes, AllocationFlags flags,
......@@ -9632,8 +9633,7 @@ Node* CodeStubAssembler::CreateArrayIterator(Node* array, Node* array_map,
BIND(&if_isgeneric);
{
Label if_isfast(this), if_isslow(this);
BranchIfFastJSArray(array, context, FastJSArrayAccessMode::INBOUNDS_READ,
&if_isfast, &if_isslow);
BranchIfFastJSArray(array, context, &if_isfast, &if_isslow);
BIND(&if_isfast);
{
......@@ -9667,8 +9667,7 @@ Node* CodeStubAssembler::CreateArrayIterator(Node* array, Node* array_map,
BIND(&if_isgeneric);
{
Label if_isfast(this), if_isslow(this);
BranchIfFastJSArray(array, context, FastJSArrayAccessMode::INBOUNDS_READ,
&if_isfast, &if_isslow);
BranchIfFastJSArray(array, context, &if_isfast, &if_isslow);
BIND(&if_isfast);
{
......
......@@ -397,10 +397,10 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
void BranchIfJSReceiver(Node* object, Label* if_true, Label* if_false);
void BranchIfJSObject(Node* object, Label* if_true, Label* if_false);
enum class FastJSArrayAccessMode { INBOUNDS_READ, ANY_ACCESS };
void BranchIfFastJSArray(Node* object, Node* context,
FastJSArrayAccessMode mode, Label* if_true,
void BranchIfFastJSArray(Node* object, Node* context, Label* if_true,
Label* if_false);
void BranchIfFastJSArrayForCopy(Node* object, Node* context, Label* if_true,
Label* if_false);
// Load value from current frame by given offset in bytes.
Node* LoadFromFrame(int offset, MachineType rep = MachineType::AnyTagged());
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment