Commit 271ffdb0 authored by Jakob Gruber, committed by Commit Bot

[collections] Allocate large collections in large object space

The backing store fixed array for collections needs to be allocated
in LOS if it exceeds the maximum regular heap object size.

Drive-by-fix: Only store fixed array map once as per TODO.

Bug: chromium:784862
Change-Id: I6b4dd2e45153ae107171e21bc7448e0d9b54b0ed
Reviewed-on: https://chromium-review.googlesource.com/771150
Reviewed-by: Sathya Gunasekaran <gsathya@chromium.org>
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Cr-Commit-Position: refs/heads/master@{#49378}
parent 9e922895
......@@ -584,14 +584,10 @@ Node* CollectionsBuiltinsAssembler::AllocateOrderedHashTable() {
// Allocate the table and add the proper map.
const ElementsKind elements_kind = HOLEY_ELEMENTS;
Node* const length_intptr = IntPtrConstant(kFixedArrayLength);
Node* const table = AllocateFixedArray(elements_kind, length_intptr);
CSA_ASSERT(this,
IntPtrLessThanOrEqual(
length_intptr, IntPtrConstant(FixedArray::kMaxRegularLength)));
Heap::RootListIndex map_index = Heap::kOrderedHashTableMapRootIndex;
// TODO(gsathya): Directly store correct in AllocateFixedArray,
// instead of overwriting here.
StoreMapNoWriteBarrier(table, map_index);
Node* const fixed_array_map = LoadRoot(Heap::kOrderedHashTableMapRootIndex);
Node* const table =
AllocateFixedArray(elements_kind, length_intptr, INTPTR_PARAMETERS,
kAllowLargeObjectAllocation, fixed_array_map);
// Initialize the OrderedHashTable fields.
const WriteBarrierMode barrier_mode = SKIP_WRITE_BARRIER;
......@@ -2004,7 +2000,8 @@ TNode<Object> WeakCollectionsBuiltinsAssembler::AllocateTable(
// See HashTable::NewInternal().
TNode<IntPtrT> length = KeyIndexFromEntry(capacity);
TNode<Object> table = CAST(AllocateFixedArray(HOLEY_ELEMENTS, length));
TNode<Object> table = CAST(AllocateFixedArray(
HOLEY_ELEMENTS, length, INTPTR_PARAMETERS, kAllowLargeObjectAllocation));
// See BaseShape::GetMap().
StoreMapNoWriteBarrier(table, Heap::kHashTableMapRootIndex);
......
......@@ -2820,7 +2820,7 @@ Node* CodeStubAssembler::AllocateFixedArray(ElementsKind kind,
if (fixed_array_map != nullptr) {
// Conservatively only skip the write barrier if there are no allocation
// flags, this ensures that the object hasn't ended up in LOS. Note that the
// fixed array map is currently alwasys immortal and technically wouldn't
// fixed array map is currently always immortal and technically wouldn't
// need the write barrier even in LOS, but it's better to not take chances
// in case this invariant changes later, since it's difficult to enforce
// locally here.
......
......@@ -314,6 +314,7 @@ bool IntrinsicHasNoSideEffect(Runtime::FunctionId id) {
V(GenerateRandomNumbers) \
V(GlobalPrint) \
V(AllocateInNewSpace) \
V(AllocateInTargetSpace) \
V(AllocateSeqOneByteString) \
V(AllocateSeqTwoByteString) \
V(ObjectCreate) \
......
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Regression test: forces the collection's backing-store table to be
// allocated in large object space. We only care that allocation does not
// crash; the WeakMap constructor is still expected to throw (the sparse
// array's entries are not valid [key, value] pairs).
const sparse = [];
sparse[0x80000] = 1;      // huge length => table exceeds max regular heap object size
sparse.unshift({});       // shift indices up; element 0 becomes an object
assertThrows(() => new WeakMap(sparse));
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment