Commit 4c156936 authored by Jakob Gruber, committed by Commit Bot

[regexp] Allow JSRegExpResult allocations in large object space

Large regexp results may exceed kMaxRegularHeapObjectSize and must
thus be allocated in large object space.

Drive-by: Rename '%InNewSpace' to '%InYoungGeneration'.

Bug: chromium:976627
Change-Id: I38b5aecb95a95cf2fdbb24d19550cec34361a09d
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1674027
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Cr-Commit-Position: refs/heads/master@{#62368}
parent d1054287
......@@ -36,61 +36,45 @@ TNode<IntPtrT> RegExpBuiltinsAssembler::IntPtrZero() {
TNode<JSRegExpResult> RegExpBuiltinsAssembler::AllocateRegExpResult(
TNode<Context> context, TNode<Smi> length, TNode<Smi> index,
TNode<String> input, TNode<FixedArray>* elements_out) {
#ifdef DEBUG
TNode<Smi> max_length = SmiConstant(JSArray::kInitialMaxFastElementArray);
CSA_ASSERT(this, SmiLessThanOrEqual(length, max_length));
#endif // DEBUG
CSA_ASSERT(this, SmiLessThanOrEqual(
length, SmiConstant(JSArray::kMaxFastArrayLength)));
CSA_ASSERT(this, SmiGreaterThan(length, SmiConstant(0)));
// Allocate the JSRegExpResult together with its elements fixed array.
// Initial preparations first.
// Allocate.
TNode<IntPtrT> length_intptr = SmiUntag(length);
const ElementsKind elements_kind = PACKED_ELEMENTS;
TNode<Map> map = CAST(LoadContextElement(LoadNativeContext(context),
Context::REGEXP_RESULT_MAP_INDEX));
Node* no_allocation_site = nullptr;
TNode<IntPtrT> length_intptr = SmiUntag(length);
TNode<IntPtrT> capacity = length_intptr;
TNode<IntPtrT> elements_size = GetFixedArrayAllocationSize(
length_intptr, elements_kind, INTPTR_PARAMETERS);
TNode<IntPtrT> total_size =
IntPtrAdd(elements_size, IntPtrConstant(JSRegExpResult::kSize));
static const int kRegExpResultOffset = 0;
static const int kElementsOffset =
kRegExpResultOffset + JSRegExpResult::kSize;
// The folded allocation.
TNode<HeapObject> result = Allocate(total_size);
TNode<HeapObject> elements = InnerAllocate(result, kElementsOffset);
// Initialize the JSRegExpResult.
// Note: The returned `elements` may be in young large object space, but
// `array` is guaranteed to be in new space so we could skip write barriers
// below.
TNode<JSArray> array;
TNode<FixedArrayBase> elements;
std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
elements_kind, map, length, no_allocation_site, capacity,
INTPTR_PARAMETERS, kAllowLargeObjectAllocation, JSRegExpResult::kSize);
TNode<Context> native_context = LoadNativeContext(context);
TNode<Map> map = CAST(
LoadContextElement(native_context, Context::REGEXP_RESULT_MAP_INDEX));
StoreMapNoWriteBarrier(result, map);
// Finish result initialization.
StoreObjectFieldNoWriteBarrier(result, JSArray::kPropertiesOrHashOffset,
EmptyFixedArrayConstant());
StoreObjectFieldNoWriteBarrier(result, JSArray::kElementsOffset, elements);
StoreObjectFieldNoWriteBarrier(result, JSArray::kLengthOffset, length);
TNode<JSRegExpResult> result = CAST(array);
StoreObjectFieldNoWriteBarrier(result, JSRegExpResult::kIndexOffset, index);
StoreObjectFieldNoWriteBarrier(result, JSRegExpResult::kInputOffset, input);
// TODO(jgruber,tebbi): Could skip barrier but the MemoryOptimizer complains.
StoreObjectField(result, JSRegExpResult::kInputOffset, input);
StoreObjectFieldNoWriteBarrier(result, JSRegExpResult::kGroupsOffset,
UndefinedConstant());
// Initialize the elements.
DCHECK(!IsDoubleElementsKind(elements_kind));
const RootIndex map_index = RootIndex::kFixedArrayMap;
DCHECK(RootsTable::IsImmortalImmovable(map_index));
StoreMapNoWriteBarrier(elements, map_index);
StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset, length);
// Finish elements initialization.
FillFixedArrayWithValue(elements_kind, elements, IntPtrZero(), length_intptr,
RootIndex::kUndefinedValue);
if (elements_out) *elements_out = CAST(elements);
return CAST(result);
return result;
}
TNode<Object> RegExpBuiltinsAssembler::RegExpCreate(
......@@ -186,7 +170,7 @@ TNode<JSRegExpResult> RegExpBuiltinsAssembler::ConstructNewResultFromMatchInfo(
TNode<JSRegExpResult> result = AllocateRegExpResult(
context, num_results, start, string, &result_elements);
UnsafeStoreFixedArrayElement(result_elements, 0, first, SKIP_WRITE_BARRIER);
UnsafeStoreFixedArrayElement(result_elements, 0, first);
// If no captures exist we can skip named capture handling as well.
GotoIf(SmiEqual(num_results, SmiConstant(1)), &out);
......
......@@ -1265,6 +1265,8 @@ TNode<HeapObject> CodeStubAssembler::Allocate(TNode<IntPtrT> size_in_bytes,
intptr_t size_constant;
if (ToIntPtrConstant(size_in_bytes, size_constant)) {
CHECK_LE(size_constant, kMaxRegularHeapObjectSize);
} else {
CSA_ASSERT(this, IsRegularHeapObjectSize(size_in_bytes));
}
}
if (!(flags & kDoubleAlignment) && always_allocated_in_requested_space) {
......@@ -3944,11 +3946,11 @@ TNode<BoolT> CodeStubAssembler::IsValidFastJSArrayCapacity(
TNode<JSArray> CodeStubAssembler::AllocateJSArray(
TNode<Map> array_map, TNode<FixedArrayBase> elements, TNode<Smi> length,
Node* allocation_site) {
Node* allocation_site, int array_header_size) {
Comment("begin allocation of JSArray passing in elements");
CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
int base_size = JSArray::kSize;
int base_size = array_header_size;
if (allocation_site != nullptr) {
base_size += AllocationMemento::kSize;
}
......@@ -3964,7 +3966,7 @@ std::pair<TNode<JSArray>, TNode<FixedArrayBase>>
CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
ElementsKind kind, TNode<Map> array_map, TNode<Smi> length,
Node* allocation_site, Node* capacity, ParameterMode capacity_mode,
AllocationFlags allocation_flags) {
AllocationFlags allocation_flags, int array_header_size) {
Comment("begin allocation of JSArray with elements");
CHECK_EQ(allocation_flags & ~kAllowLargeObjectAllocation, 0);
CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
......@@ -3978,7 +3980,8 @@ CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
if (TryGetIntPtrOrSmiConstantValue(capacity, &capacity_int, capacity_mode)) {
if (capacity_int == 0) {
TNode<FixedArrayBase> empty_array = EmptyFixedArrayConstant();
array = AllocateJSArray(array_map, empty_array, length, allocation_site);
array = AllocateJSArray(array_map, empty_array, length, allocation_site,
array_header_size);
return {array.value(), empty_array};
} else {
Goto(&nonempty);
......@@ -3990,7 +3993,8 @@ CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
BIND(&empty);
{
TNode<FixedArrayBase> empty_array = EmptyFixedArrayConstant();
array = AllocateJSArray(array_map, empty_array, length, allocation_site);
array = AllocateJSArray(array_map, empty_array, length, allocation_site,
array_header_size);
elements = empty_array;
Goto(&out);
}
......@@ -3998,7 +4002,7 @@ CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
BIND(&nonempty);
{
int base_size = JSArray::kSize;
int base_size = array_header_size;
if (allocation_site != nullptr) base_size += AllocationMemento::kSize;
const int elements_offset = base_size;
......@@ -4035,8 +4039,8 @@ CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
// The JSArray and possibly allocation memento next. Note that
// allocation_flags are *not* passed on here and the resulting JSArray
// will always be in new space.
array =
AllocateJSArray(array_map, elements.value(), length, allocation_site);
array = AllocateJSArray(array_map, elements.value(), length,
allocation_site, array_header_size);
Goto(&out);
......
......@@ -1655,7 +1655,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
ElementsKind kind, TNode<Map> array_map, TNode<Smi> length,
Node* allocation_site, Node* capacity,
ParameterMode capacity_mode = INTPTR_PARAMETERS,
AllocationFlags allocation_flags = kNone);
AllocationFlags allocation_flags = kNone,
int array_header_size = JSArray::kSize);
// Allocate a JSArray and fill elements with the hole.
// The ParameterMode argument is only used for the capacity parameter.
......@@ -1682,7 +1683,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
TNode<JSArray> AllocateJSArray(TNode<Map> array_map,
TNode<FixedArrayBase> elements,
TNode<Smi> length,
Node* allocation_site = nullptr);
Node* allocation_site = nullptr,
int array_header_size = JSArray::kSize);
enum class HoleConversionMode { kDontConvert, kConvertToUndefined };
// Clone a fast JSArray |array| into a new fast JSArray.
......
......@@ -22,6 +22,7 @@
#include "src/ic/stub-cache.h"
#include "src/logging/counters.h"
#include "src/objects/heap-object-inl.h"
#include "src/objects/js-array-inl.h"
#include "src/objects/smi.h"
#include "src/snapshot/natives.h"
#include "src/trap-handler/trap-handler.h"
......@@ -924,8 +925,17 @@ RUNTIME_FUNCTION(Runtime_HaveSameMap) {
return isolate->heap()->ToBoolean(obj1.map() == obj2.map());
}
// Test-only intrinsic (%HasElementsInALargeObjectSpace): returns true iff the
// elements backing store of the given JSArray lives in large object space —
// either the young-generation LO space or the old-generation LO space.
RUNTIME_FUNCTION(Runtime_HasElementsInALargeObjectSpace) {
// No handles are created below, so seal the scope to enforce that.
SealHandleScope shs(isolate);
DCHECK_EQ(1, args.length());
CONVERT_ARG_CHECKED(JSArray, array, 0);
// Inspect the backing store, not the JSArray header itself: the fix under
// test allocates the (possibly huge) elements fixed array in LO space.
FixedArrayBase elements = array.elements();
return isolate->heap()->ToBoolean(
isolate->heap()->new_lo_space()->Contains(elements) ||
isolate->heap()->lo_space()->Contains(elements));
}
RUNTIME_FUNCTION(Runtime_InNewSpace) {
RUNTIME_FUNCTION(Runtime_InYoungGeneration) {
SealHandleScope shs(isolate);
DCHECK_EQ(1, args.length());
CONVERT_ARG_CHECKED(Object, obj, 0);
......
......@@ -430,24 +430,26 @@ namespace internal {
F(SymbolIsPrivate, 1, 1)
#define FOR_EACH_INTRINSIC_TEST(F, I) \
F(ClearMegamorphicStubCache, 0, 1) \
F(Abort, 1, 1) \
F(AbortJS, 1, 1) \
F(ArraySpeciesProtector, 0, 1) \
F(ClearFunctionFeedback, 1, 1) \
F(ClearMegamorphicStubCache, 0, 1) \
F(CloneWasmModule, 1, 1) \
F(CompleteInobjectSlackTracking, 1, 1) \
F(ConstructConsString, 2, 1) \
F(ConstructSlicedString, 2, 1) \
F(ConstructDouble, 2, 1) \
F(ConstructSlicedString, 2, 1) \
F(DebugPrint, 1, 1) \
F(DebugTrace, 0, 1) \
F(DebugTrackRetainingPath, -1, 1) \
F(DeoptimizeFunction, 1, 1) \
I(DeoptimizeNow, 0, 1) \
F(DeserializeWasmModule, 2, 1) \
F(DisallowCodegenFromStrings, 1, 1) \
F(DisallowWasmCodegen, 1, 1) \
F(DisassembleFunction, 1, 1) \
F(EnableCodeLoggingForTesting, 0, 1) \
F(EnsureFeedbackVectorForFunction, 1, 1) \
F(FreezeWasmLazyCompilation, 1, 1) \
F(GetCallable, 0, 1) \
F(GetInitializerFunction, 1, 1) \
......@@ -458,8 +460,8 @@ namespace internal {
F(GetWasmRecoveredTrapCount, 0, 1) \
F(GlobalPrint, 1, 1) \
F(HasDictionaryElements, 1, 1) \
F(HasPackedElements, 1, 1) \
F(HasDoubleElements, 1, 1) \
F(HasElementsInALargeObjectSpace, 1, 1) \
F(HasFastElements, 1, 1) \
F(HasFastProperties, 1, 1) \
F(HasFixedBigInt64Elements, 1, 1) \
......@@ -475,25 +477,25 @@ namespace internal {
F(HasFixedUint8Elements, 1, 1) \
F(HasHoleyElements, 1, 1) \
F(HasObjectElements, 1, 1) \
F(HasPackedElements, 1, 1) \
F(HasSloppyArgumentsElements, 1, 1) \
F(HasSmiElements, 1, 1) \
F(HasSmiOrObjectElements, 1, 1) \
F(HaveSameMap, 2, 1) \
F(HeapObjectVerify, 1, 1) \
F(ICsAreEnabled, 0, 1) \
F(InNewSpace, 1, 1) \
F(InYoungGeneration, 1, 1) \
F(IsAsmWasmCode, 1, 1) \
F(IsConcurrentRecompilationSupported, 0, 1) \
F(WasmTierUpFunction, 2, 1) \
F(IsLiftoffFunction, 1, 1) \
F(IsThreadInWasm, 0, 1) \
F(IsWasmCode, 1, 1) \
F(IsWasmTrapHandlerEnabled, 0, 1) \
F(IsThreadInWasm, 0, 1) \
F(MapIteratorProtector, 0, 1) \
F(NeverOptimizeFunction, 1, 1) \
F(NotifyContextDisposed, 0, 1) \
F(OptimizeFunctionOnNextCall, -1, 1) \
F(OptimizeOsr, -1, 1) \
F(EnsureFeedbackVectorForFunction, 1, 1) \
F(PrepareFunctionForOptimization, 1, 1) \
F(PrintWithNameForAssert, 2, 1) \
F(RedirectToWasmInterpreter, 2, 1) \
......@@ -501,22 +503,21 @@ namespace internal {
F(SerializeWasmModule, 1, 1) \
F(SetAllocationTimeout, -1 /* 2 || 3 */, 1) \
F(SetForceSlowPath, 1, 1) \
F(SetIteratorProtector, 0, 1) \
F(SetWasmCompileControls, 2, 1) \
F(SetWasmInstantiateControls, 0, 1) \
F(ArraySpeciesProtector, 0, 1) \
F(MapIteratorProtector, 0, 1) \
F(SetIteratorProtector, 0, 1) \
F(SetWasmThreadsEnabled, 1, 1) \
F(StringIteratorProtector, 0, 1) \
F(SystemBreak, 0, 1) \
F(TraceEnter, 0, 1) \
F(TraceExit, 1, 1) \
F(TurbofanStaticAssert, 1, 1) \
F(UnblockConcurrentRecompilation, 0, 1) \
F(WasmGetNumberOfInstances, 1, 1) \
F(WasmNumInterpretedCalls, 1, 1) \
F(WasmTierUpFunction, 2, 1) \
F(WasmTraceMemory, 1, 1) \
F(SetWasmThreadsEnabled, 1, 1) \
F(TurbofanStaticAssert, 1, 1) \
F(EnableCodeLoggingForTesting, 0, 1)
I(DeoptimizeNow, 0, 1)
#define FOR_EACH_INTRINSIC_TYPEDARRAY(F, I) \
F(ArrayBufferDetach, 1, 1) \
......
......@@ -2275,7 +2275,7 @@ HEAP_TEST(Regress845060) {
// Run the test (which allocates results) until the original string was
// promoted to old space. Unmapping of from_space causes accesses to any
// stale raw pointers to crash.
CompileRun("while (%InNewSpace(str)) { str.split(''); }");
CompileRun("while (%InYoungGeneration(str)) { str.split(''); }");
CHECK(!Heap::InYoungGeneration(*v8::Utils::OpenHandle(*str)));
}
......
......@@ -242,6 +242,9 @@
# BUG(v8:9260)
'tools/profviz': [SKIP],
# Test doesn't work on 32-bit architectures (it would require a
# regexp pattern with too many captures).
'regress/regress-976627': [FAIL, ['arch == x64 or arch == arm64', PASS]],
}], # ALWAYS
['novfp3 == True', {
......
// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --allow-natives-syntax --young-generation-large-objects
// Regression test for chromium:976627: large JSRegExpResult objects (and in
// particular their elements fixed array) must be allocatable in large object
// space instead of being restricted to kMaxRegularHeapObjectSize.
// NOTE(review): this repro is fuzzer-derived; the odd names and dead code are
// kept verbatim so the original reproduction is not perturbed.
function v2() {
const v8 = Symbol || 9007199254740991;
function v9(v10,v11,v12) {
}
const v16 = String();
const v100 = String();//add
const v106 = String();// add
const v116 = String();// add
const v17 = Int32Array();
const v18 = Map();
const v19 = [];
const v20 = v18.values();
function v21(v22,v23,v24,v25,v26) {
}
function v28(v29,v30,v31) {
function v32(v33,v34,v35,v36) {
}
let v39 = 0;
do {
const v40 = v32();
function v99() {
}
} while (v39 < 8);
}
const v41 = Promise();
}
// Build a large array of pattern pieces, then grow it further so the RegExp
// constructed from it produces a match result too big for a regular-sized
// heap object (presumably via a pattern with very many captures — see the
// associated test-status entry about 32-bit capture limits).
const v46 = ["has",13.37,-9007199254740991,Reflect];
for (let v50 = 64; v50 <= 2000; v50++) {
v46.push(v50,v2);
}
const v54 = RegExp(v46);
const v55 = v54.exec();
// The actual regression check: the result's elements backing store must have
// been placed in (young or old) large object space rather than crashing.
assertTrue(%HasElementsInALargeObjectSpace(v55));
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment