Commit cd33ec55 authored by Tobias Tebbi, committed by Commit Bot

[runtime] avoid trim/grow loop when adding and removing one element

We currently grow the backing store to (old_capacity*1.5)+16 if we exceed capacity, 
but shrink the capacity to the current length when 2*length <= capacity.
For short arrays (up to length 32), this can lead to a copy on every operation when using push/pop or push/shift.

Example:
Array of length 32, capacity 32
push
Array grown to length 33, capacity 32*1.5+16 = 64
pop
Array trimmed to length 32, capacity 32 because 2*32 <= 64
...

This CL leaves additional slack space when calling pop, and restricts trimming to backing stores with at least 16 elements, to prevent excessive re-trimming on short arrays.

Bug: 
Change-Id: I9dd13e5e2550c7ac819294c8e29f04c8855e02a4
Reviewed-on: https://chromium-review.googlesource.com/502911
Commit-Queue: Tobias Tebbi <tebbi@chromium.org>
Reviewed-by: Jakob Kummerow <jkummerow@chromium.org>
Cr-Commit-Position: refs/heads/master@{#45324}
parent 63984db0
...@@ -786,9 +786,13 @@ TF_BUILTIN(FastArrayPop, CodeStubAssembler) { ...@@ -786,9 +786,13 @@ TF_BUILTIN(FastArrayPop, CodeStubAssembler) {
Node* new_length = IntPtrSub(length, IntPtrConstant(1)); Node* new_length = IntPtrSub(length, IntPtrConstant(1));
// 4) Check that we're not supposed to shrink the backing store. // 4) Check that we're not supposed to shrink the backing store, as
// implemented in elements.cc:ElementsAccessorBase::SetLengthImpl.
Node* capacity = SmiUntag(LoadFixedArrayBaseLength(elements)); Node* capacity = SmiUntag(LoadFixedArrayBaseLength(elements));
GotoIf(IntPtrLessThan(IntPtrAdd(new_length, new_length), capacity), GotoIf(IntPtrLessThan(
IntPtrAdd(IntPtrAdd(new_length, new_length),
IntPtrConstant(JSObject::kMinAddedElementsCapacity)),
capacity),
&runtime); &runtime);
StoreObjectFieldNoWriteBarrier(receiver, JSArray::kLengthOffset, StoreObjectFieldNoWriteBarrier(receiver, JSArray::kLengthOffset,
......
...@@ -873,6 +873,9 @@ Reduction JSBuiltinReducer::ReduceArrayPop(Node* node) { ...@@ -873,6 +873,9 @@ Reduction JSBuiltinReducer::ReduceArrayPop(Node* node) {
Node* efalse = effect; Node* efalse = effect;
Node* vfalse; Node* vfalse;
{ {
// TODO(tebbi): We should trim the backing store if the capacity is too
// big, as implemented in elements.cc:ElementsAccessorBase::SetLengthImpl.
// Load the elements backing store from the {receiver}. // Load the elements backing store from the {receiver}.
Node* elements = efalse = graph()->NewNode( Node* elements = efalse = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForJSObjectElements()), simplified()->LoadField(AccessBuilder::ForJSObjectElements()),
......
...@@ -769,9 +769,15 @@ class ElementsAccessorBase : public ElementsAccessor { ...@@ -769,9 +769,15 @@ class ElementsAccessorBase : public ElementsAccessor {
backing_store = handle(array->elements(), isolate); backing_store = handle(array->elements(), isolate);
} }
} }
if (2 * length <= capacity) { if (2 * length + JSObject::kMinAddedElementsCapacity <= capacity) {
// If more than half the elements won't be used, trim the array. // If more than half the elements won't be used, trim the array.
isolate->heap()->RightTrimFixedArray(*backing_store, capacity - length); // Do not trim from short arrays to prevent frequent trimming on
// repeated pop operations.
// Leave some space to allow for subsequent push operations.
int elements_to_trim = length + 1 == old_length
? (capacity - length) / 2
: capacity - length;
isolate->heap()->RightTrimFixedArray(*backing_store, elements_to_trim);
} else { } else {
// Otherwise, fill the unused tail with holes. // Otherwise, fill the unused tail with holes.
BackingStore::cast(*backing_store)->FillWithHoles(length, old_length); BackingStore::cast(*backing_store)->FillWithHoles(length, old_length);
...@@ -2150,8 +2156,8 @@ class FastElementsAccessor : public ElementsAccessorBase<Subclass, KindTraits> { ...@@ -2150,8 +2156,8 @@ class FastElementsAccessor : public ElementsAccessorBase<Subclass, KindTraits> {
int hole_end) { int hole_end) {
Heap* heap = isolate->heap(); Heap* heap = isolate->heap();
Handle<BackingStore> dst_elms = Handle<BackingStore>::cast(backing_store); Handle<BackingStore> dst_elms = Handle<BackingStore>::cast(backing_store);
if (heap->CanMoveObjectStart(*dst_elms) && dst_index == 0 && if (len > JSArray::kMaxCopyElements && dst_index == 0 &&
len > JSArray::kMaxCopyElements) { heap->CanMoveObjectStart(*dst_elms)) {
// Update all the copies of this backing_store handle. // Update all the copies of this backing_store handle.
*dst_elms.location() = *dst_elms.location() =
BackingStore::cast(heap->LeftTrimFixedArray(*dst_elms, src_index)); BackingStore::cast(heap->LeftTrimFixedArray(*dst_elms, src_index));
......
...@@ -2357,10 +2357,12 @@ class JSObject: public JSReceiver { ...@@ -2357,10 +2357,12 @@ class JSObject: public JSReceiver {
// an access at key? // an access at key?
bool WouldConvertToSlowElements(uint32_t index); bool WouldConvertToSlowElements(uint32_t index);
static const uint32_t kMinAddedElementsCapacity = 16;
// Computes the new capacity when expanding the elements of a JSObject. // Computes the new capacity when expanding the elements of a JSObject.
static uint32_t NewElementsCapacity(uint32_t old_capacity) { static uint32_t NewElementsCapacity(uint32_t old_capacity) {
// (old_capacity + 50%) + 16 // (old_capacity + 50%) + kMinAddedElementsCapacity
return old_capacity + (old_capacity >> 1) + 16; return old_capacity + (old_capacity >> 1) + kMinAddedElementsCapacity;
} }
// These methods do not perform access checks! // These methods do not perform access checks!
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment