Commit 306bb1ff authored by Camillo Bruni, committed by Commit Bot

[runtime] Merge HandleSlackTracking and AllocateJSObjectFromMap

This CL fixes uses of HandleSlackTracking, which previously would write
fields twice. Additional checks ensure that only proper initial maps are
used with slack tracking.

Change-Id: Ifb03297635ed6b873eb8b27fec9794c9d36b71b6
Reviewed-on: https://chromium-review.googlesource.com/664810
Reviewed-by: Michael Stanton <mvstanton@chromium.org>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Commit-Queue: Camillo Bruni <cbruni@chromium.org>
Cr-Commit-Position: refs/heads/master@{#49331}
parent 5aff110b
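For orientation before the hunks: the caller-side shape of the change, excerpted roughly from the EmitFastNewObject hunk below (not standalone code), is:

```cpp
// Before: allocate first, then run slack tracking as a second pass that
// rewrites the in-object fields.
Node* object = AllocateJSObjectFromMap(initial_map, properties.value());
HandleSlackTracking(context, object, initial_map, JSObject::kHeaderSize);
return object;

// After: a single call; AllocateJSObjectFromMap takes a SlackTrackingMode
// and initializes every field exactly once. No context is needed anymore.
return AllocateJSObjectFromMap(initial_map, properties.value(), nullptr,
                               kNone, kWithSlackTracking);
```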
@@ -66,8 +66,7 @@ Node* AsyncBuiltinsAssembler::Await(
     StoreMapNoWriteBarrier(wrapped_value, promise_map);
     InitializeJSObjectFromMap(
         wrapped_value, promise_map,
-        IntPtrConstant(JSPromise::kSizeWithEmbedderFields),
-        EmptyFixedArrayConstant(), EmptyFixedArrayConstant());
+        IntPtrConstant(JSPromise::kSizeWithEmbedderFields));
     PromiseInit(wrapped_value);
   }
@@ -77,8 +76,7 @@ Node* AsyncBuiltinsAssembler::Await(
     StoreMapNoWriteBarrier(throwaway, promise_map);
     InitializeJSObjectFromMap(
         throwaway, promise_map,
-        IntPtrConstant(JSPromise::kSizeWithEmbedderFields),
-        EmptyFixedArrayConstant(), EmptyFixedArrayConstant());
+        IntPtrConstant(JSPromise::kSizeWithEmbedderFields));
     PromiseInit(throwaway);
   }
......
@@ -87,7 +87,8 @@ Node* ConstructorBuiltinsAssembler::EmitFastNewClosure(Node* shared_info,
       TimesPointerSize(LoadMapInstanceSize(function_map));
   Node* result = Allocate(instance_size_in_bytes);
   StoreMapNoWriteBarrier(result, function_map);
-  InitializeJSObjectBody(result, function_map, instance_size_in_bytes,
-                         JSFunction::kSizeWithoutPrototype);
+  InitializeJSObjectBodyNoSlackTracking(result, function_map,
+                                        instance_size_in_bytes,
+                                        JSFunction::kSizeWithoutPrototype);
 
   // Initialize the rest of the function.
@@ -236,12 +237,8 @@ Node* ConstructorBuiltinsAssembler::EmitFastNewObject(Node* context,
   }
 
   BIND(&instantiate_map);
-  Node* object = AllocateJSObjectFromMap(initial_map, properties.value());
-  // Perform in-object slack tracking if requested.
-  HandleSlackTracking(context, object, initial_map, JSObject::kHeaderSize);
-  return object;
+  return AllocateJSObjectFromMap(initial_map, properties.value(), nullptr,
+                                 kNone, kWithSlackTracking);
 }
 
 Node* ConstructorBuiltinsAssembler::EmitFastNewFunctionContext(
......
@@ -760,9 +760,9 @@ TF_BUILTIN(CreateGeneratorObject, ObjectBuiltinsAssembler) {
   Node* register_file = AllocateFixedArray(HOLEY_ELEMENTS, size);
   FillFixedArrayWithValue(HOLEY_ELEMENTS, register_file, IntPtrConstant(0),
                           size, Heap::kUndefinedValueRootIndex);
-  Node* const result = AllocateJSObjectFromMap(maybe_map);
+  // TODO(cbruni): support start_offset to avoid double initialization.
+  Node* result = AllocateJSObjectFromMap(maybe_map, nullptr, nullptr, kNone,
+                                         kWithSlackTracking);
   StoreObjectFieldNoWriteBarrier(result, JSGeneratorObject::kFunctionOffset,
                                  closure);
   StoreObjectFieldNoWriteBarrier(result, JSGeneratorObject::kContextOffset,
@@ -774,7 +774,6 @@ TF_BUILTIN(CreateGeneratorObject, ObjectBuiltinsAssembler) {
   Node* executing = SmiConstant(JSGeneratorObject::kGeneratorExecuting);
   StoreObjectFieldNoWriteBarrier(result, JSGeneratorObject::kContinuationOffset,
                                  executing);
-  HandleSlackTracking(context, result, maybe_map, JSGeneratorObject::kSize);
   Return(result);
 
   BIND(&runtime);
......
@@ -1399,6 +1399,13 @@ Node* CodeStubAssembler::LoadMapEnumLength(SloppyTNode<Map> map) {
   return DecodeWordFromWord32<Map::EnumLengthBits>(bit_field3);
 }
 
+Node* CodeStubAssembler::LoadMapBackPointer(SloppyTNode<Map> map) {
+  Node* object = LoadObjectField(map, Map::kConstructorOrBackPointerOffset);
+  return Select(IsMap(object), [=] { return object; },
+                [=] { return UndefinedConstant(); },
+                MachineRepresentation::kTagged);
+}
+
 TNode<IntPtrT> CodeStubAssembler::LoadJSReceiverIdentityHash(
     SloppyTNode<Object> receiver, Label* if_no_hash) {
   TVARIABLE(IntPtrT, var_hash);
@@ -2468,20 +2475,24 @@ void CodeStubAssembler::InitializeStructBody(Node* object, Node* map,
   StoreFieldsNoWriteBarrier(start_address, end_address, filler);
 }
 
-Node* CodeStubAssembler::AllocateJSObjectFromMap(Node* map, Node* properties,
-                                                 Node* elements,
-                                                 AllocationFlags flags) {
+Node* CodeStubAssembler::AllocateJSObjectFromMap(
+    Node* map, Node* properties, Node* elements, AllocationFlags flags,
+    SlackTrackingMode slack_tracking_mode) {
   CSA_ASSERT(this, IsMap(map));
-  Node* size = TimesPointerSize(LoadMapInstanceSize(map));
-  Node* object = AllocateInNewSpace(size, flags);
+  CSA_ASSERT(this, Word32BinaryNot(IsJSFunctionMap(map)));
+  CSA_ASSERT(this, Word32BinaryNot(InstanceTypeEqual(LoadMapInstanceType(map),
+                                                     JS_GLOBAL_OBJECT_TYPE)));
+  Node* instance_size = TimesPointerSize(LoadMapInstanceSize(map));
+  Node* object = AllocateInNewSpace(instance_size, flags);
   StoreMapNoWriteBarrier(object, map);
-  InitializeJSObjectFromMap(object, map, size, properties, elements);
+  InitializeJSObjectFromMap(object, map, instance_size, properties, elements,
+                            slack_tracking_mode);
   return object;
 }
 
-void CodeStubAssembler::InitializeJSObjectFromMap(Node* object, Node* map,
-                                                  Node* size, Node* properties,
-                                                  Node* elements) {
+void CodeStubAssembler::InitializeJSObjectFromMap(
+    Node* object, Node* map, Node* instance_size, Node* properties,
+    Node* elements, SlackTrackingMode slack_tracking_mode) {
   CSA_SLOW_ASSERT(this, IsMap(map));
   // This helper assumes that the object is in new-space, as guarded by the
   // check in AllocateJSObjectFromMap.
@@ -2503,22 +2514,78 @@ void CodeStubAssembler::InitializeJSObjectFromMap(Node* object, Node* map,
     CSA_ASSERT(this, IsFixedArray(elements));
     StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset, elements);
   }
-  InitializeJSObjectBody(object, map, size, JSObject::kHeaderSize);
+  if (slack_tracking_mode == kNoSlackTracking) {
+    InitializeJSObjectBodyNoSlackTracking(object, map, instance_size);
+  } else {
+    DCHECK_EQ(slack_tracking_mode, kWithSlackTracking);
+    InitializeJSObjectBodyWithSlackTracking(object, map, instance_size);
+  }
 }
 
-void CodeStubAssembler::InitializeJSObjectBody(Node* object, Node* map,
-                                               Node* size, int start_offset) {
+void CodeStubAssembler::InitializeJSObjectBodyNoSlackTracking(
+    Node* object, Node* map, Node* instance_size, int start_offset) {
+  STATIC_ASSERT(Map::kNoSlackTracking == 0);
+  CSA_ASSERT(this,
+             IsClearWord32<Map::ConstructionCounter>(LoadMapBitField3(map)));
+  InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), instance_size,
+                           Heap::kUndefinedValueRootIndex);
+}
+
+void CodeStubAssembler::InitializeJSObjectBodyWithSlackTracking(
+    Node* object, Node* map, Node* instance_size) {
   CSA_SLOW_ASSERT(this, IsMap(map));
-  // TODO(cbruni): activate in-object slack tracking machinery.
-  Comment("InitializeJSObjectBody");
-
-  Node* filler = UndefinedConstant();
-  // Calculate the untagged field addresses.
-  object = BitcastTaggedToWord(object);
-  Node* start_address =
-      IntPtrAdd(object, IntPtrConstant(start_offset - kHeapObjectTag));
-  Node* end_address =
-      IntPtrSub(IntPtrAdd(object, size), IntPtrConstant(kHeapObjectTag));
-  StoreFieldsNoWriteBarrier(start_address, end_address, filler);
+  Comment("InitializeJSObjectBodyNoSlackTracking");
+
+  // Perform in-object slack tracking if requested.
+  int start_offset = JSObject::kHeaderSize;
+  Node* bit_field3 = LoadMapBitField3(map);
+  Label end(this), slack_tracking(this), complete(this, Label::kDeferred);
+  STATIC_ASSERT(Map::kNoSlackTracking == 0);
+  GotoIf(IsSetWord32<Map::ConstructionCounter>(bit_field3), &slack_tracking);
+  Comment("No slack tracking");
+  InitializeJSObjectBodyNoSlackTracking(object, map, instance_size);
+  Goto(&end);
+
+  BIND(&slack_tracking);
+  {
+    Comment("Decrease construction counter");
+    // Slack tracking is only done on initial maps.
+    CSA_ASSERT(this, IsUndefined(LoadMapBackPointer(map)));
+    STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
+    Node* new_bit_field3 = Int32Sub(
+        bit_field3, Int32Constant(1 << Map::ConstructionCounter::kShift));
+    StoreObjectFieldNoWriteBarrier(map, Map::kBitField3Offset, new_bit_field3,
+                                   MachineRepresentation::kWord32);
+    STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
+
+    Node* unused_fields = LoadObjectField(map, Map::kUnusedPropertyFieldsOffset,
+                                          MachineType::Uint8());
+    Node* used_size = IntPtrSub(
+        instance_size, TimesPointerSize(ChangeUint32ToWord(unused_fields)));
+
+    Comment("Initialize filler fields");
+    InitializeFieldsWithRoot(object, used_size, instance_size,
+                             Heap::kOnePointerFillerMapRootIndex);
+
+    Comment("Initialize undefined fields");
+    InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), used_size,
+                             Heap::kUndefinedValueRootIndex);
+
+    GotoIf(IsClearWord32<Map::ConstructionCounter>(new_bit_field3), &complete);
+    Goto(&end);
+  }
+
+  // Finalize the instance size.
+  BIND(&complete);
+  {
+    // CompleteInobjectSlackTracking doesn't allocate and thus doesn't need a
+    // context.
+    CallRuntime(Runtime::kCompleteInobjectSlackTrackingForMap,
+                NoContextConstant(), map);
+    Goto(&end);
+  }
+
+  BIND(&end);
 }
void CodeStubAssembler::StoreFieldsNoWriteBarrier(Node* start_address, void CodeStubAssembler::StoreFieldsNoWriteBarrier(Node* start_address,
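The hunk above replaces the old two-pass initialization (fill every field with undefined, then let HandleSlackTracking rewrite the used prefix and the slack tail) with a single pass selected by SlackTrackingMode. As a rough mental model only — all names below are invented for illustration and this is not V8 code — the bookkeeping looks like this:

```cpp
// Minimal, self-contained model of in-object slack tracking. A freshly
// created "map" reserves extra in-object fields (slack) plus a small
// construction counter. Each allocation writes every field exactly once
// ("undefined" for the used prefix, "filler" for the slack suffix) and
// decrements the counter; when the counter hits zero the instance size is
// finalized and the remaining slack is dropped.
#include <cstdio>
#include <string>
#include <vector>

struct ToyMap {
  int instance_fields = 8;       // total in-object fields currently reserved
  int unused_fields = 3;         // slack at the end of the object
  int construction_counter = 4;  // tracked allocations remaining

  bool slack_tracking_in_progress() const { return construction_counter > 0; }

  void complete_slack_tracking() {
    // Runtime::kCompleteInobjectSlackTrackingForMap plays this role in the CL:
    // shrink the instance size so future objects carry no wasted fields.
    instance_fields -= unused_fields;
    unused_fields = 0;
  }
};

std::vector<std::string> AllocateFromMap(ToyMap& map) {
  std::vector<std::string> object(map.instance_fields);
  const int used = map.instance_fields - map.unused_fields;
  // Single initialization pass: each field is written once. The old path
  // filled the whole object with undefined and then rewrote the fields in
  // HandleSlackTracking, which is the double write the CL removes.
  for (int i = 0; i < used; ++i) object[i] = "undefined";
  for (int i = used; i < map.instance_fields; ++i) object[i] = "filler";
  if (map.slack_tracking_in_progress() && --map.construction_counter == 0) {
    map.complete_slack_tracking();
  }
  return object;
}

int main() {
  ToyMap map;
  for (int i = 0; i < 6; ++i) {
    std::vector<std::string> obj = AllocateFromMap(map);
    std::printf("allocation %d: %zu fields, counter=%d\n", i, obj.size(),
                map.construction_counter);
  }
  return 0;
}
```

Running the toy shows the counter ticking down over the first four allocations and the instance size shrinking from 8 to 5 fields once tracking completes, which is the behavior the real counter in Map::bit_field3 drives.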
@@ -7942,79 +8009,6 @@ Node* CodeStubAssembler::CreateWeakCellInFeedbackVector(Node* feedback_vector,
   return cell;
 }
 
-void CodeStubAssembler::HandleSlackTracking(Node* context, Node* object,
-                                            Node* initial_map,
-                                            int start_offset) {
-  Node* instance_size_words = ChangeUint32ToWord(LoadObjectField(
-      initial_map, Map::kInstanceSizeOffset, MachineType::Uint8()));
-  Node* instance_size = TimesPointerSize(instance_size_words);
-
-  // Perform in-object slack tracking if requested.
-  Node* bit_field3 = LoadMapBitField3(initial_map);
-  Label end(this), slack_tracking(this), finalize(this, Label::kDeferred);
-  STATIC_ASSERT(Map::kNoSlackTracking == 0);
-  GotoIf(IsSetWord32<Map::ConstructionCounter>(bit_field3), &slack_tracking);
-
-  // Initialize remaining fields.
-  {
-    Comment("no slack tracking");
-    InitializeFieldsWithRoot(object, IntPtrConstant(start_offset),
-                             instance_size, Heap::kUndefinedValueRootIndex);
-    Goto(&end);
-  }
-
-  {
-    BIND(&slack_tracking);
-
-    // Decrease generous allocation count.
-    STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
-    Comment("update allocation count");
-    Node* new_bit_field3 = Int32Sub(
-        bit_field3, Int32Constant(1 << Map::ConstructionCounter::kShift));
-    StoreObjectFieldNoWriteBarrier(initial_map, Map::kBitField3Offset,
-                                   new_bit_field3,
-                                   MachineRepresentation::kWord32);
-    GotoIf(IsClearWord32<Map::ConstructionCounter>(new_bit_field3), &finalize);
-
-    Node* unused_fields = LoadObjectField(
-        initial_map, Map::kUnusedPropertyFieldsOffset, MachineType::Uint8());
-    Node* used_size = IntPtrSub(
-        instance_size, TimesPointerSize(ChangeUint32ToWord(unused_fields)));
-
-    Comment("initialize filler fields (no finalize)");
-    InitializeFieldsWithRoot(object, used_size, instance_size,
-                             Heap::kOnePointerFillerMapRootIndex);
-
-    Comment("initialize undefined fields (no finalize)");
-    InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), used_size,
-                             Heap::kUndefinedValueRootIndex);
-    Goto(&end);
-  }
-
-  {
-    // Finalize the instance size.
-    BIND(&finalize);
-
-    Node* unused_fields = LoadObjectField(
-        initial_map, Map::kUnusedPropertyFieldsOffset, MachineType::Uint8());
-    Node* used_size = IntPtrSub(
-        instance_size, TimesPointerSize(ChangeUint32ToWord(unused_fields)));
-
-    Comment("initialize filler fields (finalize)");
-    InitializeFieldsWithRoot(object, used_size, instance_size,
-                             Heap::kOnePointerFillerMapRootIndex);
-
-    Comment("initialize undefined fields (finalize)");
-    InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), used_size,
-                             Heap::kUndefinedValueRootIndex);
-
-    CallRuntime(Runtime::kFinalizeInstanceSize, context, initial_map);
-    Goto(&end);
-  }
-
-  BIND(&end);
-}
-
 Node* CodeStubAssembler::BuildFastLoop(
     const CodeStubAssembler::VariableList& vars, Node* start_index,
     Node* end_index, const FastLoopBody& body, int increment,
......
@@ -87,6 +87,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
     kAllowLargeObjectAllocation = 1 << 2,
   };
 
+  enum SlackTrackingMode { kWithSlackTracking, kNoSlackTracking };
+
   typedef base::Flags<AllocationFlag> AllocationFlags;
 
   enum ParameterMode { SMI_PARAMETERS, INTPTR_PARAMETERS };
@@ -512,6 +514,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
   TNode<Object> LoadMapConstructor(SloppyTNode<Map> map);
   // Load the EnumLength of a Map.
   Node* LoadMapEnumLength(SloppyTNode<Map> map);
+  // Load the back-pointer of a Map.
+  Node* LoadMapBackPointer(SloppyTNode<Map> map);
   // Load the identity hash of a JSReceiver.
   TNode<IntPtrT> LoadJSReceiverIdentityHash(SloppyTNode<Object> receiver,
                                             Label* if_no_hash = nullptr);
@@ -737,15 +741,21 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
   Node* AllocateStruct(Node* map, AllocationFlags flags = kNone);
   void InitializeStructBody(Node* object, Node* map, Node* size,
                             int start_offset = Struct::kHeaderSize);
-  Node* AllocateJSObjectFromMap(Node* map, Node* properties = nullptr,
-                                Node* elements = nullptr,
-                                AllocationFlags flags = kNone);
-  void InitializeJSObjectFromMap(Node* object, Node* map, Node* size,
-                                 Node* properties = nullptr,
-                                 Node* elements = nullptr);
-  void InitializeJSObjectBody(Node* object, Node* map, Node* size,
-                              int start_offset = JSObject::kHeaderSize);
+  Node* AllocateJSObjectFromMap(
+      Node* map, Node* properties = nullptr, Node* elements = nullptr,
+      AllocationFlags flags = kNone,
+      SlackTrackingMode slack_tracking_mode = kNoSlackTracking);
+  void InitializeJSObjectFromMap(
+      Node* object, Node* map, Node* instance_size, Node* properties = nullptr,
+      Node* elements = nullptr,
+      SlackTrackingMode slack_tracking_mode = kNoSlackTracking);
+  void InitializeJSObjectBodyWithSlackTracking(Node* object, Node* map,
+                                               Node* instance_size);
+  void InitializeJSObjectBodyNoSlackTracking(
+      Node* object, Node* map, Node* instance_size,
+      int start_offset = JSObject::kHeaderSize);
 
   // Allocate a JSArray without elements and initialize the header fields.
@@ -1670,11 +1680,6 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
   // Create a new AllocationSite and install it into a feedback vector.
   Node* CreateAllocationSiteInFeedbackVector(Node* feedback_vector, Node* slot);
 
-  // Given a recently allocated object {object}, with map {initial_map},
-  // initialize remaining fields appropriately to comply with slack tracking.
-  void HandleSlackTracking(Node* context, Node* object, Node* initial_map,
-                           int start_offset);
-
   enum class IndexAdvanceMode { kPre, kPost };
   typedef std::function<void(Node* index)> FastLoopBody;
......
@@ -333,7 +333,7 @@ bool IntrinsicHasNoSideEffect(Runtime::FunctionId id) {
   V(Call)                       \
   V(MaxSmi)                     \
   V(NewObject)                  \
-  V(FinalizeInstanceSize)       \
+  V(CompleteInobjectSlackTrackingForMap) \
   V(HasInPrototypeChain)        \
   V(StringMaxLength)
......
@@ -4097,6 +4097,8 @@ bool Map::IsInobjectSlackTrackingInProgress() const {
 void Map::InobjectSlackTrackingStep() {
+  // Slack tracking should only be performed on an initial map.
+  DCHECK(GetBackPointer()->IsUndefined(GetIsolate()));
   if (!IsInobjectSlackTrackingInProgress()) return;
   int counter = construction_counter();
   set_construction_counter(counter - 1);
......
@@ -12328,11 +12328,11 @@ static void StopSlackTracking(Map* map, void* data) {
 }
 
 void Map::CompleteInobjectSlackTracking() {
+  DisallowHeapAllocation no_gc;
   // Has to be an initial map.
   DCHECK(GetBackPointer()->IsUndefined(GetIsolate()));
 
   int slack = UnusedPropertyFields();
-  DisallowHeapAllocation no_gc;
   TransitionsAccessor transitions(this, &no_gc);
   transitions.TraverseTransitionTree(&GetMinInobjectSlack, &slack);
   if (slack != 0) {
......
@@ -675,8 +675,8 @@ RUNTIME_FUNCTION(Runtime_NewObject) {
   RETURN_RESULT_OR_FAILURE(isolate, JSObject::New(target, new_target));
 }
 
-RUNTIME_FUNCTION(Runtime_FinalizeInstanceSize) {
+RUNTIME_FUNCTION(Runtime_CompleteInobjectSlackTrackingForMap) {
+  DisallowHeapAllocation no_gc;
   HandleScope scope(isolate);
   DCHECK_EQ(1, args.length());
......
@@ -407,7 +407,7 @@ namespace internal {
   F(ToFastProperties, 1, 1)     \
   F(AllocateHeapNumber, 0, 1)   \
   F(NewObject, 2, 1)            \
-  F(FinalizeInstanceSize, 1, 1) \
+  F(CompleteInobjectSlackTrackingForMap, 1, 1) \
   F(LoadMutableDouble, 2, 1)    \
   F(TryMigrateInstance, 1, 1)   \
   F(IsJSGlobalProxy, 1, 1)      \
......