Commit c9616b0f authored by Mythri, committed by Commit Bot

Infer the language mode instead of passing it as a parameter to builtins

It is better to infer the language mode from the context and the closure
than to get it from the feedback vector. This allows us to use some of
these builtins even when feedback vectors are not allocated. The language
mode is only needed to decide whether to throw an exception when a store
fails; that is a slow path, so deriving the language mode there is not on
the critical path.
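
As a rough, hypothetical sketch (stand-in types, not V8's actual classes),
the rule being implemented is: the effective language mode is the stricter
of the closure's SharedFunctionInfo language mode and the context's
ScopeInfo language mode.

#include <iostream>

enum class LanguageMode { kSloppy, kStrict };

// Stand-ins for the two places the mode is read from in the real code.
struct SharedFunctionInfo { bool is_strict; };
struct ScopeInfo { bool is_strict; };

// Strict wins: a strict closure is sufficient, and a sloppy closure can
// still end up strict via the context's scope info, so check both.
LanguageMode EffectiveLanguageMode(const SharedFunctionInfo& sfi,
                                   const ScopeInfo& scope_info) {
  return (sfi.is_strict || scope_info.is_strict) ? LanguageMode::kStrict
                                                 : LanguageMode::kSloppy;
}

int main() {
  bool strict =
      EffectiveLanguageMode({false}, {true}) == LanguageMode::kStrict;
  std::cout << strict << "\n";  // prints 1: the strict scope info wins
}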

Bug: v8:8580
Change-Id: Id0d8e78d35046f015b5cdc15d5fc3f8a17dd8757
Reviewed-on: https://chromium-review.googlesource.com/c/1421924
Commit-Queue: Mythri Alle <mythria@chromium.org>
Reviewed-by: Toon Verwaest <verwaest@chromium.org>
Cr-Commit-Position: refs/heads/master@{#59113}
parent c3504034
@@ -922,7 +922,7 @@ namespace internal {
TFJ(ProxyRevoke, 0, kReceiver) \
TFS(ProxyGetProperty, kProxy, kName, kReceiverValue, kOnNonExistent) \
TFS(ProxyHasProperty, kProxy, kName) \
TFS(ProxySetProperty, kProxy, kName, kValue, kReceiverValue, kLanguageMode) \
TFS(ProxySetProperty, kProxy, kName, kValue, kReceiverValue) \
\
/* Reflect */ \
ASM(ReflectApply, Dummy) \
@@ -545,7 +545,6 @@ TF_BUILTIN(ProxySetProperty, ProxiesCodeStubAssembler) {
Node* name = Parameter(Descriptor::kName);
Node* value = Parameter(Descriptor::kValue);
Node* receiver = Parameter(Descriptor::kReceiverValue);
TNode<Smi> language_mode = CAST(Parameter(Descriptor::kLanguageMode));
CSA_ASSERT(this, IsJSProxy(proxy));
@@ -597,8 +596,8 @@ TF_BUILTIN(ProxySetProperty, ProxiesCodeStubAssembler) {
BIND(&failure);
{
Label if_throw(this, Label::kDeferred);
Branch(SmiEqual(language_mode, SmiConstant(LanguageMode::kStrict)),
&if_throw, &success);
BranchIfStrictMode(context, &if_throw);
Goto(&success);
BIND(&if_throw);
ThrowTypeError(context, MessageTemplate::kProxyTrapReturnedFalsishFor,
@@ -613,10 +612,7 @@ TF_BUILTIN(ProxySetProperty, ProxiesCodeStubAssembler) {
{
Label failure(this), throw_error(this, Label::kDeferred);
Branch(SmiEqual(language_mode, SmiConstant(LanguageMode::kStrict)),
&throw_error, &failure);
BIND(&failure);
BranchIfStrictMode(context, &throw_error);
Return(UndefinedConstant());
BIND(&throw_error);
@@ -9965,19 +9965,50 @@ void CodeStubAssembler::UpdateFeedback(Node* feedback, Node* maybe_vector,
BIND(&end);
}
Node* CodeStubAssembler::GetLanguageMode(
TNode<SharedFunctionInfo> shared_function_info, Node* context) {
VARIABLE(var_language_mode, MachineRepresentation::kTaggedSigned,
SmiConstant(LanguageMode::kStrict));
Label language_mode_determined(this), language_mode_sloppy(this);
void CodeStubAssembler::BranchIfStrictMode(Node* context, Label* is_strict) {
// Load the closure from the stack. This function can be called from builtins
// that are called from bytecode handlers / TurboFan, so the stack can look
// like IET -> BytecodeHandler -> Builtins* or TF -> Builtins*. Iterate over
// frames as long as CommonFrameConstants::kContextOrFrameTypeOffset holds a
// frame-type marker. Once it holds a context, we are in an optimized or
// interpreted frame and it is safe to load the closure from
// JavaScriptFrameConstants::kFunctionOffset. Currently only one or two builtin
// frames sit on top of the BytecodeHandler/TF frame, so this could be
// optimized further if needed. It is only used when throwing an exception,
// so it is not on the critical path.
// The current frame is always a builtin frame, so safe to start from here.
VARIABLE(current_frame_pointer, MachineType::PointerRepresentation(),
LoadFramePointer());
Label loop(this, &current_frame_pointer), js_frame(this);
Goto(&loop);
BIND(&loop);
Node* frame_pointer =
Load(MachineType::Pointer(), current_frame_pointer.value());
Node* marker_or_context =
Load(MachineType::AnyTagged(), frame_pointer,
IntPtrConstant(CommonFrameConstants::kContextOrFrameTypeOffset));
current_frame_pointer.Bind(frame_pointer);
// Smi means it is a marker.
Branch(TaggedIsSmi(marker_or_context), &loop, &js_frame);
BIND(&js_frame);
// If it isn't a Smi, it is an interpreted or optimized frame.
TNode<JSFunction> closure =
CAST(Load(MachineType::AnyTagged(), current_frame_pointer.value(),
IntPtrConstant(JavaScriptFrameConstants::kFunctionOffset)));
TNode<SharedFunctionInfo> sfi =
CAST(LoadObjectField(closure, JSFunction::kSharedFunctionInfoOffset));
Label not_strict(this);
// Get the language mode from SFI
TNode<Uint32T> closure_is_strict =
DecodeWord32<SharedFunctionInfo::IsStrictBit>(LoadObjectField(
shared_function_info, SharedFunctionInfo::kFlagsOffset,
MachineType::Uint32()));
sfi, SharedFunctionInfo::kFlagsOffset, MachineType::Uint32()));
// If the closure is strict, we need not check the context's language mode.
GotoIf(closure_is_strict, &language_mode_determined);
GotoIf(closure_is_strict, is_strict);
// SFI::LanguageMode is sloppy, check if context has a stricter mode.
TNode<ScopeInfo> scope_info =
@@ -9985,35 +10016,15 @@ Node* CodeStubAssembler::GetLanguageMode(
// If there is no flags field, assume sloppy.
GotoIf(SmiLessThanOrEqual(LoadFixedArrayBaseLength(scope_info),
SmiConstant(ScopeInfo::Fields::kFlags)),
&language_mode_sloppy);
&not_strict);
TNode<Smi> flags = CAST(LoadFixedArrayElement(
scope_info, SmiConstant(ScopeInfo::Fields::kFlags)));
TNode<Uint32T> context_is_strict =
DecodeWord32<ScopeInfo::LanguageModeField>(SmiToInt32(flags));
GotoIf(context_is_strict, &language_mode_determined);
Goto(&language_mode_sloppy);
// Both Context::ScopeInfo::LanguageMode and SFI::LanguageMode are sloppy.
BIND(&language_mode_sloppy);
var_language_mode.Bind(SmiConstant(LanguageMode::kSloppy));
Goto(&language_mode_determined);
GotoIf(context_is_strict, is_strict);
Goto(&not_strict);
BIND(&language_mode_determined);
return var_language_mode.value();
}
Node* CodeStubAssembler::GetLanguageMode(TNode<JSFunction> closure,
Node* context) {
TNode<SharedFunctionInfo> sfi =
CAST(LoadObjectField(closure, JSFunction::kSharedFunctionInfoOffset));
return GetLanguageMode(sfi, context);
}
Node* CodeStubAssembler::GetLanguageMode(TNode<FeedbackVector> vector,
Node* context) {
TNode<SharedFunctionInfo> sfi =
CAST(LoadObjectField(vector, FeedbackVector::kSharedFunctionInfoOffset));
return GetLanguageMode(sfi, context);
BIND(&not_strict);
}
void CodeStubAssembler::ReportFeedbackUpdate(
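
For intuition, here is a self-contained sketch (hypothetical structs, not V8's
real frame layout or CSA nodes) of the frame walk that the new
BranchIfStrictMode performs: skip frames whose context-or-frame-type slot
holds a Smi marker (builtin frames), then read strictness from the first
JavaScript frame's closure. The real code additionally consults the context's
ScopeInfo, as in the sketch after the commit message.

#include <iostream>

// Stand-in for a stack frame. A real frame stores a saved frame pointer and,
// at kContextOrFrameTypeOffset, either a Smi frame-type marker (builtin/stub
// frames) or a tagged context (interpreted / optimized JavaScript frames).
struct Frame {
  const Frame* caller;     // saved frame pointer of the calling frame
  bool has_smi_marker;     // true for builtin frames, false for JS frames
  bool closure_is_strict;  // strictness of the frame's JSFunction (JS frames)
};

bool IsStrictMode(const Frame* fp) {
  // Walk towards the caller while the slot holds a frame-type marker.
  while (fp->has_smi_marker) fp = fp->caller;
  // The first non-marker frame is interpreted or optimized; read its closure.
  return fp->closure_is_strict;
}

int main() {
  Frame js_frame{nullptr, false, true};      // strict JS function
  Frame builtin_a{&js_frame, true, false};   // builtin called from it
  Frame builtin_b{&builtin_a, true, false};  // nested builtin on top
  std::cout << IsStrictMode(&builtin_b) << "\n";  // prints 1
}
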
@@ -2773,11 +2773,9 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
// Update the type feedback vector.
void UpdateFeedback(Node* feedback, Node* feedback_vector, Node* slot_id);
// Returns the stricter of the Context::ScopeInfo::LanguageMode and
// the language mode on the SFI.
Node* GetLanguageMode(TNode<SharedFunctionInfo> sfi, Node* context);
Node* GetLanguageMode(TNode<JSFunction> closure, Node* context);
Node* GetLanguageMode(TNode<FeedbackVector> vector, Node* context);
// Branch if either Context::ScopeInfo::LanguageMode or
// Closure::SFI::LanguageMode is strict.
void BranchIfStrictMode(Node* context, Label* if_strict);
// Report that there was a feedback update, performing any tasks that should
// be done after a feedback update.
@@ -1429,17 +1429,6 @@ void AccessorAssembler::HandleStoreICProtoHandler(
}
}
Node* AccessorAssembler::GetLanguageMode(Node* vector, Node* slot) {
VARIABLE(var_language_mode, MachineRepresentation::kTaggedSigned,
SmiConstant(LanguageMode::kStrict));
Label language_mode_determined(this);
BranchIfStrictMode(vector, slot, &language_mode_determined);
var_language_mode.Bind(SmiConstant(LanguageMode::kSloppy));
Goto(&language_mode_determined);
BIND(&language_mode_determined);
return var_language_mode.value();
}
void AccessorAssembler::HandleStoreToProxy(const StoreICParameters* p,
Node* proxy, Label* miss,
ElementSupport support_elements) {
@@ -1449,18 +1438,13 @@ void AccessorAssembler::HandleStoreToProxy(const StoreICParameters* p,
Label if_index(this), if_unique_name(this),
to_name_failed(this, Label::kDeferred);
// TODO(8580): Get the language mode lazily when required to avoid the
// computation of GetLanguageMode here. Also make the computation of
// language mode not dependent on vector.
Node* language_mode = GetLanguageMode(p->vector, p->slot);
if (support_elements == kSupportElements) {
TryToName(p->name, &if_index, &var_index, &if_unique_name, &var_unique,
&to_name_failed);
BIND(&if_unique_name);
CallBuiltin(Builtins::kProxySetProperty, p->context, proxy,
var_unique.value(), p->value, p->receiver, language_mode);
var_unique.value(), p->value, p->receiver);
Return(p->value);
// The index case is handled earlier by the runtime.
@@ -1475,7 +1459,7 @@ void AccessorAssembler::HandleStoreToProxy(const StoreICParameters* p,
} else {
Node* name = CallBuiltin(Builtins::kToName, p->context, p->name);
TailCallBuiltin(Builtins::kProxySetProperty, p->context, proxy, name,
p->value, p->receiver, language_mode);
p->value, p->receiver);
}
}
@@ -1985,43 +1969,6 @@ void AccessorAssembler::NameDictionaryNegativeLookup(Node* object,
BIND(&done);
}
void AccessorAssembler::BranchIfStrictMode(Node* vector, Node* slot,
Label* if_strict) {
Node* sfi =
LoadObjectField(vector, FeedbackVector::kSharedFunctionInfoOffset);
TNode<FeedbackMetadata> metadata = CAST(LoadObjectField(
sfi, SharedFunctionInfo::kOuterScopeInfoOrFeedbackMetadataOffset));
Node* slot_int = SmiToInt32(slot);
// See VectorICComputer::index().
const int kItemsPerWord = FeedbackMetadata::VectorICComputer::kItemsPerWord;
Node* word_index = Int32Div(slot_int, Int32Constant(kItemsPerWord));
Node* word_offset = Int32Mod(slot_int, Int32Constant(kItemsPerWord));
int32_t first_item = FeedbackMetadata::kHeaderSize - kHeapObjectTag;
Node* offset =
ElementOffsetFromIndex(ChangeInt32ToIntPtr(word_index), UINT32_ELEMENTS,
INTPTR_PARAMETERS, first_item);
Node* data = Load(MachineType::Int32(), metadata, offset);
// See VectorICComputer::decode().
const int kBitsPerItem = FeedbackMetadata::kFeedbackSlotKindBits;
Node* shift = Int32Mul(word_offset, Int32Constant(kBitsPerItem));
const int kMask = FeedbackMetadata::VectorICComputer::kMask;
Node* kind = Word32And(Word32Shr(data, shift), Int32Constant(kMask));
STATIC_ASSERT(FeedbackSlotKind::kStoreGlobalSloppy <=
FeedbackSlotKind::kLastSloppyKind);
STATIC_ASSERT(FeedbackSlotKind::kStoreKeyedSloppy <=
FeedbackSlotKind::kLastSloppyKind);
STATIC_ASSERT(FeedbackSlotKind::kStoreNamedSloppy <=
FeedbackSlotKind::kLastSloppyKind);
GotoIfNot(Int32LessThanOrEqual(kind, Int32Constant(static_cast<int>(
FeedbackSlotKind::kLastSloppyKind))),
if_strict);
}
void AccessorAssembler::InvalidateValidityCellIfPrototype(Node* map,
Node* bitfield2) {
Label is_prototype(this), cont(this);
@@ -123,8 +123,6 @@ class AccessorAssembler : public CodeStubAssembler {
void JumpIfDataProperty(Node* details, Label* writable, Label* readonly);
void BranchIfStrictMode(Node* vector, Node* slot, Label* if_strict);
void InvalidateValidityCellIfPrototype(Node* map, Node* bitfield2 = nullptr);
void OverwriteExistingFastDataProperty(Node* object, Node* object_map,
@@ -274,8 +272,6 @@ class AccessorAssembler : public CodeStubAssembler {
const OnFoundOnReceiver& on_found_on_receiver,
Label* miss, ICMode ic_mode);
Node* GetLanguageMode(Node* vector, Node* slot);
Node* PrepareValueForStore(Node* handler_word, Node* holder,
Representation representation, Node* value,
Label* bailout);
@@ -923,7 +923,7 @@ void KeyedStoreGenericAssembler::EmitGenericPropertyStore(
exit_point->Return(p->value);
}
} else {
BranchIfStrictMode(p->vector, p->slot, &strict);
BranchIfStrictMode(p->context, &strict);
exit_point->Return(p->value);
}
@@ -954,7 +954,7 @@ void KeyedStoreGenericAssembler::EmitGenericPropertyStore(
exit_point->Return(p->value);
}
} else {
BranchIfStrictMode(p->vector, p->slot, &strict);
BranchIfStrictMode(p->context, &strict);
exit_point->Return(p->value);
}
if (handle_strict) {