Commit d5c19aa9 authored by Benedikt Meurer's avatar Benedikt Meurer Committed by Commit Bot

[ic] Ensure that we make progress on KeyedLoadIC polymorphic name.

In the special case of KeyedLoadIC, where the key that is passed in is a
Name that is always the same we only checked for identity in both the
stub and the TurboFan case, which works fine for symbols and internalized
strings, but doesn't really work with non-internalized strings, where
the identity check will fail, the runtime will internalize the string,
and the IC will then see the original internalized string again and not
progress in the feedback lattice. This leads to tricky deoptimization
loops in TurboFan and constantly missing ICs.

This fixes the stub to always try to internalize strings first
when the identity check fails and then do the check again. If the
name is not found in the string table we miss, since in that case the
string cannot match the previously recorded feedback name (which is
always a unique name).

In TurboFan we represent these checks with new CheckEqualsSymbol and
CheckEqualsInternalizedString operators, which validate the previously
recorded feedback, and the CheckEqualsInternalizedString operator does
the attempt to internalize the input.

Bug: v8:6936, v8:6948, v8:6969
Change-Id: I3f3b4a587c67f00f7c4b60d239eb98a9626fe04a
Reviewed-on: https://chromium-review.googlesource.com/730224
Reviewed-by: Toon Verwaest <verwaest@chromium.org>
Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
Commit-Queue: Benedikt Meurer <bmeurer@chromium.org>
Cr-Commit-Position: refs/heads/master@{#48784}
parent 4cb88e3a
......@@ -201,6 +201,7 @@ namespace internal {
TFH(LoadICProtoArrayThrowIfNonexistent, LoadICProtoArray) \
TFH(KeyedLoadIC_Megamorphic, LoadWithVector) \
TFH(KeyedLoadIC_Miss, LoadWithVector) \
TFH(KeyedLoadIC_PolymorphicName, LoadWithVector) \
TFH(KeyedLoadIC_Slow, LoadWithVector) \
TFH(KeyedLoadIC_IndexedString, LoadWithVector) \
TFH(KeyedStoreIC_Megamorphic, StoreWithVector) \
......
......@@ -28,6 +28,7 @@ IC_BUILTIN(LoadICTrampoline)
IC_BUILTIN(LoadField)
IC_BUILTIN(KeyedLoadICTrampoline)
IC_BUILTIN(KeyedLoadIC_Megamorphic)
IC_BUILTIN(KeyedLoadIC_PolymorphicName)
IC_BUILTIN(StoreIC)
IC_BUILTIN(StoreICTrampoline)
IC_BUILTIN(KeyedStoreIC)
......
......@@ -235,7 +235,7 @@ CallDescriptor* Linkage::GetSimplifiedCDescriptor(
target_loc, // target location
locations.Build(), // location_sig
0, // stack_parameter_count
Operator::kNoProperties, // properties
Operator::kNoThrow, // properties
kCalleeSaveRegisters, // callee-saved registers
kCalleeSaveFPRegisters, // callee-saved fp regs
flags, "c-call");
......
......@@ -820,6 +820,12 @@ bool EffectControlLinearizer::TryWireInStateEffect(Node* node,
case IrOpcode::kConvertTaggedHoleToUndefined:
result = LowerConvertTaggedHoleToUndefined(node);
break;
case IrOpcode::kCheckEqualsInternalizedString:
LowerCheckEqualsInternalizedString(node, frame_state);
break;
case IrOpcode::kCheckEqualsSymbol:
LowerCheckEqualsSymbol(node, frame_state);
break;
case IrOpcode::kPlainPrimitiveToNumber:
result = LowerPlainPrimitiveToNumber(node);
break;
......@@ -2781,6 +2787,87 @@ Node* EffectControlLinearizer::LowerConvertTaggedHoleToUndefined(Node* node) {
return done.PhiAt(0);
}
// Lowers a CheckEqualsInternalizedString node: deoptimizes with reason
// kWrongName unless {val} refers to the same name as the feedback name
// {exp}, which is known to be an internalized string. Because {val} may
// be a ThinString pointing at {exp}, or a non-internalized String with
// the same contents, the slow path dereferences ThinStrings and
// otherwise attempts to internalize {val} before re-checking identity.
void EffectControlLinearizer::LowerCheckEqualsInternalizedString(
Node* node, Node* frame_state) {
Node* exp = node->InputAt(0);
Node* val = node->InputAt(1);
auto if_same = __ MakeLabel();
auto if_notsame = __ MakeDeferredLabel();
auto if_thinstring = __ MakeLabel();
auto if_notthinstring = __ MakeLabel();
// Check if {exp} and {val} are the same, which is the likely case.
__ Branch(__ WordEqual(exp, val), &if_same, &if_notsame);
__ Bind(&if_notsame);
{
// Now {val} could still be a non-internalized String that matches {exp}.
// A Smi can never match the heap-object feedback name, so deopt early.
__ DeoptimizeIf(DeoptimizeReason::kWrongName, ObjectIsSmi(val),
frame_state);
Node* val_map = __ LoadField(AccessBuilder::ForMap(), val);
Node* val_instance_type =
__ LoadField(AccessBuilder::ForMapInstanceType(), val_map);
// Check for the common case of ThinString first.
__ GotoIf(__ Word32Equal(val_instance_type,
__ Int32Constant(THIN_ONE_BYTE_STRING_TYPE)),
&if_thinstring);
__ Branch(
__ Word32Equal(val_instance_type, __ Int32Constant(THIN_STRING_TYPE)),
&if_thinstring, &if_notthinstring);
__ Bind(&if_notthinstring);
{
// Check that the {val} is a non-internalized String, if it's anything
// else it cannot match the recorded feedback {exp} anyways.
__ DeoptimizeIfNot(
DeoptimizeReason::kWrongName,
__ Word32Equal(__ Word32And(val_instance_type,
__ Int32Constant(kIsNotStringMask |
kIsNotInternalizedMask)),
__ Int32Constant(kStringTag | kNotInternalizedTag)),
frame_state);
// Try to find the {val} in the string table.
// NOTE(review): the C helper takes one tagged argument and returns a
// tagged result — presumably the internalized version of {val}, or a
// value that cannot equal {exp} when the lookup fails; confirm against
// ExternalReference::try_internalize_string_function.
MachineSignature::Builder builder(graph()->zone(), 1, 1);
builder.AddReturn(MachineType::AnyTagged());
builder.AddParam(MachineType::AnyTagged());
Node* try_internalize_string_function = __ ExternalConstant(
ExternalReference::try_internalize_string_function(isolate()));
CallDescriptor const* const desc =
Linkage::GetSimplifiedCDescriptor(graph()->zone(), builder.Build());
Node* val_internalized =
__ Call(common()->Call(desc), try_internalize_string_function, val);
// Now see if the results match.
__ DeoptimizeIfNot(DeoptimizeReason::kWrongName,
__ WordEqual(exp, val_internalized), frame_state);
__ Goto(&if_same);
}
__ Bind(&if_thinstring);
{
// The {val} is a ThinString, let's check the actual value.
Node* val_actual =
__ LoadField(AccessBuilder::ForThinStringActual(), val);
__ DeoptimizeIfNot(DeoptimizeReason::kWrongName,
__ WordEqual(exp, val_actual), frame_state);
__ Goto(&if_same);
}
}
__ Bind(&if_same);
}
// Lowers a CheckEqualsSymbol node to a deoptimizing pointer-identity
// check: symbols are unique, so reference equality against the recorded
// feedback symbol is sufficient.
void EffectControlLinearizer::LowerCheckEqualsSymbol(Node* node,
                                                     Node* frame_state) {
  Node* const expected = node->InputAt(0);
  Node* const value = node->InputAt(1);
  // Deoptimize with "wrong name" unless {value} is the very same symbol.
  __ DeoptimizeIfNot(DeoptimizeReason::kWrongName,
                     __ WordEqual(expected, value), frame_state);
}
Node* EffectControlLinearizer::AllocateHeapNumberWithValue(Node* value) {
Node* result = __ Allocate(NOT_TENURED, __ Int32Constant(HeapNumber::kSize));
__ StoreField(AccessBuilder::ForMap(), result, __ HeapNumberMapConstant());
......
......@@ -118,6 +118,8 @@ class V8_EXPORT_PRIVATE EffectControlLinearizer {
Node* LowerCheckFloat64Hole(Node* node, Node* frame_state);
Node* LowerCheckNotTaggedHole(Node* node, Node* frame_state);
Node* LowerConvertTaggedHoleToUndefined(Node* node);
void LowerCheckEqualsInternalizedString(Node* node, Node* frame_state);
void LowerCheckEqualsSymbol(Node* node, Node* frame_state);
Node* LowerTypeOf(Node* node);
Node* LowerToBoolean(Node* node);
Node* LowerPlainPrimitiveToNumber(Node* node);
......
......@@ -464,11 +464,7 @@ Reduction JSNativeContextSpecialization::ReduceGlobalAccess(
// Ensure that {index} matches the specified {name} (if {index} is given).
if (index != nullptr) {
Node* check = graph()->NewNode(simplified()->ReferenceEqual(), index,
jsgraph()->HeapConstant(name));
effect = graph()->NewNode(
simplified()->CheckIf(DeoptimizeReason::kIndexNameMismatch), check,
effect, control);
effect = BuildCheckEqualsName(name, index, effect, control);
}
// Check if we have a {receiver} to validate. If so, we need to check that
......@@ -706,11 +702,7 @@ Reduction JSNativeContextSpecialization::ReduceNamedAccess(
// Ensure that {index} matches the specified {name} (if {index} is given).
if (index != nullptr) {
Node* check = graph()->NewNode(simplified()->ReferenceEqual(), index,
jsgraph()->HeapConstant(name));
effect = graph()->NewNode(
simplified()->CheckIf(DeoptimizeReason::kIndexNameMismatch), check,
effect, control);
effect = BuildCheckEqualsName(name, index, effect, control);
}
// Collect call nodes to rewire exception edges.
......@@ -2454,6 +2446,18 @@ Node* JSNativeContextSpecialization::BuildExtendPropertiesBackingStore(
return a.Finish();
}
// Constructs the subgraph that verifies {value} matches the previously
// recorded feedback {name}. {name} must be a unique name (symbol or
// internalized string), so a dedicated checked-equality operator is
// selected per kind. Returns the new effect node.
Node* JSNativeContextSpecialization::BuildCheckEqualsName(Handle<Name> name,
                                                          Node* value,
                                                          Node* effect,
                                                          Node* control) {
  DCHECK(name->IsUniqueName());
  Operator const* op;
  if (name->IsSymbol()) {
    op = simplified()->CheckEqualsSymbol();
  } else {
    op = simplified()->CheckEqualsInternalizedString();
  }
  return graph()->NewNode(op, jsgraph()->HeapConstant(name), value, effect,
                          control);
}
bool JSNativeContextSpecialization::CanTreatHoleAsUndefined(
MapHandles const& receiver_maps) {
// Check if all {receiver_maps} either have one of the initial Array.prototype
......
......@@ -162,6 +162,11 @@ class JSNativeContextSpecialization final : public AdvancedReducer {
Node* BuildExtendPropertiesBackingStore(Handle<Map> map, Node* properties,
Node* effect, Node* control);
// Construct appropriate subgraph to check that the {value} matches
// the previously recorded {name} feedback.
Node* BuildCheckEqualsName(Handle<Name> name, Node* value, Node* effect,
Node* control);
// Checks if we can turn the hole into undefined when loading an element
// from an object with one of the {receiver_maps}; sets up appropriate
// code dependencies and might use the array protector cell.
......
......@@ -340,6 +340,8 @@
V(CheckHeapObject) \
V(CheckFloat64Hole) \
V(CheckNotTaggedHole) \
V(CheckEqualsInternalizedString) \
V(CheckEqualsSymbol) \
V(CompareMaps) \
V(ConvertTaggedHoleToUndefined) \
V(TypeOf) \
......
......@@ -2887,6 +2887,10 @@ class RepresentationSelector {
}
return;
}
case IrOpcode::kCheckEqualsSymbol:
case IrOpcode::kCheckEqualsInternalizedString:
return VisitBinop(node, UseInfo::AnyTagged(),
MachineRepresentation::kNone);
case IrOpcode::kMapGuard:
// Eliminate MapGuard nodes here.
return VisitUnused(node);
......
......@@ -601,28 +601,30 @@ DeoptimizeReason DeoptimizeReasonOf(const Operator* op) {
V(SpeculativeNumberLessThan) \
V(SpeculativeNumberLessThanOrEqual)
#define CHECKED_OP_LIST(V) \
V(CheckBounds, 2, 1) \
V(CheckHeapObject, 1, 1) \
V(CheckInternalizedString, 1, 1) \
V(CheckNumber, 1, 1) \
V(CheckReceiver, 1, 1) \
V(CheckSmi, 1, 1) \
V(CheckString, 1, 1) \
V(CheckSeqString, 1, 1) \
V(CheckSymbol, 1, 1) \
V(CheckNotTaggedHole, 1, 1) \
V(CheckedInt32Add, 2, 1) \
V(CheckedInt32Sub, 2, 1) \
V(CheckedInt32Div, 2, 1) \
V(CheckedInt32Mod, 2, 1) \
V(CheckedUint32Div, 2, 1) \
V(CheckedUint32Mod, 2, 1) \
V(CheckedUint32ToInt32, 1, 1) \
V(CheckedUint32ToTaggedSigned, 1, 1) \
V(CheckedInt32ToTaggedSigned, 1, 1) \
V(CheckedTaggedSignedToInt32, 1, 1) \
V(CheckedTaggedToTaggedSigned, 1, 1) \
#define CHECKED_OP_LIST(V) \
V(CheckBounds, 2, 1) \
V(CheckHeapObject, 1, 1) \
V(CheckInternalizedString, 1, 1) \
V(CheckNumber, 1, 1) \
V(CheckReceiver, 1, 1) \
V(CheckSmi, 1, 1) \
V(CheckString, 1, 1) \
V(CheckSeqString, 1, 1) \
V(CheckSymbol, 1, 1) \
V(CheckNotTaggedHole, 1, 1) \
V(CheckEqualsInternalizedString, 2, 0) \
V(CheckEqualsSymbol, 2, 0) \
V(CheckedInt32Add, 2, 1) \
V(CheckedInt32Sub, 2, 1) \
V(CheckedInt32Div, 2, 1) \
V(CheckedInt32Mod, 2, 1) \
V(CheckedUint32Div, 2, 1) \
V(CheckedUint32Mod, 2, 1) \
V(CheckedUint32ToInt32, 1, 1) \
V(CheckedUint32ToTaggedSigned, 1, 1) \
V(CheckedInt32ToTaggedSigned, 1, 1) \
V(CheckedTaggedSignedToInt32, 1, 1) \
V(CheckedTaggedToTaggedSigned, 1, 1) \
V(CheckedTaggedToTaggedPointer, 1, 1)
struct SimplifiedOperatorGlobalCache final {
......
......@@ -447,6 +447,9 @@ class V8_EXPORT_PRIVATE SimplifiedOperatorBuilder final
const Operator* CheckNotTaggedHole();
const Operator* ConvertTaggedHoleToUndefined();
const Operator* CheckEqualsInternalizedString();
const Operator* CheckEqualsSymbol();
const Operator* ObjectIsArrayBufferView();
const Operator* ObjectIsCallable();
const Operator* ObjectIsConstructor();
......
......@@ -85,6 +85,10 @@ Reduction TypedOptimization::Reduce(Node* node) {
return ReduceCheckString(node);
case IrOpcode::kCheckSeqString:
return ReduceCheckSeqString(node);
case IrOpcode::kCheckEqualsInternalizedString:
return ReduceCheckEqualsInternalizedString(node);
case IrOpcode::kCheckEqualsSymbol:
return ReduceCheckEqualsSymbol(node);
case IrOpcode::kLoadField:
return ReduceLoadField(node);
case IrOpcode::kNumberCeil:
......@@ -201,6 +205,28 @@ Reduction TypedOptimization::ReduceCheckSeqString(Node* node) {
return NoChange();
}
// Removes a CheckEqualsInternalizedString node when the type system
// already proves the value matches the expected (singleton) string type;
// in that case the check is redundant and only the effect is forwarded.
Reduction TypedOptimization::ReduceCheckEqualsInternalizedString(Node* node) {
  Type* const expected_type =
      NodeProperties::GetType(NodeProperties::GetValueInput(node, 0));
  Type* const value_type =
      NodeProperties::GetType(NodeProperties::GetValueInput(node, 1));
  if (value_type->Is(expected_type)) {
    return Replace(NodeProperties::GetEffectInput(node));
  }
  // TODO(turbofan): Should we also try to optimize the
  // non-internalized String case for the value here?
  return NoChange();
}
// Removes a CheckEqualsSymbol node when the type system already proves
// the value can only be the expected symbol; the check then always
// succeeds and only the effect is forwarded.
Reduction TypedOptimization::ReduceCheckEqualsSymbol(Node* node) {
  Type* const expected_type =
      NodeProperties::GetType(NodeProperties::GetValueInput(node, 0));
  Type* const value_type =
      NodeProperties::GetType(NodeProperties::GetValueInput(node, 1));
  if (value_type->Is(expected_type)) {
    return Replace(NodeProperties::GetEffectInput(node));
  }
  return NoChange();
}
Reduction TypedOptimization::ReduceLoadField(Node* node) {
Node* const object = NodeProperties::GetValueInput(node, 0);
Type* const object_type = NodeProperties::GetType(object);
......
......@@ -41,6 +41,8 @@ class V8_EXPORT_PRIVATE TypedOptimization final
Reduction ReduceCheckNumber(Node* node);
Reduction ReduceCheckString(Node* node);
Reduction ReduceCheckSeqString(Node* node);
Reduction ReduceCheckEqualsInternalizedString(Node* node);
Reduction ReduceCheckEqualsSymbol(Node* node);
Reduction ReduceLoadField(Node* node);
Reduction ReduceNumberFloor(Node* node);
Reduction ReduceNumberRoundop(Node* node);
......
......@@ -1944,6 +1944,12 @@ Type* Typer::Visitor::TypeConvertTaggedHoleToUndefined(Node* node) {
return typer_->operation_typer()->ConvertTaggedHoleToUndefined(type);
}
// CheckEqualsInternalizedString has no value output (it only deoptimizes
// or passes through the effect chain), so the typer must never visit it.
Type* Typer::Visitor::TypeCheckEqualsInternalizedString(Node* node) {
UNREACHABLE();
}
Type* Typer::Visitor::TypeCheckEqualsSymbol(Node* node) { UNREACHABLE(); }
// Allocate nodes carry their result type on the operator itself.
Type* Typer::Visitor::TypeAllocate(Node* node) {
return AllocateTypeOf(node->op());
}
......
......@@ -1242,6 +1242,7 @@ void Verifier::Visitor::Check(Node* node) {
case IrOpcode::kCheckSymbol:
CheckValueInputIs(node, 0, Type::Any());
CheckTypeIs(node, Type::Symbol());
break;
case IrOpcode::kCheckedInt32Add:
case IrOpcode::kCheckedInt32Sub:
......@@ -1275,6 +1276,17 @@ void Verifier::Visitor::Check(Node* node) {
CheckTypeIs(node, Type::NonInternal());
break;
case IrOpcode::kCheckEqualsInternalizedString:
CheckValueInputIs(node, 0, Type::InternalizedString());
CheckValueInputIs(node, 1, Type::Any());
CheckNotTyped(node);
break;
case IrOpcode::kCheckEqualsSymbol:
CheckValueInputIs(node, 0, Type::Symbol());
CheckValueInputIs(node, 1, Type::Any());
CheckNotTyped(node);
break;
case IrOpcode::kLoadFieldByIndex:
CheckValueInputIs(node, 0, Type::Any());
CheckValueInputIs(node, 1, Type::SignedSmall());
......
......@@ -22,7 +22,6 @@ namespace internal {
V(ExpectedSmi, "Expected smi") \
V(ForcedDeoptToRuntime, "Forced deopt to runtime") \
V(Hole, "hole") \
V(IndexNameMismatch, "index and name do not match in access") \
V(InstanceMigrationFailed, "instance migration failed") \
V(InsufficientTypeFeedbackForCall, "Insufficient type feedback for call") \
V(InsufficientTypeFeedbackForCallWithArguments, \
......@@ -78,6 +77,7 @@ namespace internal {
V(ValueMismatch, "value mismatch") \
V(WrongInstanceType, "wrong instance type") \
V(WrongMap, "wrong map") \
V(WrongName, "wrong name") \
V(UndefinedOrNullInForIn, "null or undefined in for-in") \
V(UndefinedOrNullInToObject, "null or undefined in ToObject")
......
......@@ -2285,15 +2285,47 @@ void AccessorAssembler::KeyedLoadIC(const LoadICParameters* p) {
BIND(&try_polymorphic_name);
{
// We might have a name in feedback, and a fixed array in the next slot.
Node* name = p->name;
Comment("KeyedLoadIC_try_polymorphic_name");
GotoIfNot(WordEqual(feedback, p->name), &miss);
// If the name comparison succeeded, we know we have a fixed array with
// at least one map/handler pair.
Node* array = LoadFeedbackVectorSlot(p->vector, p->slot, kPointerSize,
SMI_PARAMETERS);
HandlePolymorphicCase(receiver_map, array, &if_handler, &var_handler, &miss,
1);
VARIABLE(var_name, MachineRepresentation::kTagged, name);
VARIABLE(var_index, MachineType::PointerRepresentation());
Label if_polymorphic_name(this, &var_name), if_internalized(this),
if_notinternalized(this, Label::kDeferred);
// Fast-case: The recorded {feedback} matches the {name}.
GotoIf(WordEqual(feedback, name), &if_polymorphic_name);
// Try to internalize the {name} if it isn't already.
TryToName(name, &miss, &var_index, &if_internalized, &var_name, &miss,
&if_notinternalized);
BIND(&if_internalized);
{
// The {var_name} now contains a unique name.
Branch(WordEqual(feedback, var_name.value()), &if_polymorphic_name,
&miss);
}
BIND(&if_notinternalized);
{
// Try to internalize the {name}.
Node* function = ExternalConstant(
ExternalReference::try_internalize_string_function(isolate()));
var_name.Bind(CallCFunction1(MachineType::AnyTagged(),
MachineType::AnyTagged(), function, name));
Goto(&if_internalized);
}
BIND(&if_polymorphic_name);
{
// If the name comparison succeeded, we know we have a fixed array with
// at least one map/handler pair.
Node* name = var_name.value();
TailCallBuiltin(Builtins::kKeyedLoadIC_PolymorphicName, p->context,
p->receiver, name, p->slot, p->vector);
}
}
BIND(&miss);
{
Comment("KeyedLoadIC_miss");
......@@ -2355,6 +2387,46 @@ void AccessorAssembler::KeyedLoadICGeneric(const LoadICParameters* p) {
}
}
// Specialized KeyedLoadIC entry for the "polymorphic name" state: the
// feedback slot holds a single Name that the caller has already verified
// to equal {p->name} (after internalization, if needed). Only the
// map/handler pairs in the next vector slot have to be consulted here.
void AccessorAssembler::KeyedLoadICPolymorphicName(const LoadICParameters* p) {
VARIABLE(var_handler, MachineRepresentation::kTagged);
Label if_handler(this, &var_handler), miss(this, Label::kDeferred);
Node* receiver = p->receiver;
Node* receiver_map = LoadReceiverMap(receiver);
Node* name = p->name;
Node* vector = p->vector;
Node* slot = p->slot;
Node* context = p->context;
// When we get here, we know that the {name} matches the recorded
// feedback name in the {vector} and can safely be used for the
// LoadIC handler logic below.
CSA_ASSERT(this, IsName(name));
CSA_ASSERT(this, Word32BinaryNot(IsDeprecatedMap(receiver_map)));
CSA_ASSERT(this, WordEqual(name, LoadFeedbackVectorSlot(vector, slot, 0,
SMI_PARAMETERS)));
// Check if we have a matching handler for the {receiver_map}.
// The map/handler fixed array lives one pointer past the recorded name.
Node* array =
LoadFeedbackVectorSlot(vector, slot, kPointerSize, SMI_PARAMETERS);
HandlePolymorphicCase(receiver_map, array, &if_handler, &var_handler, &miss,
1);
BIND(&if_handler);
{
// Found a handler for the receiver map; run the shared LoadIC logic.
ExitPoint direct_exit(this);
HandleLoadICHandlerCase(p, var_handler.value(), &miss, &direct_exit,
kOnlyProperties);
}
BIND(&miss);
{
Comment("KeyedLoadIC_miss");
TailCallRuntime(Runtime::kKeyedLoadIC_Miss, context, receiver, name, slot,
vector);
}
}
void AccessorAssembler::StoreIC(const StoreICParameters* p) {
VARIABLE(var_handler, MachineRepresentation::kTagged);
Label if_handler(this, &var_handler), try_polymorphic(this, Label::kDeferred),
......@@ -2709,6 +2781,19 @@ void AccessorAssembler::GenerateKeyedLoadIC_Megamorphic() {
KeyedLoadICGeneric(&p);
}
void AccessorAssembler::GenerateKeyedLoadIC_PolymorphicName() {
typedef LoadWithVectorDescriptor Descriptor;
Node* receiver = Parameter(Descriptor::kReceiver);
Node* name = Parameter(Descriptor::kName);
Node* slot = Parameter(Descriptor::kSlot);
Node* vector = Parameter(Descriptor::kVector);
Node* context = Parameter(Descriptor::kContext);
LoadICParameters p(context, receiver, name, slot, vector);
KeyedLoadICPolymorphicName(&p);
}
void AccessorAssembler::GenerateStoreIC() {
typedef StoreWithVectorDescriptor Descriptor;
......
......@@ -31,6 +31,7 @@ class AccessorAssembler : public CodeStubAssembler {
void GenerateKeyedLoadIC();
void GenerateKeyedLoadICTrampoline();
void GenerateKeyedLoadIC_Megamorphic();
void GenerateKeyedLoadIC_PolymorphicName();
void GenerateStoreIC();
void GenerateStoreICTrampoline();
......@@ -114,6 +115,7 @@ class AccessorAssembler : public CodeStubAssembler {
void LoadGlobalIC(const LoadICParameters* p, TypeofMode typeof_mode);
void KeyedLoadIC(const LoadICParameters* p);
void KeyedLoadICGeneric(const LoadICParameters* p);
void KeyedLoadICPolymorphicName(const LoadICParameters* p);
void StoreIC(const StoreICParameters* p);
void StoreGlobalIC_PropertyCellCase(Node* property_cell, Node* value,
ExitPoint* exit_point, Label* miss);
......
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --allow-natives-syntax --opt
// Regression test: a KeyedLoadIC keyed by a string that is always equal
// (but not always identical) to one name must not cause repeated
// deoptimization of the optimized code.
var o = {};
// Keyed load whose key is always a string equal to 'cc'.
function foo(s) { return o[s]; }
// Warm up the IC with a freshly computed (non-literal) 'cc'.
var s = 'c' + 'c';
foo(s);
foo(s);
%OptimizeFunctionOnNextCall(foo);
assertEquals(undefined, foo(s));
assertOptimized(foo);
// A fresh, equal-but-not-identical string must not deoptimize foo...
assertEquals(undefined, foo('c' + 'c'));
assertOptimized(foo);
// ...and neither must an equal string obtained via slicing.
assertEquals(undefined, foo('ccddeeffgg'.slice(0, 2)));
assertOptimized(foo);
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment