Hydrogenized KeyedLoadGeneric stub: when probing the KeyedLookupCache fails, call the runtime, don't stub-fail.

R=danno@chromium.org

Review URL: https://codereview.chromium.org/367343002

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@22254 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
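Before this change, the stub gave up with an eager deoptimization ("KeyedLoad fall-back") when none of the KeyedLookupCache probes matched. After it, each probe's IfBuilder joins a shared HIfContinuation: the hit branch loads the property through its cached field index, while the miss branch pushes the receiver and key and calls Runtime::kKeyedGetProperty. The sketch below is a minimal, self-contained analogue of that probe-then-runtime-fallback shape; it is not V8 code, and every name in it (SimpleLookupCache, RuntimeGetProperty, KeyedLoadGeneric as a plain function) is made up for illustration.

```cpp
// Minimal sketch (NOT V8 code): probe a small two-way lookup cache keyed on
// (map, name); load the property inline on a hit, and fall back to a
// simulated runtime call on a miss instead of bailing out of the stub.
#include <cstdint>
#include <cstdio>
#include <functional>
#include <string>
#include <unordered_map>

typedef uint32_t MapId;   // stand-in for a hidden-class (Map) identity
typedef int FieldIndex;   // stand-in for a cached in-object field offset

struct SimpleLookupCache {
  static const int kBuckets = 64;
  static const int kEntriesPerBucket = 2;  // two probes per bucket, as in V8

  struct Entry { MapId map; std::string key; FieldIndex field; bool used; };
  Entry entries[kBuckets * kEntriesPerBucket] = {};

  int Hash(MapId map, const std::string& key) const {
    return static_cast<int>((map ^ std::hash<std::string>()(key)) % kBuckets);
  }

  // Returns true and fills *field on a cache hit.
  bool Lookup(MapId map, const std::string& key, FieldIndex* field) const {
    int base = Hash(map, key) * kEntriesPerBucket;
    for (int probe = 0; probe < kEntriesPerBucket; ++probe) {
      const Entry& e = entries[base + probe];
      if (e.used && e.map == map && e.key == key) {
        *field = e.field;
        return true;
      }
    }
    return false;
  }
};

// Stand-in for Runtime::kKeyedGetProperty: slow but always correct.
FieldIndex RuntimeGetProperty(
    const std::unordered_map<std::string, FieldIndex>& properties,
    const std::string& key) {
  std::unordered_map<std::string, FieldIndex>::const_iterator it =
      properties.find(key);
  return it == properties.end() ? -1 : it->second;
}

FieldIndex KeyedLoadGeneric(
    const SimpleLookupCache& cache, MapId map,
    const std::unordered_map<std::string, FieldIndex>& properties,
    const std::string& key) {
  FieldIndex field;
  if (cache.Lookup(map, key, &field)) {
    return field;                              // hit: load property inline
  }
  return RuntimeGetProperty(properties, key);  // miss: call the runtime
}

int main() {
  SimpleLookupCache cache;  // empty, so every lookup takes the runtime path
  std::unordered_map<std::string, FieldIndex> properties;
  properties["x"] = 0;
  properties["y"] = 1;
  std::printf("y -> field %d\n",
              KeyedLoadGeneric(cache, 0x1234, properties, "y"));
  return 0;
}
```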
@@ -1654,50 +1654,67 @@ HValue* CodeStubGraphBuilder<KeyedLoadGenericElementStub>::BuildCodeStub() {
HValue* base_index = AddUncasted<HMul>(hash, Add<HConstant>(2));
base_index->ClearFlag(HValue::kCanOverflow);
IfBuilder lookup_if(this);
for (int probe = 0; probe < KeyedLookupCache::kEntriesPerBucket;
++probe) {
int probe_base = probe * KeyedLookupCache::kEntryLength;
HValue* map_index = AddUncasted<HAdd>(base_index,
Add<HConstant>(probe_base + KeyedLookupCache::kMapIndex));
map_index->ClearFlag(HValue::kCanOverflow);
HValue* key_index = AddUncasted<HAdd>(base_index,
Add<HConstant>(probe_base + KeyedLookupCache::kKeyIndex));
key_index->ClearFlag(HValue::kCanOverflow);
HValue* map_to_check = Add<HLoadKeyed>(cache_keys,
map_index,
static_cast<HValue*>(NULL),
FAST_ELEMENTS,
NEVER_RETURN_HOLE, 0);
lookup_if.If<HCompareObjectEqAndBranch>(map_to_check, map);
lookup_if.And();
HValue* key_to_check = Add<HLoadKeyed>(cache_keys,
key_index,
static_cast<HValue*>(NULL),
FAST_ELEMENTS,
NEVER_RETURN_HOLE, 0);
lookup_if.If<HCompareObjectEqAndBranch>(key_to_check, key);
lookup_if.Then();
{
ExternalReference cache_field_offsets_ref =
ExternalReference::keyed_lookup_cache_field_offsets(isolate());
HValue* cache_field_offsets = Add<HConstant>(cache_field_offsets_ref);
HValue* index = AddUncasted<HAdd>(hash,
Add<HConstant>(probe));
index->ClearFlag(HValue::kCanOverflow);
HValue* property_index = Add<HLoadKeyed>(cache_field_offsets,
index,
static_cast<HValue*>(NULL),
EXTERNAL_INT32_ELEMENTS,
NEVER_RETURN_HOLE, 0);
Push(property_index);
HIfContinuation inline_or_runtime_continuation(
graph()->CreateBasicBlock(), graph()->CreateBasicBlock());
{
IfBuilder lookup_ifs[KeyedLookupCache::kEntriesPerBucket];
for (int probe = 0; probe < KeyedLookupCache::kEntriesPerBucket;
++probe) {
IfBuilder* lookup_if = &lookup_ifs[probe];
lookup_if->Initialize(this);
int probe_base = probe * KeyedLookupCache::kEntryLength;
HValue* map_index = AddUncasted<HAdd>(
base_index,
Add<HConstant>(probe_base + KeyedLookupCache::kMapIndex));
map_index->ClearFlag(HValue::kCanOverflow);
HValue* key_index = AddUncasted<HAdd>(
base_index,
Add<HConstant>(probe_base + KeyedLookupCache::kKeyIndex));
key_index->ClearFlag(HValue::kCanOverflow);
HValue* map_to_check =
Add<HLoadKeyed>(cache_keys, map_index, static_cast<HValue*>(NULL),
FAST_ELEMENTS, NEVER_RETURN_HOLE, 0);
lookup_if->If<HCompareObjectEqAndBranch>(map_to_check, map);
lookup_if->And();
HValue* key_to_check =
Add<HLoadKeyed>(cache_keys, key_index, static_cast<HValue*>(NULL),
FAST_ELEMENTS, NEVER_RETURN_HOLE, 0);
lookup_if->If<HCompareObjectEqAndBranch>(key_to_check, key);
lookup_if->Then();
{
ExternalReference cache_field_offsets_ref =
ExternalReference::keyed_lookup_cache_field_offsets(isolate());
HValue* cache_field_offsets =
Add<HConstant>(cache_field_offsets_ref);
HValue* index = AddUncasted<HAdd>(hash, Add<HConstant>(probe));
index->ClearFlag(HValue::kCanOverflow);
HValue* property_index = Add<HLoadKeyed>(
cache_field_offsets, index, static_cast<HValue*>(NULL),
EXTERNAL_INT32_ELEMENTS, NEVER_RETURN_HOLE, 0);
Push(property_index);
}
lookup_if->Else();
}
for (int i = 0; i < KeyedLookupCache::kEntriesPerBucket; ++i) {
lookup_ifs[i].JoinContinuation(&inline_or_runtime_continuation);
}
lookup_if.Else();
}
Add<HDeoptimize>("KeyedLoad fall-back", Deoptimizer::EAGER);
Push(graph()->GetConstant0());
lookup_if.End();
Push(Add<HLoadFieldByIndex>(receiver, Pop()));
IfBuilder inline_or_runtime(this, &inline_or_runtime_continuation);
inline_or_runtime.Then();
{
// Found a cached index, load property inline.
Push(Add<HLoadFieldByIndex>(receiver, Pop()));
}
inline_or_runtime.Else();
{
// KeyedLookupCache miss; call runtime.
Add<HPushArguments>(receiver, key);
Push(Add<HCallRuntime>(
isolate()->factory()->empty_string(),
Runtime::FunctionForId(Runtime::kKeyedGetProperty), 2));
}
inline_or_runtime.End();
}
if_dict_properties.End();
}
@@ -1475,6 +1475,9 @@ class HGraphBuilder {
class IfBuilder V8_FINAL {
public:
// If using this constructor, Initialize() must be called explicitly!
IfBuilder();
explicit IfBuilder(HGraphBuilder* builder);
IfBuilder(HGraphBuilder* builder,
HIfContinuation* continuation);
@@ -1483,6 +1486,8 @@ class HGraphBuilder {
if (!finished_) End();
}
void Initialize(HGraphBuilder* builder);
template<class Condition>
Condition* If(HValue *p) {
Condition* compare = builder()->New<Condition>(p);
@@ -1625,9 +1630,14 @@ class HGraphBuilder {
void Return(HValue* value);
private:
void InitializeDontCreateBlocks(HGraphBuilder* builder);
HControlInstruction* AddCompare(HControlInstruction* compare);
HGraphBuilder* builder() const { return builder_; }
HGraphBuilder* builder() const {
ASSERT(builder_ != NULL); // Have you called "Initialize"?
return builder_;
}
void AddMergeAtJoinBlock(bool deopt);
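The hydrogen.h hunk above adds a default IfBuilder constructor plus an explicit Initialize(), so the stub can keep one IfBuilder per cache probe in a plain array and wire each one to the graph builder later; builder() now ASSERTs that Initialize() has been called. Below is a standalone sketch of that two-phase initialization pattern, with made-up names (IfLikeBuilder, GraphBuilder) rather than V8's actual classes.

```cpp
// Standalone sketch (NOT V8 code) of two-phase initialization: a default
// constructor plus Initialize(), so builders can live in a plain array before
// they are wired up, and a guard that catches uses before Initialize().
#include <cassert>
#include <cstdio>

struct GraphBuilder {};  // stand-in for HGraphBuilder

class IfLikeBuilder {
 public:
  // If using this constructor, Initialize() must be called explicitly!
  IfLikeBuilder() : builder_(nullptr) {}
  explicit IfLikeBuilder(GraphBuilder* builder) : builder_(nullptr) {
    Initialize(builder);
  }

  void Initialize(GraphBuilder* builder) { builder_ = builder; }

  void Then() {
    std::printf("then block uses builder %p\n",
                static_cast<void*>(builder()));
  }

 private:
  GraphBuilder* builder() const {
    // Mirrors the ASSERT added to IfBuilder::builder() in this patch.
    assert(builder_ != nullptr && "Have you called Initialize()?");
    return builder_;
  }
  GraphBuilder* builder_;
};

int main() {
  GraphBuilder graph;
  IfLikeBuilder branches[2];  // array elements need the default constructor
  for (IfLikeBuilder& b : branches) {
    b.Initialize(&graph);
    b.Then();
  }
  return 0;
}
```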