Commit ca2def98 authored by ishell@chromium.org

Map::Normalize() introduced as the single entry point for map normalization, and Map::NotifyLeafMapLayoutChange() made private.

R=verwaest@chromium.org

Review URL: https://codereview.chromium.org/263663002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@21117 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 5170a008
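
For orientation, a minimal before/after sketch of a normalizing call site (a hypothetical caller, not code from this CL; isolate, fast_map and mode are assumed to already be in scope, and the identifiers mirror the hunks below):

// Before this CL: callers fetched the per-native-context cache and queried it themselves.
Handle<NormalizedMapCache> cache(
    isolate->context()->native_context()->normalized_map_cache());
Handle<Map> new_map = NormalizedMapCache::Get(cache, fast_map, mode);

// After this CL: one entry point hides the cache lookup, the CopyNormalized()
// fallback and the NotifyLeafMapLayoutChange() notification.
Handle<Map> new_map = Map::Normalize(fast_map, mode);
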
@@ -2108,9 +2108,8 @@ void Genesis::InstallJSFunctionResultCaches() {
 void Genesis::InitializeNormalizedMapCaches() {
-  Handle<FixedArray> array(
-      factory()->NewFixedArray(NormalizedMapCache::kEntries, TENURED));
-  native_context()->set_normalized_map_cache(NormalizedMapCache::cast(*array));
+  native_context()->set_normalized_map_cache(
+      *NormalizedMapCache::New(isolate()));
 }
......
@@ -1015,7 +1015,7 @@ void NormalizedMapCache::NormalizedMapCacheVerify() {
   FixedArray::cast(this)->FixedArrayVerify();
   if (FLAG_enable_slow_asserts) {
     for (int i = 0; i < length(); i++) {
-      Object* e = get(i);
+      Object* e = FixedArray::get(i);
       if (e->IsMap()) {
         Map::cast(e)->SharedMapVerify();
       } else {
......
@@ -850,13 +850,23 @@ bool Object::IsJSFunctionResultCache() {
 bool Object::IsNormalizedMapCache() {
-  if (!IsFixedArray()) return false;
-  if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
+  return NormalizedMapCache::IsNormalizedMapCache(this);
+}
+
+
+int NormalizedMapCache::GetIndex(Handle<Map> map) {
+  return map->Hash() % NormalizedMapCache::kEntries;
+}
+
+
+bool NormalizedMapCache::IsNormalizedMapCache(Object* obj) {
+  if (!obj->IsFixedArray()) return false;
+  if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
     return false;
   }
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
-    reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
+    reinterpret_cast<NormalizedMapCache*>(obj)->NormalizedMapCacheVerify();
   }
 #endif
   return true;
......
@@ -4659,48 +4659,30 @@ PropertyAttributes JSObject::GetElementAttributeWithoutInterceptor(
 }
 
 
-Handle<Map> NormalizedMapCache::Get(Handle<NormalizedMapCache> cache,
-                                    Handle<Map> fast_map,
-                                    PropertyNormalizationMode mode) {
-  int index = fast_map->Hash() % kEntries;
-  Handle<Object> result = handle(cache->get(index), cache->GetIsolate());
-  if (result->IsMap() &&
-      Handle<Map>::cast(result)->EquivalentToForNormalization(
-          *fast_map, mode)) {
-#ifdef VERIFY_HEAP
-    if (FLAG_verify_heap) {
-      Handle<Map>::cast(result)->SharedMapVerify();
-    }
-#endif
-#ifdef ENABLE_SLOW_ASSERTS
-    if (FLAG_enable_slow_asserts) {
-      // The cached map should match newly created normalized map bit-by-bit,
-      // except for the code cache, which can contain some ics which can be
-      // applied to the shared map.
-      Handle<Map> fresh = Map::CopyNormalized(
-          fast_map, mode, SHARED_NORMALIZED_MAP);
+Handle<NormalizedMapCache> NormalizedMapCache::New(Isolate* isolate) {
+  Handle<FixedArray> array(
+      isolate->factory()->NewFixedArray(kEntries, TENURED));
+  return Handle<NormalizedMapCache>::cast(array);
+}
 
-      ASSERT(memcmp(fresh->address(),
-                    Handle<Map>::cast(result)->address(),
-                    Map::kCodeCacheOffset) == 0);
-      STATIC_ASSERT(Map::kDependentCodeOffset ==
-                    Map::kCodeCacheOffset + kPointerSize);
-      int offset = Map::kDependentCodeOffset + kPointerSize;
-      ASSERT(memcmp(fresh->address() + offset,
-                    Handle<Map>::cast(result)->address() + offset,
-                    Map::kSize - offset) == 0);
-    }
-#endif
-    return Handle<Map>::cast(result);
+
+MaybeHandle<Map> NormalizedMapCache::Get(Handle<Map> fast_map,
+                                         PropertyNormalizationMode mode) {
+  DisallowHeapAllocation no_gc;
+  Object* value = FixedArray::get(GetIndex(fast_map));
+  if (!value->IsMap() ||
+      !Map::cast(value)->EquivalentToForNormalization(*fast_map, mode)) {
+    return MaybeHandle<Map>();
   }
+  return handle(Map::cast(value));
 }
-
-  Isolate* isolate = cache->GetIsolate();
-  Handle<Map> map = Map::CopyNormalized(fast_map, mode, SHARED_NORMALIZED_MAP);
-  ASSERT(map->is_dictionary_map());
-  cache->set(index, *map);
-  isolate->counters()->normalized_maps()->Increment();
-
-  return map;
+
+
+void NormalizedMapCache::Set(Handle<Map> fast_map,
+                             Handle<Map> normalized_map) {
+  DisallowHeapAllocation no_gc;
+  ASSERT(normalized_map->is_dictionary_map());
+  FixedArray::set(GetIndex(fast_map), *normalized_map);
+}
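
Get() now returns a MaybeHandle<Map>, so a cache miss is explicit at the call site. A sketch of the intended get-or-compute pattern, assuming cache, fast_map and mode are in scope (the same shape Map::Normalize() uses further down in this diff):

Handle<Map> new_map;
if (!cache->Get(fast_map, mode).ToHandle(&new_map)) {
  // Miss: empty slot, a non-map value, or a cached map that is not
  // equivalent under the requested normalization mode.
  new_map = Map::CopyNormalized(fast_map, mode, SHARED_NORMALIZED_MAP);
  cache->Set(fast_map, new_map);
}

Because GetIndex() is just Hash() % kEntries and Set() overwrites its slot unconditionally, colliding fast maps evict each other; the cache is a best-effort memo, not an exhaustive table.
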
@@ -4733,6 +4715,7 @@ void JSObject::NormalizeProperties(Handle<JSObject> object,
   Isolate* isolate = object->GetIsolate();
   HandleScope scope(isolate);
   Handle<Map> map(object->map());
+  Handle<Map> new_map = Map::Normalize(map, mode);
 
   // Allocate new content.
   int real_size = map->NumberOfOwnDescriptors();
@@ -4787,12 +4770,6 @@ void JSObject::NormalizeProperties(Handle<JSObject> object,
   // Copy the next enumeration index from instance descriptor.
   dictionary->SetNextEnumerationIndex(real_size + 1);
 
-  Handle<NormalizedMapCache> cache(
-      isolate->context()->native_context()->normalized_map_cache());
-  Handle<Map> new_map = NormalizedMapCache::Get(
-      cache, handle(object->map()), mode);
-  ASSERT(new_map->is_dictionary_map());
-
   // From here on we cannot fail and we shouldn't GC anymore.
   DisallowHeapAllocation no_allocation;
@@ -4811,8 +4788,6 @@ void JSObject::NormalizeProperties(Handle<JSObject> object,
   // the left-over space to avoid races with the sweeper thread.
   object->synchronized_set_map(*new_map);
 
-  map->NotifyLeafMapLayoutChange();
-
   object->set_properties(*dictionary);
   isolate->counters()->props_to_dictionary()->Increment();
@@ -7239,6 +7214,50 @@ Handle<Map> Map::RawCopy(Handle<Map> map, int instance_size) {
 }
 
 
+Handle<Map> Map::Normalize(Handle<Map> fast_map,
+                           PropertyNormalizationMode mode) {
+  ASSERT(!fast_map->is_dictionary_map());
+
+  Isolate* isolate = fast_map->GetIsolate();
+  Handle<NormalizedMapCache> cache(
+      isolate->context()->native_context()->normalized_map_cache());
+
+  Handle<Map> new_map;
+  if (cache->Get(fast_map, mode).ToHandle(&new_map)) {
+#ifdef VERIFY_HEAP
+    if (FLAG_verify_heap) {
+      new_map->SharedMapVerify();
+    }
+#endif
+#ifdef ENABLE_SLOW_ASSERTS
+    if (FLAG_enable_slow_asserts) {
+      // The cached map should match newly created normalized map bit-by-bit,
+      // except for the code cache, which can contain some ics which can be
+      // applied to the shared map.
+      Handle<Map> fresh = Map::CopyNormalized(
+          fast_map, mode, SHARED_NORMALIZED_MAP);
+
+      ASSERT(memcmp(fresh->address(),
+                    new_map->address(),
+                    Map::kCodeCacheOffset) == 0);
+      STATIC_ASSERT(Map::kDependentCodeOffset ==
+                    Map::kCodeCacheOffset + kPointerSize);
+      int offset = Map::kDependentCodeOffset + kPointerSize;
+      ASSERT(memcmp(fresh->address() + offset,
+                    new_map->address() + offset,
+                    Map::kSize - offset) == 0);
+    }
+#endif
+  } else {
+    new_map = Map::CopyNormalized(fast_map, mode, SHARED_NORMALIZED_MAP);
+    cache->Set(fast_map, new_map);
+    isolate->counters()->normalized_maps()->Increment();
+  }
+  fast_map->NotifyLeafMapLayoutChange();
+  return new_map;
+}
+
+
 Handle<Map> Map::CopyNormalized(Handle<Map> map,
                                 PropertyNormalizationMode mode,
                                 NormalizedMapSharingMode sharing) {
......
@@ -4628,18 +4628,27 @@ class ScopeInfo : public FixedArray {
 // needs very limited number of distinct normalized maps.
 class NormalizedMapCache: public FixedArray {
  public:
-  static const int kEntries = 64;
+  static Handle<NormalizedMapCache> New(Isolate* isolate);
 
-  static Handle<Map> Get(Handle<NormalizedMapCache> cache,
-                         Handle<Map> fast_map,
-                         PropertyNormalizationMode mode);
+  MUST_USE_RESULT MaybeHandle<Map> Get(Handle<Map> fast_map,
+                                       PropertyNormalizationMode mode);
+  void Set(Handle<Map> fast_map, Handle<Map> normalized_map);
 
   void Clear();
 
   // Casting
   static inline NormalizedMapCache* cast(Object* obj);
+
+  static inline bool IsNormalizedMapCache(Object* obj);
 
   DECLARE_VERIFIER(NormalizedMapCache)
+
+ private:
+  static const int kEntries = 64;
+
+  static inline int GetIndex(Handle<Map> map);
+
+  // The following declarations hide base class methods.
+  Object* get(int index);
+  void set(int index, Object* value);
 };
@@ -6134,6 +6143,8 @@ class Map: public HeapObject {
                                               PropertyAttributes attributes,
                                               const char* reason);
 
+  static Handle<Map> Normalize(Handle<Map> map, PropertyNormalizationMode mode);
+
   // Returns the constructor name (the name (possibly, inferred name) of the
   // function that was used to instantiate the object).
   String* constructor_name();
@@ -6327,10 +6338,6 @@ class Map: public HeapObject {
   static Handle<Map> CopyForFreeze(Handle<Map> map);
 
-  static Handle<Map> CopyNormalized(Handle<Map> map,
-                                    PropertyNormalizationMode mode,
-                                    NormalizedMapSharingMode sharing);
-
   inline void AppendDescriptor(Descriptor* desc);
 
   // Returns a copy of the map, with all transitions dropped from the
@@ -6424,11 +6431,6 @@ class Map: public HeapObject {
     return type == JS_GLOBAL_OBJECT_TYPE || type == JS_BUILTINS_OBJECT_TYPE;
   }
 
-  // Fires when the layout of an object with a leaf map changes.
-  // This includes adding transitions to the leaf map or changing
-  // the descriptor array.
-  inline void NotifyLeafMapLayoutChange();
-
   inline bool CanOmitMapChecks();
 
   static void AddDependentCompilationInfo(Handle<Map> map,
@@ -6578,6 +6580,15 @@ class Map: public HeapObject {
                                  MaybeHandle<Name> maybe_name,
                                  SimpleTransitionFlag simple_flag = FULL_TRANSITION);
 
+  static Handle<Map> CopyNormalized(Handle<Map> map,
+                                    PropertyNormalizationMode mode,
+                                    NormalizedMapSharingMode sharing);
+
+  // Fires when the layout of an object with a leaf map changes.
+  // This includes adding transitions to the leaf map or changing
+  // the descriptor array.
+  inline void NotifyLeafMapLayoutChange();
+
   // Zaps the contents of backing data structures. Note that the
   // heap verifier (i.e. VerifyMarkingVisitor) relies on zapping of objects
   // holding weak references when incremental marking is used, because it also
......
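
Condensed view of the resulting header surface (a sketch assembled from the objects.h hunks above, with unrelated members elided): NormalizedMapCache keeps only New/Get/Set public, while kEntries, GetIndex() and the raw FixedArray accessors become private; on Map, Normalize() is the new public entry point and CopyNormalized()/NotifyLeafMapLayoutChange() move to the private section.

class NormalizedMapCache: public FixedArray {
 public:
  static Handle<NormalizedMapCache> New(Isolate* isolate);
  MUST_USE_RESULT MaybeHandle<Map> Get(Handle<Map> fast_map,
                                       PropertyNormalizationMode mode);
  void Set(Handle<Map> fast_map, Handle<Map> normalized_map);
 private:
  static const int kEntries = 64;
  static inline int GetIndex(Handle<Map> map);
  Object* get(int index);              // hides FixedArray::get
  void set(int index, Object* value);  // hides FixedArray::set
};

class Map: public HeapObject {
 public:
  static Handle<Map> Normalize(Handle<Map> map, PropertyNormalizationMode mode);
 private:
  static Handle<Map> CopyNormalized(Handle<Map> map,
                                    PropertyNormalizationMode mode,
                                    NormalizedMapSharingMode sharing);
  inline void NotifyLeafMapLayoutChange();
};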